| query (string, lengths 9–3.4k) | document (string, lengths 9–87.4k) | metadata (dict) | negatives (sequence, lengths 4–101) | negative_scores (sequence, lengths 4–101) | document_score (string, lengths 3–10) | document_rank (string, 102 classes) |
|---|---|---|---|---|---|---|
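Each row pairs a natural-language query with a positive code document, plus training metadata, a list of negative code snippets, the retrieval score of each negative, the positive document's own score, and its rank among the candidates. A minimal sketch of reading such a row with the `datasets` library follows; the dataset id below is a placeholder, since the real Hub id is not given here:

```python
# Minimal sketch: load the dataset and inspect one triplet row.
# "user/code-retrieval-triplets" is a placeholder id, not the real one.
from datasets import load_dataset

ds = load_dataset("user/code-retrieval-triplets", split="train")
row = ds[0]

print(row["query"])                # natural-language query
print(row["document"][:120])       # positive code snippet, truncated
print(len(row["negatives"]))       # 4 to 101 hard negatives per row
print(row["negative_scores"][:3])  # retrieval scores, highest first
print(row["document_score"], row["document_rank"])
```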
Open an image file as a Pillow image object. | def handle_image_file(file: _FileLike) -> Image.Image:
    try:
        im = Image.open(file)
        return im
    except IOError as e:
        raise HTTPException(500, detail=str(e)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def load_image(file_path):\r\n return Image.open(file_path)",
"def open_image(image_path, mode=\"RGB\"):\n print(\"Opening image file in '%s'.\" % image_path)\n return Image.open(image_path).convert(mode)",
"def open_image(filename):\n\n dataset = gdal.Open(filename, gdal.GA_ReadOnly)\n if dataset is None:\n raise IOError(\"cannot open %s\" % filename)\n\n return GdalImage(dataset, filename)",
"def open(self, infile, cache=True):\n return _image.image_open(self, infile, cache)",
"def hload_pil(filepath):\n img = Image.open(filepath)\n return img",
"def get_image(image_path):\r\n\r\n return Image.open(image_path)",
"def _openImage(self, fname):\n image = cv2.imread(fname,0)\n\n if(image != None):\n return image\n else:\n raise IOError, \"Image file can not be opened\"",
"def get_image_from_file(path):\n try:\n img = Image.open(path)\n return img\n except IOError as e:\n print e\n return None",
"def open_image(path, w, h, antialias=True) -> ImageTk.PhotoImage:\n image = Image.open(path)\n aliasing = Image.ANTIALIAS if antialias else Image.NEAREST\n return ImageTk.PhotoImage(image.resize((w, h), aliasing))",
"def load(path) -> Image:\n return Image.open(path)",
"def read_image(image_path):\n if not os.path.exists(image_path):\n raise IOError('File does not exist: %s' % image_path)\n else:\n return Image.open(image_path)",
"def _open_img(self, img_name):\n try:\n img = Image.open(img_name)\n photo = ImageTk.PhotoImage(img)\n return photo\n except IOError:\n Debug.printi(\"Unable to find image \" + img_name, Debug.Level.ERROR)",
"def read_image(path: str):\n return Image.open(path, mode=\"r\")",
"def load_image(path_to_image, image_name):\n print(\"Loading: \", path_to_image + image_name, \" ...\")\n return Image.open(path_to_image + image_name)",
"def _read_image_from_file(file_name):\n image_file = open(file_name, 'rb')\n image = image_file.read()\n image_file.close()\n return image",
"def load_image_file(filename, mode='RGB'):\n return imread(filename, mode=mode)",
"def _get_image_from_file(dir_path, image_file):\n # Save ourselves the effort if PIL is not present, and return None now\n if not PIL_ENABLED:\n return None\n # Put together full path\n path = os.path.join(dir_path, image_file)\n # Try to read the image\n img = None\n try:\n from PIL import Image\n img = Image.open(path)\n except IOError as exptn:\n print('Error loading image file %s: %s' % (path, exptn))\n # Return image or None\n return img",
"def open_image(self):\n self.orig_image = Image.open(self.filename)\n if self.in_rgb:\n self.orig_image = self.orig_image.convert(\"RGB\")\n if self.min_filter:\n self.orig_image.filter(ImageFilter.MinFilter(self.min_filter))",
"def load_pil_image(filename, cfg):\n try:\n img = Image.open(filename)\n if img.height != cfg.IMAGE_H or img.width != cfg.IMAGE_W:\n img = img.resize((cfg.IMAGE_W, cfg.IMAGE_H))\n\n if cfg.IMAGE_DEPTH == 1:\n img = img.convert('L')\n \n return img\n\n except Exception as e:\n logger.error(f'failed to load image from {filename}: {e.message}')\n return None",
"def opened_image(mocker):\n opened_image = mocker.patch(\"PIL.Image.open\")\n opened_image = opened_image.return_value.__enter__.return_value\n opened_image.size = (1, 1)\n opened_image.mode = \"L\"\n\n return opened_image",
"def read_image(path):\n img = misc.imread(path)\n return img",
"def read_image(image_path, *args, **kwargs):\n # TODO: Implement the method\n image2 = Image.open(image_path)\n image = num.asarray(image2)\n\n return image",
"def LoadPicture(filename):\n return Bitmap(filename)",
"def _image(filename):\n return TK.PhotoImage(file=filename)",
"def pil_loader(path):\n with open(path, 'rb') as f:\n with Image.open(f) as img:\n sqrWidth = np.ceil(np.sqrt(img.size[0]*img.size[1])).astype(int)\n return img.convert('L').resize((sqrWidth, sqrWidth))",
"def newimagefromfile(self, infile):\n return _image.image_newimagefromfile(self, infile)",
"def from_file(filename):\n try:\n img = opencv.imread(filename, opencv.IMREAD_UNCHANGED)\n except:\n log = logging.getLogger(\".\".join([__name__]))\n log.info(\"check file path\")\n img = None\n \n return Image(img)",
"def load_pil_image_from_file(file_path: str) -> (Image, np.array):\n img = Image.open(file_path)\n img.load()\n rgb_image = img.convert('RGB')\n\n return rgb_image, np.asarray(rgb_image, dtype=\"int32\")",
"def _open_image(self, path):\n return cv.imread(path, 1)\n # .astype(float)",
"def load(cls, path):\n assert os.path.exists(path), \"No such file: %r\" % path\n\n (folder, filename) = os.path.split(path)\n (name, extension) = os.path.splitext(filename)\n\n image = Image(None)\n image._path = path\n image._format = Image.image_format(extension)\n\n return image",
"def get_image(path):\n\n # Check if the picture exists or not.\n if not os.path.isfile(path):\n print('Cannot open the image. Please try again!')\n exit(1)\n\n try:\n # Open the image.\n image = Image.open(path)\n\n # If everything is okay return it.\n return image\n # If an error occurred.\n except Exception as err:\n print('Error occurred while trying to open the image:', err, 'Please try again!')\n exit(1)",
"def open_image(self, filename):\n return np.array(self.ds['test'].load_image(filename))",
"def image(self, label, fname):\n if not os.path.exists(fname):\n raise OptionError(\"%s - no such file or directory\" % label)\n try:\n return Image(fname)\n except:\n raise OptionError(\"%s - invalid image file\" % label)",
"def load_image(self, path, target_size=None):\n img = self.pil_image.open(path)\n if img.mode != 'RGB':\n img = img.convert('RGB')\n if target_size is not None:\n width_height_tuple = (target_size[1], target_size[0])\n if img.size != width_height_tuple:\n img = img.resize(width_height_tuple, self.pil_interpolation)\n return img",
"def imread(path):\n with open(path, 'rb') as f:\n with PIL.Image.open(f) as img:\n return img.convert('RGB')",
"def import_image(self, file: str) -> Any:\n pass",
"def read_image(img_path):\n img = imageio.imread(uri=img_path)\n return img",
"def image(self) -> PIL.Image.Image:\n try:\n data = io.BytesIO(self.data)\n return PIL.Image.open(data)\n except Exception: # Image data is incorrect, fix as a simple transparent image\n return PIL.Image.new('RGBA', Image.MAX_IMAGE_SIZE)",
"def load_image(cls, fullname):\n\t\ttry:\n\t\t\timage_stream = open(fullname, 'rb')\n\t\t\timage = pyglet.image.load(fullname, file=image_stream)\n\t\texcept IOError, message:\n\t\t\tprint 'Cannot load image:', fullname\n\t\t\traise ImageLoadFileIOError, message\n\t\treturn image",
"def load_image(fname):\n return load_tiff(fname)",
"def load_image(filename):\n with open(filename, 'rb') as img_handle:\n img = Image.open(img_handle)\n img_data = img.getdata()\n if img.mode.startswith('RGB'):\n pixels = [round(.299 * p[0] + .587 * p[1] + .114 * p[2])\n for p in img_data]\n elif img.mode == 'LA':\n pixels = [p[0] for p in img_data]\n elif img.mode == 'L':\n pixels = list(img_data)\n else:\n raise ValueError('Unsupported image mode: %r' % img.mode)\n w, h = img.size\n return {'height': h, 'width': w, 'pixels': pixels}",
"def load_image(filename):\n with open(filename, 'rb') as img_handle:\n img = Image.open(img_handle)\n img_data = img.getdata()\n if img.mode.startswith('RGB'):\n pixels = [round(.299 * p[0] + .587 * p[1] + .114 * p[2])\n for p in img_data]\n elif img.mode == 'LA':\n pixels = [p[0] for p in img_data]\n elif img.mode == 'L':\n pixels = list(img_data)\n else:\n raise ValueError('Unsupported image mode: %r' % img.mode)\n w, h = img.size\n return {'height': h, 'width': w, 'pixels': pixels}",
"def imread(fname):\r\n return skimage.io.imread(fname)",
"def _load_image(file: str) -> pyglet.image.AbstractImage:\n\n return pyglet.image.load(Config.RES_DIR + \"img\" + Config.FILE_SEPARATOR + file)",
"def read_image(filename):\n img = Image.open(filename)\n im = np.array(img)\n return im",
"def load_image(data_dir, image_file):\n image_path = os.path.join(data_dir, image_file)\n image = mpimg.imread(image_path)\n return image",
"def image(self, src=None, **kw):\n if src:\n kw['src'] = src\n return self._open('img', **kw)",
"def load_image(filename, size=None, scale=None):\n img = Image.open(filename)\n if size is not None:\n img = img.resize((size, size), Image.ANTIALIAS)\n elif scale is not None:\n img = img.resize((int(img.size[0]*scale), int(img.size[1]*scale)), Image.ANTIALIAS)\n return img",
"def load_image(image_path):\n image = io.imread(image_path)\n io.imshow(image)\n io.show()\n print(\"Size of the image is {} KB\".format(round(os.path.getsize(image_path)/1024,2)))\n return image",
"def read_image(url):\n f = urllib2.urlopen(url)\n img = StringIO(f.read())\n return Image.open(img)",
"def read_image(filepath, format=None):\n image = Image.open(filepath)\n\n # capture and ignore this bug:\n # https://github.com/python-pillow/Pillow/issues/3973\n try:\n image = ImageOps.exif_transpose(image)\n except Exception:\n pass\n\n if format is not None:\n # PIL only supports RGB, so convert to RGB and flip channels over below\n conversion_format = format\n if format == \"BGR\":\n conversion_format = \"RGB\"\n image = image.convert(conversion_format)\n image = np.asarray(image)\n if format == \"BGR\":\n # flip channels if needed\n image = image[:, :, ::-1]\n # PIL squeezes out the channel dimension for \"L\", so make it HWC\n if format == \"L\":\n image = np.expand_dims(image, -1)\n return image",
"def load_image_file(file, mode='RGB'):\n im = PIL.Image.open(file)\n if mode:\n im = im.convert(mode)\n return np.array(im)",
"def load_image(filename):\n rgb = imread(filename)\n return UncertainImage(rgb)",
"def image(fname):\n return cv2.imread(fname)",
"def create_img_object(file, scale):\n with Raw(filename=file) as raw:\n img = Image.open(io.BytesIO(raw.thumbnail_to_buffer())).convert('RGBA')\n resize_dims = list(map(int, (i * scale for i in img.size)))\n img = img.resize(resize_dims)\n return img",
"def download_pil_image(self, url):\r\n return Image.open(urlopen(url))",
"def read_image(filename):\n\n from matplotlib.image import pil_to_array\n\n with Image.open(filename) as image:\n return pil_to_array(image)",
"def import_image(self, imfile):\n img = self._load_image(imfile)\n img = self._trim_margins(img)\n self._check_size(img)\n return img",
"def load_image(filename):\r\n \r\n # Load the file\r\n print \"INFO: Loading Image: \" +str(filename)\r\n image = Image.open(filename)\r\n pixels = image.load()\r\n print \"INFO: Image loaded.\"\r\n \r\n return (image, pixels)",
"def _load_image(self, imfile):\n im = Image.open(str(imfile))\n dpi_ratio = num.true_divide(self.expected_dpi, num.array(im.info['dpi']))\n newsize = (num.array(im.size) * dpi_ratio).astype('i')\n if not all(newsize == num.array(im.size)):\n im = im.resize(newsize, Image.BICUBIC)\n img = num.array(im.convert('L')) # convert to greyscale array 0-255\n return img",
"def load_image_file(file, mode='RGB'):\n\tim = PIL.Image.open(file)\n\tif mode:\n\t\tim = im.convert(mode)\n\treturn np.array(im)",
"def _openFlt(self, fname):\n image = np.loadtxt(fname)\n\n if(image !=None):\n M,N=(int(image[0]), int(image[1]))\n image = image[2:image.shape[0]]\n image = image.reshape((M,N))\n else:\n raise IOError, \"Image file can not be opened\"\n\n return image",
"def open_image(name):\n img_name = 'input/' + name + '.png'\n return cv2.imread(img_name, cv2.IMREAD_UNCHANGED)",
"def load_image(infilename):\n data = mpimg.imread(infilename)\n return data",
"def load(image_path):\n\tpil_image = Image.open(image_path).convert(\"RGB\")\n\t# convert to BGR format\n\timage = np.array(pil_image)[:, :, [2, 1, 0]]\n\treturn image",
"def file(self):\n # ImageField (both django's and factory_boy's) require PIL.\n # Try to import it along one of its known installation paths.\n try:\n from PIL import Image as PILimage\n except ImportError:\n import Image as PILimage\n\n thumb = PILimage.new(\"RGB\", (100, 100), \"blue\")\n thumb_io = io.BytesIO()\n thumb.save(thumb_io, format=\"JPEG\")\n\n return File(thumb_io, name=self.original_filename)",
"def open_image(infile):\n with fits.open(infile) as f:\n header = f[0].header\n data = f[0].data\n if data.ndim == 2:\n # NAXIS=2: [Y, X]\n image = data\n elif data.ndim == 3 and data.shape[0] == 1:\n # NAXIS=3: [FREQ=1, Y, X]\n image = data[0, :, :]\n elif data.ndim == 4 and data.shape[0] == 1 and data.shape[1] == 1:\n # NAXIS=4: [STOKES=1, FREQ=1, Y, X]\n image = data[0, 0, :, :]\n else:\n raise ValueError(\"Slice '{0}' has invalid dimensions: {1}\".format(\n infile, data.shape))\n return (header, image)",
"def get_image(filename):\n if has_PIL or force_PNG:\n filename = filename + '.png'\n else:\n filename = filename + '.gif'\n if has_PIL:\n #pylint:disable=maybe-no-member\n return ImageTk.PhotoImage(Image.open(filename))\n else:\n return PhotoImage(file=filename)",
"def read_image(image_file_path: str):\n\n pixels = numpy.array(Image.open(image_file_path))\n\n return pixels",
"def imread(filename, *args, **kwargs):\r\n try:\r\n netpbm = NetpbmFile(filename)\r\n image = netpbm.asarray()\r\n finally:\r\n netpbm.close()\r\n return image",
"def load_image(self):\n try:\n return Image.open(self._path, 'r')\n except IOError:\n messagebox.showerror(\"Error\", \"Wrong sprite file path!\")",
"def read_image(img_path):\n got_img = False\n if not osp.exists(img_path):\n raise IOError(\"{} does not exist\".format(img_path))\n while not got_img:\n try:\n img = Image.open(img_path).convert('RGB')\n got_img = True\n except IOError:\n print(\"IOError incurred when reading '{}'. Will redo. Don't worry. Just chill.\".format(img_path))\n pass\n return img",
"def from_url(self) -> PngImagePlugin.PngImageFile:\n response = requests.get(self.url)\n img = Image.open(BytesIO(response.content))\n\n return img",
"def show_image(file_location):\n img = Image.open(file_location)\n img.show()",
"def __load_image_from_file(self, file):\n if isinstance(file, basestring): # file is a path\n path = file\n self.__ext = os.path.splitext(path)[1]\n self._content_type = self.__image_ext_content_type(self.__ext)\n with open(path, 'rb') as f:\n self._load_blob = f.read()\n else: # assume file is a file-like object\n self.__ext = self.__ext_from_image_stream(file)\n self._content_type = self.__image_ext_content_type(self.__ext)\n file.seek(0)\n self._load_blob = file.read()",
"def load_image(self):\n if isinstance(self.filename, str):\n self.image = np.asarray(PIL.Image.open(self.filename))\n elif isinstance(self.filename, np.ndarray):\n self.image = np.asarray(self.filename)\n if self.image.ndim < 3:\n self.bw = True\n if self.image.ndim < 2:\n self.image = None\n print(\"file {} is not an appropriate format.\".format(\n self.filename))\n if self.image.ndim == 3:\n if self.image.shape[-1] == 1:\n self.image = np.squeeze(self.image)\n elif self.image.shape[-1] > 3:\n self.image = self.image[..., :-1]\n if (self.image[..., 0] == self.image.mean(-1)).mean() == 1:\n self.image = self.image[..., 0]\n self.bw = True\n return self.image",
"def imread(file, as_pil=False, resize=None, to_rgb=False):\n # Read an example image as a numpy array.\n if is_url(file):\n hdr = {\n 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 '\n '(KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.'\n '11',\n 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*'\n '/*;q=0.8',\n 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',\n 'Accept-Encoding': 'none',\n 'Accept-Language': 'en-US,en;q=0.8',\n 'Connection': 'keep-alive'\n }\n req = urllib.request.Request(file, headers=hdr)\n file = urllib.request.urlopen(req)\n\n img = Image.open(file)\n if to_rgb:\n img = img.convert('RGB')\n if resize is not None:\n if not isinstance(resize, tuple) and not isinstance(resize, list):\n scale = float(resize) / float(min(img.size[0], img.size[1]))\n resize = [round(scale * h) for h in img.size]\n if resize != img.size:\n img = img.resize(resize, Image.ANTIALIAS)\n if as_pil:\n return img\n return pil_to_tensor(img)",
"def read_image(img_path):\n got_img = False\n if not osp.exists(img_path):\n raise IOError(\"{} does not exist\".format(img_path))\n while not got_img:\n try:\n img = Image.open(img_path).convert('RGB')\n got_img = True\n except IOError:\n from ipdb import set_trace; set_trace()\n print(\"IOError incurred when reading '{}'. Will redo. Don't worry. Just chill.\".format(img_path))\n pass\n return img",
"def image(self, path):\n im = Image.open(path).convert(\"RGB\")\n # Convert the RGB image in printable image\n self._convert_and_print_image(im)",
"def load_image(file):\n\n\tfile = os.path.join(DIR_MENU_PICTURES, file)\n\ttry:\n\t\tsurface = pygame.image.load(file)\n\texcept pygame.error:\n\t\terror = \"Could not load image \\\"%s\\\" %s\"%(file, pygame.get_error())\n\t\traise SystemExit(error)\n\treturn surface.convert()",
"def load(f, as_grey=False):\n use_plugin('pil')\n return imread(os.path.join(assets, f), as_grey=as_grey)",
"def openTifImage(image_path):\n assert image_path.lower().endswith(('.tif','.tiff')), \"Image is not a TIF file\"\n return tifffile.imread(image_path)",
"def open(self, img_name, size=\"default\"):\n print(\"Openning %s\" % img_name)\n self.img_original = Image.open(img_name, mode='r')\n self.img_name = img_name\n\n if size == \"default\":\n size = self.img_original.size[0]\n\n self.img_debut = self.img_resize(size)\n return self.img_debut",
"def load_image(img_file, as_float=False):\n if hasattr(img_file, 'read'):\n pil_img = Image.open(img_file)\n else:\n with open(img_file, 'rb') as f:\n pil_img = Image.open(f)\n pil_img.load()\n return pil_to_npa(pil_img, as_float=as_float)",
"def load_image_file_like(self, file_like_obj, colorkey=None): # -> image\n raise NotImplementedError(u'This should be implemented in a inherited class')",
"def load_image_file_like(self, file_like_obj, colorkey=None): # -> image\n raise NotImplementedError(u'This should be implemented in a inherited class')",
"def _load_image(path):\r\n image = Image.open(path)\r\n size = image.size\r\n \r\n image = image.resize((550,550), Image.ANTIALIAS)\r\n# image = image.thumbnail((200,200), Image.ANTIALIAS)\r\n return image",
"def get_image_by_path(image_path, target_size=None):\n img = image.load_img(image_path, target_size=target_size)\n return img",
"def onclick_open_image(self):\n filename = select_file(\n \"Select Image\",\n \"../\",\n \"Image Files (*.jpeg *.jpg *.png *.gif *.bmg)\")\n if filename:\n param_name = select_file(\n \"Select Parameter\", \"../\", \"Parameter Files (*.json)\")\n if param_name:\n self.moildev = Moildev(param_name)\n self.image = read_image(filename)\n self.h, self.w = self.image.shape[:2]\n self.show_to_window()",
"def load(filepath):\n canvas = Canvas(100, 100)\n canvas.img = PIL.Image.open(filepath)\n if not canvas.img.mode in (\"RGB\",\"RGBA\"):\n canvas.img = canvas.img.convert(\"RGBA\")\n canvas.drawer = aggdraw.Draw(canvas.img)\n canvas.pixel_space()\n return canvas",
"def load_image(self, filename):\n return pygame.image.load(os.path.join('images', filename))",
"def image(self):\n # TODO: make sure this method works for png, gif, tiff\n if self.has_metadata:\n self.extract_metadata()\n tempdir_path = self.make_tempdir()\n tempfile_path = os.path.join(tempdir_path, self.filename)\n warnings.simplefilter('error', Image.DecompressionBombWarning)\n try: # Do image conversions\n img_in = Image.open(self.src_path)\n img_out = Image.frombytes(img_in.mode, img_in.size, img_in.tobytes())\n img_out.save(tempfile_path)\n self.src_path = tempfile_path\n except Exception as e: # Catch decompression bombs\n # TODO: change this from all Exceptions to specific DecompressionBombWarning\n self.add_error(e, \"Caught exception (possible decompression bomb?) while translating file {}.\".format(self.src_path))\n self.make_dangerous()\n self.add_file_string('Image file')\n self.set_property('processing_type', 'image')",
"def get_image(path, width=1*cm):\r\n \r\n img = utils.ImageReader(path)\r\n iw, ih = img.getSize()\r\n aspect = ih / float(iw)\r\n return Image(path, width=width, height=(width * aspect))",
"def load_from_file(self, filename):\n\n loader = ImageLoader()\n loader.load(self, filename)",
"def image(self):\r\n\r\n if sys.version < '3':\r\n imageio = StringIO.StringIO(self._image_data)\r\n else:\r\n imageio = StringIO.BytesIO(self._image_data)\r\n\r\n try:\r\n source_image = PILImage.open(imageio)\r\n img = PILImage.new('RGBA', source_image.size, (0, 0, 0, 0))\r\n\r\n if source_image.mode == 'L':\r\n alpha = source_image.split()[0]\r\n transparency = source_image.info.get('transparency')\r\n mask = PILImage.eval(alpha, lambda a: 0 if a == transparency else 255)\r\n img.paste(source_image, (0, 0), mask=mask)\r\n else:\r\n img.paste(source_image, (0, 0))\r\n except IOError, e:\r\n raise PILUnavailableError(e.args[0].split()[1])\r\n finally:\r\n imageio.close()\r\n\r\n self.original_width, self.original_height = img.size\r\n\r\n # Crop the image searching for the smallest possible bounding box\r\n # without losing any non-transparent pixel.\r\n # This crop is only used if the crop flag is set in the config.\r\n if self.config['crop']:\r\n img = img.crop(img.split()[-1].getbbox())\r\n return img",
"def read_img(img_path:str) -> object:\n img = cv2.imread(img_path)\n return img",
"def load_image(img_file_name):\n file_name = os.path.join('.', 'images', img_file_name)\n img = pygame.image.load(file_name)\n img.convert()\n return img",
"def read_image(img_path):\n got_img = False\n if not os.path.exists(img_path):\n raise IOError(\"{} does not exist\".format(img_path))\n while not got_img:\n try:\n img = Image.open(img_path).convert('RGB')\n got_img = True\n except IOError:\n print(\"IOError incurred when reading '{}'. Will redo. Don't worry. Just chill.\".format(img_path))\n pass\n return img",
"def open(self, filepath=None):\n if filepath is None:\n filepath, dummy = QFileDialog.getOpenFileName(self, \"Open image file.\")\n if len(filepath) and os.path.isfile(filepath):\n image = QImage(filepath)\n self.setImage(image)",
"def openFile(self):\r\n from SXM import FileIO,Data\r\n fname = str(QFileDialog.getOpenFileName(self.widget,self.tr(\"Open File\"), \\\r\n \".\",FileIO.getFilterString(types=(Data.Image,))))\r\n if len(fname) > 0:\r\n root, ext = os.path.splitext(fname)\r\n self.statusBar().showMessage(self.tr(\"Loading data: %1\").arg(fname),2000)\r\n image = FileIO.fromFile(fname)\r\n image.load()\r\n imwin = ImageWindow(self,image)\r\n self.Images.append(imwin)\r\n self.updateImageList()\r\n imwin.windowModality = False\r\n imwin.show()"
] | [
"0.759027",
"0.73065186",
"0.718372",
"0.7101412",
"0.70892704",
"0.6963195",
"0.694717",
"0.69382864",
"0.68608856",
"0.6837533",
"0.6731151",
"0.6718459",
"0.6677456",
"0.66101444",
"0.65950394",
"0.65664005",
"0.6510118",
"0.6501636",
"0.6369527",
"0.63234633",
"0.6295427",
"0.62756",
"0.6267828",
"0.6256748",
"0.62241066",
"0.6218278",
"0.62177414",
"0.6197095",
"0.6193642",
"0.61826485",
"0.6181741",
"0.6167356",
"0.61609775",
"0.6156089",
"0.6147879",
"0.6127528",
"0.6124253",
"0.6118818",
"0.6109618",
"0.61023897",
"0.6092976",
"0.6092976",
"0.6079174",
"0.6075413",
"0.606688",
"0.606614",
"0.60531616",
"0.6046875",
"0.6042321",
"0.6042256",
"0.6040866",
"0.60335785",
"0.6030216",
"0.6023939",
"0.60205096",
"0.60063756",
"0.5964779",
"0.5962141",
"0.5961289",
"0.59604645",
"0.59603",
"0.5957744",
"0.5954756",
"0.5945115",
"0.5939457",
"0.59390765",
"0.59168917",
"0.5907715",
"0.5907036",
"0.5904371",
"0.5901624",
"0.5891547",
"0.58910275",
"0.58877563",
"0.58801484",
"0.58597165",
"0.58565503",
"0.58525074",
"0.58490384",
"0.5845557",
"0.5845047",
"0.58376414",
"0.5836188",
"0.583587",
"0.5834277",
"0.5834277",
"0.58322835",
"0.5823115",
"0.58141875",
"0.57970595",
"0.5789975",
"0.5780698",
"0.577605",
"0.5775046",
"0.576546",
"0.5762445",
"0.57518",
"0.574621",
"0.57457477",
"0.5743288"
] | 0.6875218 | 8 |
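The `objective.triplet` entry in each row's metadata marks the (query, document, negatives) fields as a contrastive triplet. As one illustration of how such a triplet is typically consumed (a generic InfoNCE-style sketch, not a recipe prescribed by this dataset), the positive document competes against the in-row negatives in a softmax over similarities:

```python
# Generic InfoNCE-style loss over one (query, document, negatives) triplet.
# Embeddings are assumed to come from any text/code encoder; none is
# specified by the dataset itself.
import torch
import torch.nn.functional as F

def triplet_info_nce(query_emb: torch.Tensor,   # shape (d,)
                     doc_emb: torch.Tensor,     # shape (d,)
                     neg_embs: torch.Tensor,    # shape (n, d)
                     temperature: float = 0.05) -> torch.Tensor:
    # Candidate list: positive document first, then all negatives.
    candidates = torch.cat([doc_emb.unsqueeze(0), neg_embs], dim=0)  # (n+1, d)
    sims = F.cosine_similarity(query_emb.unsqueeze(0), candidates, dim=-1)
    # Cross-entropy against index 0 pulls the positive above the negatives.
    target = torch.zeros(1, dtype=torch.long)
    return F.cross_entropy(sims.unsqueeze(0) / temperature, target)
```

For the row above, `doc_emb` would be the embedding of the `handle_image_file` document and `neg_embs` the embeddings of the snippets in `negatives`.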
Fetch an image from a given URL. | def fetch_image(url: str) -> Image.Image:
    r = httpx.get(url)
    if not r.status_code == httpx.codes.OK:
        raise HTTPException(r.status_code, detail=r.reason_phrase)
    f = BytesIO(r.content)
    im = handle_image_file(f)
    return im | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fetch_image(img_url):\n\n r = requests.get(img_url)\n return r.content",
"def _download_img_from_url(self, img_url):\r\n response = requests.get(img_url)\r\n img = Image.open(BytesIO(response.content))\r\n print(\"Downloaded image from url\")\r\n return img",
"def getImage(url):\n response = requests.get(url)\n img = Image.open(BytesIO(response.content))\n return img",
"def get_image_by_url(url):\n retry_count = 0\n while True:\n try:\n req_headers = {\"User-Agent\": DEFAULT_REQUEST_UA}\n r = requests.get(\n url, headers=req_headers, stream=True, timeout=DEFAULT_REQUEST_TIMEOUT\n )\n image_data = r.content\n if isinstance(image_data, bytes):\n image_data = BytesIO(image_data)\n else:\n image_data = StringIO(image_data)\n\n im = Image.open(image_data)\n return im\n except Timeout as e:\n if retry_count <= DEFAULT_REQUEST_RETRY:\n continue\n else:\n raise e\n except Exception as e:\n logging.exception(e)\n raise RequestException(e)",
"def download_pil_image(self, url):\r\n return Image.open(urlopen(url))",
"def get_img_from_url(index, url):\n try:\n with urllib.request.urlopen(url) as response:\n if response.headers.get_content_maintype() == 'image':\n image_filename = image_filename_prefix.format(name=image_class_name,\n counter=index,\n ext=response.headers.get_content_subtype())\n image_filepath = os.path.join(target_folder, image_filename)\n with open(image_filepath, 'wb') as image_file:\n image_file.write(response.read())\n\n print('Fetched URL {}'.format(index))\n\n except urllib.request.HTTPError:\n pass\n except Exception:\n pass",
"def download_image(url):\n buffer = BytesIO()\n download_from_url(url, buffer, pbar=False)\n buffer.seek(0)\n return Image.open(buffer)",
"def read_image(url):\n f = urllib2.urlopen(url)\n img = StringIO(f.read())\n return Image.open(img)",
"def getOrDownloadImageObject(self, url):\n \n if \"//\" in url:\n return self.downloadImage(url)\n else:\n return self.getPILFromPath(url)",
"def get_image(self, url):\n\n log(\"Getting image {}\".format(url))\n response = requests.get(url)\n if response.status_code == 200:\n image = self._pilimg.open(io.BytesIO(response.content))\n return image.convert('RGBA')\n return None",
"def urlToImage(url):\n\n response = requests.get(url)\n image = Image.open(BytesIO(response.content))\n return image",
"def downloadImage(self, url):\n req = urllib2.Request(url)\n response = urllib2.urlopen(req)\n data = response.read()\n io = cStringIO.StringIO(data)\n return PIL.Image.open(io)",
"def _url_to_image(url: str) -> Image.Image:\n assert url.lower().startswith(\"http\"), \"invalid url, must start with http\"\n content = requests.get(url).content\n image = Image.open(BytesIO(content))\n return image",
"def download_image(url):\n request = urllib.request.Request(\n url, headers={'Authorization': 'Bearer %s' % ACCESS_TOKEN})\n return urllib.request.urlopen(request).read()",
"def GET(self, url):\n try:\n f = open(url, 'r')\n image = f.read()\n f.close()\n except:\n\n db_module.resave_img(url[5:])\n\n f = open(url, 'r')\n image = f.read()\n f.close()\n\n return image",
"def get_image(url, path):\n r = requests.get(url, stream=True)\n if r.status_code == 200:\n with open(path, 'wb') as f:\n r.raw.decode_content = True\n shutil.copyfileobj(r.raw, f)\n print(\"[>] get\", url, \">>\", path)\n f.close()",
"def download_image(url):\n request = urllib.request.Request(url, headers={'Authorization': 'Bearer %s' % BOT_TOKEN})\n return urllib.request.urlopen(request).read()",
"async def get_image(session, url):\n async with session.get(url) as resp:\n if resp.status != 200:\n logging.error(f'response status != 200 for image {url}')\n return None\n return await resp.read()",
"def joblib_read_img_url(url):\n\n from matplotlib.image import imread\n fd = urlopen(url, timeout=10)\n return imread(io.BytesIO(fd.read()))",
"def from_url(self) -> PngImagePlugin.PngImageFile:\n response = requests.get(self.url)\n img = Image.open(BytesIO(response.content))\n\n return img",
"def load_remote_image(image_url):\n response = requests.get(image_url, stream=True)\n img = Image.open(BytesIO(response.content))\n image = cv2.cvtColor(np.asarray(img), cv2.COLOR_RGB2BGR)\n return image",
"def getResponseFromHttpRequest(url):\n try:\n response = HTTP.Request(url, headers = {'User-agent': USER_AGENT, 'Accept': 'image/jpeg'})\n return response\n except:\n Log.Debug('Error fetching URL: \"%s\".' % url)\n return None",
"def set_image_from_url(self, url: str):\n response = httpx.get(url)\n if response.status_code == 200:\n file = ContentFile(response.content)\n file.name = \"url-\" + shortuuid.uuid()\n self.image = file\n self.save()",
"def get_image(\n url: str\n) -> Union[Dict[str, Union[int, str, BytesIO, None]], None]:\n try:\n logger.info('downloading image: %s', url)\n r = requests.get(url)\n\n if r.status_code == 200:\n\n # loading binary data to mem\n img = BytesIO(r.content)\n\n # loading image to PIL\n pil_img = Image.open(img)\n\n # seek to 0\n img.seek(0)\n\n return {\n 'content-type': r.headers.get('Content-Type'),\n 'image': img,\n 'width': pil_img.width,\n 'height': pil_img.height,\n }\n\n raise Exception('wrong status code %s', r.status_code)\n\n except BaseException as e:\n logger.error('could not download and analyze img: %s', str(e))\n\n return None",
"def download_image(url, filename):\n r = requests.get(url)\n open(filename, 'wb').write(r.content)",
"def download_image(filename):\n return ImageApiHandler.image_handler.get(filename)",
"def url2img(url : str, timeout = 1) -> Image:\n\n response = requests.get(url, timeout = timeout)\n return Image.open(BytesIO(response.content))",
"def download(self, url):\n req = self.request(url)\n inputfile, outputfile = BytesIO(urlopen(req).read()), BytesIO()\n\n img = Image.open(inputfile)\n img = img.convert(\"RGB\") if img.mode != \"RGB\" else img\n img.thumbnail((192, 192), Image.ANTIALIAS)\n img.save(outputfile, \"JPEG\")\n\n self.image.save(os.path.basename(\n self._clean_url(url)),\n ContentFile(outputfile.getvalue()),\n save=False,\n )",
"def fetchImgOrDir(url, verboseLogs):\n try:\n resp = urllib.request.urlopen(url)\n except Exception as e:\n if verboseLogs:\n logging.error('Result of fetch from %s: %s', url, str(e))\n return (None, None)\n if resp.getheader('content-type') == 'image/jpeg':\n return ('img', resp)\n else:\n return ('dir', resp)",
"def get_content(url):\n img=requests.get(url).content\n return img",
"def _import_image_by_url(self, url, session, field, line_number):\n maxsize = int(config.get(\"import_image_maxbytes\", DEFAULT_IMAGE_MAXBYTES))\n try:\n response = session.get(url, timeout=int(config.get(\"import_image_timeout\", DEFAULT_IMAGE_TIMEOUT)))\n response.raise_for_status()\n\n if response.headers.get('Content-Length') and int(response.headers['Content-Length']) > maxsize:\n raise ValueError(_(\"File size exceeds configured maximum (%s bytes)\") % maxsize)\n\n content = bytearray()\n for chunk in response.iter_content(DEFAULT_IMAGE_CHUNK_SIZE):\n content += chunk\n if len(content) > maxsize:\n raise ValueError(_(\"File size exceeds configured maximum (%s bytes)\") % maxsize)\n\n image = Image.open(io.BytesIO(content))\n w, h = image.size\n if w * h > 42e6: # Nokia Lumia 1020 photo resolution\n raise ValueError(\n u\"Image size excessive, imported images must be smaller \"\n u\"than 42 million pixel\")\n\n return base64.b64encode(content)\n except Exception as e:\n raise ValueError(_(\"Could not retrieve URL: %(url)s [%(field_name)s: L%(line_number)d]: %(error)s\") % {\n 'url': url,\n 'field_name': field,\n 'line_number': line_number + 1,\n 'error': e\n })",
"def download(self):\n data = urllib.urlopen(self.remoteurl).read()\n s = StringIO.StringIO(data)\n return Image.open(s)",
"def download_image(url, img_path):\n img_data = requests.get(url).content\n with open(img_path, 'wb') as file:\n file.write(img_data)\n img_path = os.path.abspath(img_path)\n return img_path",
"def download_image(self, url):\r\n file_path = os.path.join(self.temp_dir, 'image.png')\r\n urlretrieve(url, file_path)\r\n return file_path",
"def load(url):\n response = requests.get(url)\n pil_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n # convert to BGR format\n image = np.array(pil_image)[:, :, [2, 1, 0]]\n return image",
"def image_downloader(url, file_path, file_name):\n response = requests.get(url, stream=True)\n with open(file_path + \"/\" + file_name, 'wb') as out_file:\n shutil.copyfileobj(response.raw, out_file)",
"def download_single(data):\n url = data[0]\n image_id = data[1]\n target_path = data[2]\n\n if os.path.exists(target_path):\n return\n\n try:\n response = requests.get(url, timeout=30)\n response.raise_for_status()\n except:\n LOGGER.warning('Failed to fetch url %s (id=%d)', url, image_id)\n return\n\n try:\n content = response.content\n image = Image.open(BytesIO(content))\n except:\n LOGGER.warning('Failed to capture image at url %s (id=%d)', url, image_id)\n return\n\n if not image.format == 'JPEG':\n try:\n image = image.convert('RGB')\n except:\n logging.warning('Failed to convert RGB, %s (id=%d)', url, image_id)\n return\n\n try:\n image.save(target_path, format='JPEG', quality=100)\n except:\n LOGGER.warning('Failed to save url %s (id=%d)', url, image_id)\n return\n\n return",
"def download_img(url,name):\n resp = download(url)\n if (resp!=None):\n image = np.asarray(bytearray(resp), dtype=\"uint8\")\n image = cv2.imdecode(image, cv2.IMREAD_COLOR)\n cv2.imwrite(name,image)\n return",
"def imageGet(soup):\n img = soup.find('img', class_='a-hidden')\n img = str(img)\n imgURL = re.findall('https?://.+jpg', img)\n response = requests.get(imgURL[0])\n photo = Image.open(BytesIO(response.content))\n img = imgURL[0]\n\n return img",
"def process_image_url(self, image_url):\n image = image_util.load_image_from_url(image_url)\n return self.process_image(image)",
"def get_tile(url):\n hash_name = hashlib.md5(url.encode(\"utf-16\")).hexdigest()\n fname = hash_name + \".jpeg\"\n print(\"Checking tile\" + fname)\n #if image is already downloaded, return it\n if os.path.isfile(fname):\n print(\"Downloaded!\")\n try:\n # image was fully downloaded, good to return\n return Image.open(fname) \n except Exception:\n print(\"Tile is corrupt :(\")\n # file is corrupted for some reason, so try to download it\n pass\n print(\"Downloading \" + fname)\n req.urlretrieve(url, fname) \n return Image.open(fname)",
"def download_img(self, url, output):\n try:\n print(\"Downloading from: %s\" % url)\n with open(output, 'wb') as f:\n f.write(urllib2.urlopen(url).read())\n print(\"Wrote to: %s\" % output)\n except IOError, e:\n print(e)",
"def _get_image_content(image_url):\n response = requests.get(image_url)\n return response.content",
"def dl_image(img_name, img_url):\n path = os.path.join(base_path, img_name)\n res = requests.get(img_url)\n with open(path, 'wb') as fout:\n fout.write(res.content)",
"def download_image(url, dest):\n logging.info('Downloading {} into {}'.format(url, dest))\n dest = um.join_paths(dest, url.split('/')[-1])\n response = requests.get(url)\n if um.is_image_response(response):\n with open(dest, 'wb') as f:\n f.write(response.content)",
"def get_img_src(url):\n res = requests.get(urljoin(BASE_URL, url))\n soup = BeautifulSoup(res.text, \"html.parser\")\n img = soup.find(\"img\")\n if not img:\n raise ValueError(\"No <img> found on page\")\n return img[\"src\"]",
"def get_image_from_uri(cache, url_fetcher, options, url, forced_mime_type=None,\n context=None, orientation='from-image'):\n if url in cache:\n return cache[url]\n\n try:\n with fetch(url_fetcher, url) as result:\n parsed_url = urlparse(result.get('redirected_url'))\n if parsed_url.scheme == 'file':\n filename = url2pathname(parsed_url.path)\n else:\n filename = None\n if 'string' in result:\n string = result['string']\n else:\n string = result['file_obj'].read()\n mime_type = forced_mime_type or result['mime_type']\n\n image = None\n svg_exceptions = []\n # Try to rely on given mimetype for SVG\n if mime_type == 'image/svg+xml':\n try:\n tree = ElementTree.fromstring(string)\n image = SVGImage(tree, url, url_fetcher, context)\n except Exception as svg_exception:\n svg_exceptions.append(svg_exception)\n # Try pillow for raster images, or for failing SVG\n if image is None:\n try:\n pillow_image = Image.open(BytesIO(string))\n except Exception as raster_exception:\n if mime_type == 'image/svg+xml':\n # Tried SVGImage then Pillow for a SVG, abort\n raise ImageLoadingError.from_exception(svg_exceptions[0])\n try:\n # Last chance, try SVG\n tree = ElementTree.fromstring(string)\n image = SVGImage(tree, url, url_fetcher, context)\n except Exception:\n # Tried Pillow then SVGImage for a raster, abort\n raise ImageLoadingError.from_exception(raster_exception)\n else:\n # Store image id to enable cache in Stream.add_image\n image_id = md5(url.encode()).hexdigest()\n image = RasterImage(\n pillow_image, image_id, string, filename, cache,\n orientation, options)\n\n except (URLFetchingError, ImageLoadingError) as exception:\n LOGGER.error('Failed to load image at %r: %s', url, exception)\n image = None\n\n cache[url] = image\n return image",
"async def dl_image(url, filename):\n\ttry:\n\t\twith aiohttp.ClientSession() as session:\n\t\t\tasync with session.get(url) as resp:\n\t\t\t\ttest = await resp.read()\n\t\t\t\twith open('data/tmp/'+filename.lower(), \"wb\") as f:\n\t\t\t\t\tf.write(test)\n\t\t\t\treturn 0\n\texcept Exception as e:\n\t\tprint('[!ERROR!] in Get image')\n\t\tprint(e)\n\t\treturn -1",
"def download_image(filename, url):\n if not url:\n return url\n refresh_needed = False\n if xbmcvfs.exists(filename) and filename == url:\n # only overwrite if new image is different\n return filename\n else:\n if xbmcvfs.exists(filename):\n xbmcvfs.delete(filename)\n refresh_needed = True\n if xbmcvfs.copy(url, filename):\n if refresh_needed:\n refresh_image(filename)\n return filename\n\n return url",
"def get_image_from_camera(self, url):\n if DEBUG:\n print(\"[DEBUG] Getting image from BlueIris url: %s\" % url)\n\n resp = urllib.request.urlopen(url)\n image = np.asarray(bytearray(resp.read()), dtype=\"uint8\")\n image = cv2.imdecode(image, cv2.IMREAD_UNCHANGED)\n self.timestamp = time.time()\n self.trigger_image = image\n self.processed_image = image # Start off by having processed image same as initial image\n\n self._init_new_image()\n # if DEBUG:\n # # print(\"[DEBUG] [ImageFrame.get_image_from_camera] Image width: {}, height: {}\".format(\n # self.width, self.height))\n\n # return the image\n return self.trigger_image",
"def load_file_from_url(self, url: str) -> bytes:\n cached_content = self.cache_get(url)\n if cached_content is not None:\n return cached_content\n try:\n req = requests.get(url, timeout=self.requests_timeout)\n req.raise_for_status()\n content = req.content\n self.cache_set(url, content)\n except requests.RequestException as err:\n self.log_error(err)\n repl_content = self.get_replacement_file(url)\n if repl_content is None:\n raise ImageNotFound(err)\n content = repl_content\n return content",
"def get_image(result):\n article_id = result['id']\n id_ = article_id[14:]\n href = article_id[:14]\n\n #FIXME: not working\n image_url = \"http://www.jpress.nli.org.il/Olive/APA/NLI_heb/get/GetImage.ashx?kind=block&href=%s&id=%s&ext=.png\" %(href, id_)\n \n return image_url",
"def download_picture(url, filename):\n logger = logging.getLogger(\"steam.query.download_picture\")\n logger.debug(\"Downloading picture ({0}) ...\".format(filename))\n\n try:\n urllib.urlretrieve(url, filename)\n return True\n except:\n logger.error(\"Could not download picture {0} from {1}!\".format(filename, url))\n return False",
"def get_image(filename):\n\n client.download_file(S3_BUCKET, filename, 'uploads/{}'.format(filename))",
"async def get_url_images(session, url):\n content = await get_page(session, url)\n if not content:\n return []\n soup = BeautifulSoup(content, features=\"html.parser\")\n image_sources = [img['src'] for img in soup.find_all('img')]\n image_sources_fixed = [f'https:{source}' if 'https:' not in source else source for source in image_sources]\n images = []\n for source in image_sources_fixed:\n image = await get_image(session, source)\n if image:\n images.append((source, image))\n\n return images",
"def get_at_url(self, url):\n class NullDevice():\n def write(self, s):\n pass\n\n def get_gallery_item(id):\n \"\"\"\n Special helper method to get gallery items.\n\n The problem is that it's impossible to distinguish albums and\n images from each other based on the url. And there isn't a common\n url endpoints that return either a Gallery_album or a Gallery_image\n depending on what the id represents. So the only option is to\n assume it's a Gallery_image and if we get an exception then try\n Gallery_album. Gallery_image is attempted first because there is\n the most of them.\n \"\"\"\n try:\n # HACK: Problem is that send_request prints the error message\n # from Imgur when it encounters an error. This is nice because\n # this error message is more descriptive than just the status\n # code that Requests give. But since we first assume the id\n # belong to an image, it means we will get an error whenever\n # the id belongs to an album. The following code temporarily\n # disables stdout to avoid give a cryptic and incorrect error.\n\n # Code for disabling stdout is from\n # http://coreygoldberg.blogspot.dk/2009/05/\n # python-redirect-or-turn-off-stdout-and.html\n original_stdout = sys.stdout # keep a reference to STDOUT\n sys.stdout = NullDevice() # redirect the real STDOUT\n return self.get_gallery_image(id)\n # TODO: Add better error codes so I don't have to do a catch-all\n except Exception:\n return self.get_gallery_album(id)\n finally:\n sys.stdout = original_stdout # turn STDOUT back on\n\n if not self.is_imgur_url(url):\n return None\n\n objects = {'album': {'regex': \"a/(?P<id>[\\w.]*?)$\",\n 'method': self.get_album},\n 'comment': {'regex': \"gallery/\\w*/comment/(?P<id>[\\w.]*?)$\",\n 'method': self.get_comment},\n 'gallery': {'regex': \"(gallery|r/\\w*?)/(?P<id>[\\w.]*?)$\",\n 'method': get_gallery_item},\n # Valid image extensions: http://imgur.com/faq#types\n # All are between 3 and 4 chars long.\n 'image': {'regex': \"(?P<id>[\\w.]*?)(\\\\.\\w{3,4})?$\",\n 'method': self.get_image},\n 'user': {'regex': \"user/(?P<id>[\\w.]*?)$\",\n 'method': self.get_user}\n }\n parsed_url = urlparse(url)\n for obj_type, values in objects.items():\n regex_result = re.match('/' + values['regex'], parsed_url.path)\n if regex_result is not None:\n obj_id = regex_result.group('id')\n initial_object = values['method'](obj_id)\n if obj_type == 'image':\n try:\n # A better version might be to ping the url where the\n # gallery_image should be with a requests.head call. If\n # we get a 200 returned, then that means it exists and\n # this becomes less hacky.\n original_stdout = sys.stdout\n sys.stdout = NullDevice()\n if getattr(initial_object, 'section', None):\n sub = initial_object.section\n return self.get_subreddit_image(sub, obj_id)\n return self.get_gallery_image(obj_id)\n except Exception:\n pass\n finally:\n sys.stdout = original_stdout\n return initial_object",
"def FetchUrlContent(url):\n content = memcache.get(url)\n if content:\n return content\n\n request = urlfetch.fetch(url)\n\n if request.status_code == 200:\n content = request.content\n memcache.add(url, content, 60 * 60)\n return content\n\n raise LookupError('Unable to fetch URL. Response code: ' +\n str(request.status_code))",
"def __download_image_file(self):\n if not file_utils.file_exists(self.image_file_path):\n logger.info('Downloading Image from - ' + self.image_url)\n return file_utils.download(self.image_url, self.download_path)",
"def fetch(self, page, part):\n\n file = '/page-' + str(page)\n if part > 1:\n file += '.' + str(part)\n file += '.png'\n\n source = self.baseUrl + file\n destination = self.download\n\n no_problem_unlink(destination + '-small')\n no_problem_unlink(destination)\n\n image = http_get(source)\n if image.find('Not Found') == -1 and len(image) > 0:\n f = open(destination + '-small', 'w')\n f.write(image)\n f.close()\n\n if os.path.exists(destination + '-small') and os.path.getsize(destination + '-small') > 0:\n width = self.ORIGINAL_WIDTH * self.RESIZE_FACTOR\n height = self.ORIGINAL_HEIGHT * self.RESIZE_FACTOR\n resize_png(width, height, destination + '-small', destination)\n return destination\n else:\n return self.default",
"def fetch_url(self, url: str):\n log.debug(f\"Fetching {url}\")\n answer = self.session.get(url, timeout=self.timeout)\n answer.raise_for_status()\n\n # returning raw answer object, because due to redirects we may need to\n # double check answer.url to proceed\n return answer",
"def fetch_cover(self) -> None:\n self.cover_path = None\n if self.cover_url is None:\n Logger.Logger.log('No cover picture found for this song.')\n return\n Logger.Logger.log('Retrieving cover picture from iTunes...')\n url_hash: str = md5(self.cover_url.encode('utf-8')).hexdigest()\n filename: str = tempfile.gettempdir() + url_hash + '.jpg'\n try:\n request.urlretrieve(self.cover_url, filename)\n self.cover_path = filename\n except (HTTPError, TimeoutError) as ex:\n Logger.Logger.log_error(str(ex))\n Logger.Logger.log_error('Request failed for URL: ' + Utils.Utils.str(self.cover_url))\n self.cover_path = None",
"def download_image_from(link, directory, name):\n try:\n img_content = requests.get(link).content\n image_file = io.BytesIO(img_content)\n image = Image.open(image_file).convert('RGB')\n image.save(f'./{directory}/{name}.png', 'PNG', quality=100, subsampling=0)\n except:\n pass",
"def get_image(self, image_id):\n url = self.get_url(image_id)\n return image_util.load_image_from_url(url) if url else None",
"def download_image_and_save(image_url, destination):\n response = requests.get(image_url, stream=True)\n with open(destination, 'wb') as out_file:\n shutil.copyfileobj(response.raw, out_file)\n del response",
"def get_file(url):\n # Make request\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Read fits\n iofile = io.BytesIO(response.content)\n content_type = response.headers['Content-Type']\n if content_type == 'image/fits':\n obj = fits.open(iofile)\n else:\n raise Exception('Unknown content type: {0}.'.format(content_type))\n return obj",
"def get_image_url():",
"def get_url(url: str) -> Optional[str]:\n try:\n parsed = urlparse(url)\n except ValueError:\n return None\n\n if parsed.scheme in (\"file\", \"\"):\n return unquote(parsed.path)\n elif parsed.scheme in (\"http\", \"https\"):\n if url.startswith(\"https://open.spotify.com/image/\"):\n url = \"https://i.scdn.co/image/\" + url[len(\"https://open.spotify.com/image/\") :]\n\n name = hashlib.sha1(url.encode(\"utf-8\")).hexdigest()\n path = os.path.join(CACHE_PATH, name) + Path(parsed.path).suffix\n\n if os.path.isfile(path):\n info(f\"Already downloaded at {path}\")\n return path\n\n # Download the file to our cache. We should probably do this asynchronously,\n # but rely on the fact that the remote server is _probably_ fast enough.\n warning(f\"Downloading {url} -> {path}\")\n try:\n os.makedirs(CACHE_PATH, exist_ok=True)\n with urlopen(url) as read:\n with open(path, \"wb\") as write:\n while chunk := read.read(2048):\n write.write(chunk)\n\n return path\n except Exception as e:\n critical(\"Error getting image \" + str(e))\n\n try:\n os.remove(path)\n except:\n pass\n\n return None\n else:\n return None",
"def get_image():\n\n url = 'http://skyview.gsfc.nasa.gov/cgi-bin/images'\n params = dict(Position='%s,%s' % (source['ra'], source['dec']),\n Survey=source['survey'].val,\n Return='GIF')\n response = requests.get(url, params=params, stream=True)\n with open(files['image.gif'].rel, 'wb') as out_file:\n shutil.copyfileobj(response.raw, out_file)",
"def discover_image(url):\n LOG.info(\"Attempting to discover image for '{}'\"\n .format(url.encode('utf-8')))\n # hacky way to use urlparse to get favicon path\n parsed_url = urlparse.urlparse(url)\n favicon_url = urlparse.urlunparse((parsed_url.scheme, parsed_url.netloc,\n \"favicon.ico\", '', '', ''))\n try:\n response = requests.head(favicon_url)\n except requests.exceptions.RequestException:\n response = None\n\n if response:\n good_status = response.status_code == 200\n good_content_type = response.headers.get('content-type', '')\\\n .startswith('image/')\n good_content_size = int(response.headers.get('content-length', 0)) > 0\n if good_status and good_content_type and good_content_size:\n LOG.info(\"Image found at '{}'\".format(url.encode('utf-8')))\n return favicon_url\n\n LOG.info(\"No image found\")\n return None",
"def image_to_bytesio(url):\n if not url.startswith(\"http\"):\n url = \"http:\" + url\n resp = requests.get(url)\n if resp.status_code != requests.codes.ok:\n raise (Exception(\"Error getting image\"))\n return io.BytesIO(resp.content)",
"def fetch(url):\n content = requests.get(url).text\n if \"Error\" in content:\n raise ValueError(f\"Cannot read from: {url}\")\n return content",
"def save_image(filename: str, img_url: str) -> None:\n\n if not (os.path.isfile(filename)): # Check if the file already exists\n print('Downloading image {}...'.format(img_url))\n res = requests.get(img_url) # Download the image.\n res.raise_for_status()\n\n # Save the image\n image_file = open(filename, 'wb')\n for chunk in res.iter_content(100000):\n image_file.write(chunk)\n image_file.close()",
"def get_image(url, layer_name, check_blank=False):\n \n # get list of acceptable CRS' for the layer\n wms = WebMapService(url, version='1.3.0')\n crs_list = wms[layer_name].crsOptions\n print('Requesting these crs\\' %s' % crs_list)\n\n for crs in crs_list:\n params = get_params_and_bounding_box(url, layer_name, crs)\n resp = requests.get(url, params=params)\n print(\"The full URL request is '%s'\" % resp.url)\n \n # this should be 200\n print(\"The HTTP status code is: %i\" % resp.status_code)\n if resp.status_code != 200:\n raise SystemExit(resp.content)\n print('Status code OK')\n \n if resp.headers['content-type'] == 'image/png':\n if check_blank:\n # a PNG image was returned\n is_blank = check_blank(resp.content)\n if is_blank:\n raise SystemExit(\"A blank image was returned!\")\n else:\n print('Image data OK')\n else:\n # if there are errors then these can be printed out here\n raise SystemExit(resp.content)",
"def fetch(self, tag):\n return fetch_image(self.collection.client, tag)",
"def download_image(img_src, to_filename):\n res = requests.get(urljoin(BASE_URL, img_src), stream=True)\n with open(to_filename, \"wb\") as f:\n for chunk in res:\n f.write(chunk)",
"def store_image(self, http_client, link_hash, src, config):\r\n # check for a cache hit already on disk\r\n image = self.read_localfile(link_hash, src, config)\r\n if image:\r\n return image\r\n\r\n # no cache found download the image\r\n data = self.fetch(http_client, src)\r\n if data:\r\n image = self.write_localfile(data, link_hash, src, config)\r\n if image:\r\n return image\r\n\r\n return None",
"def handle_avatar(self, url, save=True):\r\n response = requests.get(url)\r\n if response.status_code == 200:\r\n fp = BytesIO(response.content)\r\n ext = url.split('.')[-1]\r\n ext = ext if ext in [\"png\", \"jpg\", \"gif\", \"jpeg\"] else \"jpg\"\r\n filename = \"{}.{}\".format(get_random_name(), ext)\r\n self.avatar.save(filename, File(fp), save=save)",
"def urlfetch(self, url, **kwargs):\n logging.debug('Fetching %s with kwargs %s', url, kwargs)\n resp = urlfetch.fetch(url, deadline=999, **kwargs)\n\n if resp.status_code == 200:\n return resp.content\n else:\n logging.warning('GET %s returned %d:\\n%s',\n url, resp.status_code, resp.content)\n self.handler.response.headers.update(resp.headers)\n self.handler.response.out.write(resp.content)\n raise exc.status_map.get(resp.status_code)(resp.content)",
"def fetch_image(client, name):\n try:\n image = client.images.get(name)\n except docker.errors.ImageNotFound:\n name, tag = _parse_image_tag(name)\n tag = 'latest' if tag is None else tag\n\n log.info(\"Pulling tag '{}' for image '{}'...\".format(tag, name))\n image = client.images.pull(name, tag=tag)\n\n log.debug(\"Found image '{}' for tag '{}'\".format(image.id, name))\n return image",
"def download(url, out_folder):\n \n filename = \"2.png\"\n \n outpath = os.path.join(out_folder, filename)\n \n if url.lower().startswith(\"http\"):\n urlretrieve(url, outpath)\n else:\n urlretrieve(urlparse.urlunparse(parsed), outpath)",
"def download(self, url, path_to_dir):\n\n if not os.path.exists(path_to_dir):\n os.makedirs(path_to_dir)\n\n raw_data = self.__class__.get_raw_data(url)\n path_to_image = os.path.join(path_to_dir, url.split('/')[-1].split('?')[0])\n with open(path_to_image, 'wb') as f:\n self.__class__.copy_to(raw_data, f)\n\n return path_to_image",
"def get_comic_src(url):\n html = requests.get(url).text\n soup = BeautifulSoup(html)\n images = soup.select('.img-comic-container a img')\n return images[0].attrs['src']",
"def download_and_save_image(imgurl, save_dir, num_retries=5, retry_interval=10):\n parse_result = urlparse(imgurl)\n img_name = os.path.basename(parse_result.path)\n img_id = img_name.split(\".\")[0]\n img_data = url_fetch(imgurl, attempt=0, num_retries=num_retries, retry_interval=retry_interval)\n save_name = os.path.join(save_dir, img_name)\n with open(save_name, \"wb\") as f:\n f.write(img_data)\n return {\"path\": save_name, \"img_id\": img_id}",
"def fetch_object(url):\n print(' GET ' + url)\n session = requests.Session()\n retry = Retry(connect=3, backoff_factor=15)\n adapter = HTTPAdapter(max_retries=retry)\n session.mount('http://', adapter)\n session.mount('https://', adapter)\n r = session.get(url)\n # Covering internal server errors by retrying one more time\n if r.status_code == 500:\n time.sleep(5)\n r = requests.get(url, allow_redirects=True)\n elif r.status_code != 200:\n print(f\"Problem with request: {str(r)}\")\n raise RuntimeError(\"Non-200 status code\")\n return r",
"def threaded_image(self, image_file : str, image_url : str) -> NoReturn:\n\n # Sets up retry configuration to prevent connection refusals from too many requests at once\n with requests.Session() as session:\n retry = Retry(connect=3, backoff_factor=0.5)\n adapter = HTTPAdapter(max_retries=retry)\n session.mount('http://', adapter)\n session.mount('https://', adapter)\n response = session.get(\n image_url,\n headers={\"Connection\":\"close\"}\n )\n\n if response and response.status_code == 200:\n with open(image_file, \"wb\") as img_file:\n img_file.write(response.content)\n self.update_completed(1)",
"def extract_image(page_html, family_url, folder):\n image_extractor = Extractor(page_html, family_url)\n for url in image_extractor.get_image_table():\n image_page_url = urljoin(family_url, url)\n # print(image_page_url)\n imres = requests.get(image_page_url)\n image_page_extractor = Extractor(imres.text, image_page_url)\n image_src, image_name = image_page_extractor.get_image_link()\n\n image_link = urljoin(image_page_url, image_src)\n\n print(image_link, image_name)\n # Download image\n fetch(image_link, image_name, folder)",
"def requesturl(url):\n r = requests.get(url)\n text = r.text.strip()\n try:\n image = Image.open(io.BytesIO(r.content))\n return {\n 'source_url': url,\n 'url': r.url,\n 'md5': getmd5(image),\n 'img_grey': image_to_byte_array(convertgrey(image)),\n 'height': image.height,\n 'width': image.width,\n 'datetime_created': datetime.datetime.now()\n }\n except:\n if 'Error' in text:\n text = find_between(text)\n\n return {\n 'error': text,\n 'source_url': url,\n 'url': r.url,\n 'datetime_created': datetime.datetime.now()\n }",
"def fetch(cls, url):\n delta = time.time() - cls._time_last_fetched\n wait_time = TIME_TIL_RETRY - delta\n if wait_time > 0:\n time.sleep(wait_time)\n resp = requests.get(url)\n cls._time_last_fetched = time.time()\n resp.raise_for_status()\n return resp",
"def get_image(soup):\n image = soup.find(\"div\", {\"class\": \"specs-photo-main\"}).find('img')['src']\n return image",
"def fetch_url(url):\n logger.info(\"Resolving \" + url)\n try:\n resp = requests.get(url, timeout=1.5)\n resp.raise_for_status()\n return {\n \"resolved_url\": resp.url,\n \"raw_content\": resp.text\n }\n except Exception as e:\n logger.error('Error fetching %s' % url, e)\n return {\n \"resolved_url\": url,\n \"raw_content\": \"\",\n \"url_error\": str(e)\n }",
"def get_image_url(self, size=None):\n return images.get_serving_url(self.image_blob_key, size=size)",
"def find_circuit_image(self, url):\n try:\n soup = set_soup(url)\n img_url_container = soup.find(\n \"div\", {\"class\": \"f1-race-hub--schedule-circuit-map\"}\n )\n img_url = img_url_container.find(\"a\")[\"href\"]\n soup = set_soup(self.BASE_URL + img_url)\n img_container = soup.find(\"div\", {\"class\": \"f1-race-hub--map-container\"})\n img = img_container.find(\"img\", {\"class\": \"lazy\"})[\"data-src\"]\n return self._add_timestamp_to_image(img)\n except Exception:\n logger.exception(\"Error getting circuit image\")",
"async def fetch_image_by_id(\n image_uid: str\n):\n image_uid = int(image_uid)\n image = utils_com.get_com_image_by_uid(image_uid)\n return image",
"def getfile(url):\n try:\n return urlreq.urlopen(url)\n except urlreq.HTTPError as e:\n safeprint(\"Sever returned with response code \" + str(e.getcode()) + \", download failed.\")",
"def download_image(full_image_url, image_name):\r\n\r\n logging.debug('download_image({}, {})'.format(full_image_url, image_name))\r\n\r\n if use_proxy:\r\n img_data = requests.get(full_image_url, proxies=proxies, timeout=15, verify=False).content\r\n else:\r\n img_data = requests.get(full_image_url).content\r\n dir_path = os.path.join(os.environ['TEMP'],'WarietyWallpaperImages')\r\n os.makedirs(dir_path, exist_ok=True)\r\n with open(os.path.join(dir_path, image_name), 'wb') as handler:\r\n handler.write(img_data)\r\n image_filesize = os.stat(os.path.join(dir_path, image_name)).st_size\r\n logging.debug('download_image - dir_path = {}'.format(dir_path))\r\n logging.debug('download_image - image_name = {}'.format(image_name))\r\n logging.debug('download_image - image_filesize = {}'.format(image_filesize))\r\n return os.path.join(dir_path, image_name)",
"def download_image(image_url, image_name, collection_id):\n try:\n response = requests.get(image_url)\n folder_path = imgs_directory + '/' + collection_id\n if not os.path.exists(folder_path):\n os.makedirs(folder_path)\n image_path = folder_path + '/' + image_name\n # image_path = os.path.join(folder_path, image_name)\n with open(image_path, 'wb') as f:\n f.write(response.content)\n return image_path\n except Exception as e:\n print(f\"An error occurred while downloading image {image_name}. Error message: {e}\")\n return None",
"def fetch_url_feed(self, url, **args):\n return self.fetch(\"/url\", url=url, **args)",
"def get_images(url):\n \n # =============================================================================\n # Selenium.\n # =============================================================================\n\n chrome_options = Options()\n #chrome_options.add_argument('--incognito')\n #chrome_options.add_argument('--headless')\n #chrome_options.add_argument('--no-sandbox')\n \n driver = webdriver.Chrome(options=chrome_options,executable_path='/usr/local/bin/chromedriver') # Optional argument, if not specified will search path.\n driver.get('https://' + url)\n \n #scrolling to bottom to load all images on the page\n driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n #sleep to make sure everything loads\n time.sleep(5)\n \n \n html_source = driver.page_source\n \n img_alt_src(html_source)\n \n driver.close()\n driver.quit()",
"def capture_image(url, stream, **kwargs):\n url = capture_url(url, **kwargs)\n download_to_stream(url, stream)",
"def fetch_url(url):\n try:\n soup = bs(urlopen(url).read(), 'html.parser')\n return soup\n except:\n print \"Couldnot download the content from the URL\", url\n return \"\""
] | [
"0.8507021",
"0.80610484",
"0.79975206",
"0.7953096",
"0.79118997",
"0.7878863",
"0.7840954",
"0.7823508",
"0.77485085",
"0.77150685",
"0.7658794",
"0.76583415",
"0.7470759",
"0.7440883",
"0.7416042",
"0.74151766",
"0.73907363",
"0.73734015",
"0.7324511",
"0.7323661",
"0.73060286",
"0.7284261",
"0.7235358",
"0.71439093",
"0.7028359",
"0.7026992",
"0.6953102",
"0.69311",
"0.6927938",
"0.6915745",
"0.6902869",
"0.68917066",
"0.6882189",
"0.6842981",
"0.6833819",
"0.67638385",
"0.6744995",
"0.6739322",
"0.6714824",
"0.67097664",
"0.6707764",
"0.6697857",
"0.66938287",
"0.66699046",
"0.65611786",
"0.64933944",
"0.64716977",
"0.6447505",
"0.64364827",
"0.64356196",
"0.64053196",
"0.6364233",
"0.6353272",
"0.63315415",
"0.63288254",
"0.63226813",
"0.6309298",
"0.6277443",
"0.62727094",
"0.62673795",
"0.6261958",
"0.62571305",
"0.6256107",
"0.6251996",
"0.623416",
"0.6211328",
"0.62102246",
"0.6199035",
"0.6187662",
"0.61860037",
"0.6185407",
"0.61806315",
"0.6175144",
"0.61483544",
"0.60916555",
"0.6078371",
"0.6074284",
"0.60575706",
"0.6053674",
"0.6050014",
"0.60447603",
"0.6044462",
"0.60272145",
"0.6026622",
"0.60151327",
"0.60045415",
"0.6002706",
"0.5998588",
"0.5995297",
"0.5991851",
"0.5990394",
"0.5982862",
"0.5982657",
"0.59439015",
"0.59404474",
"0.5931592",
"0.5929859",
"0.5896138",
"0.58790237",
"0.5873403"
] | 0.8826489 | 0 |
Test case for command_trigger_webhook_post Launch a command via a Trigger | def test_command_trigger_webhook_post(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def test_webhook_endpoint_generates_telegram_command_event(\n hass: HomeAssistant,\n webhook_platform,\n hass_client: ClientSessionGenerator,\n update_message_command,\n) -> None:\n client = await hass_client()\n events = async_capture_events(hass, \"telegram_command\")\n\n response = await client.post(TELEGRAM_WEBHOOK_URL, json=update_message_command)\n assert response.status == 200\n assert (await response.read()).decode(\"utf-8\") == \"\"\n\n # Make sure event has fired\n await hass.async_block_till_done()\n\n assert len(events) == 1\n assert events[0].data[\"command\"] == update_message_command[\"message\"][\"text\"]",
"def trigger_build(self, postdata):\n pass",
"async def test_receive_post_ok(self):\n await self.webhook_connection.connect()\n assert self.webhook_connection.is_connected is True\n payload = {\"hello\": \"world\"}\n call_task = self.loop.create_task(self.call_webhook(\"test_topic\", json=payload))\n envelope = await asyncio.wait_for(self.webhook_connection.receive(), timeout=10)\n\n assert envelope\n\n message = cast(HttpMessage, envelope.message)\n dialogue = self.skill_dialogues.update(message)\n assert dialogue is not None\n assert message.method.upper() == \"POST\"\n assert message.body.decode(\"utf-8\") == json.dumps(payload)\n await call_task",
"def command_webhook(request):\n print(json.dumps(request.POST.copy(), indent=2))\n\n return JsonResponse({\"text\": \"ChangeTip services have been discontinued. See https://www.reddit.com/r/changetip/comments/5dn3rc/changetip_shutting_down/ Please close your account and disconnect ChangeTip from Slack.\"})\n\n if request.POST.get(\"noop\"):\n return JsonResponse({\"text\": \"Hi!\"})\n\n # Separated so we can still support the legacy webhook integration\n if 'command' in request.POST.keys():\n return slash_command(request)\n else:\n return outgoing_webhook(request)",
"def handle_post(self, api, command):\n return self._make_request_from_command('POST', command)",
"def test_create_trigger_with_curl(command_curl, test_rma_url, test_cma_creds):\n with open(\"etc/trigger.json\", \"r\") as json_file:\n data = json.load(json_file)\n cmd = [\n command_curl,\n \"--anyauth\",\n \"--user\",\n test_cma_creds,\n \"-v\",\n \"-H\",\n \"Content-type: application/json\",\n \"-d\",\n json.dumps(data),\n f\"{test_rma_url}/databases/kerndaten/triggers?format=json\",\n ]\n curl_result = subprocess.run(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)\n print(curl_result.returncode)\n print(curl_result.stdout)\n assert curl_result.returncode == 0\n\n # sanity check trigger has been created\n assert check_resource_exists(\n test_cma_creds, test_rma_url, \"databases/kerndaten/triggers/only-one-crawler\"\n )",
"async def trigger_build(self, *, branch=None, message=None):",
"def test_post_hooks(self):\n os.makedirs('/tmp/localhost/pacha_post')\n touch_script = open('/tmp/localhost/pacha_post/bar.sh', 'w')\n touch_script.write('''touch /tmp/localhost/post_got_executed.txt''')\n touch_script.close()\n run = rebuild.Rebuild(hostname='localhost') \n run.post_hooks()\n self.assertTrue(os.path.isfile('/tmp/localhost/post_got_executed.txt'))",
"def handle_github_webhook():\n\n verify_signature(request)\n logger.info(\"Received webhook\")\n\n if should_deploy(request):\n schedule_deploy()\n\n return \"\"",
"def git_webhook():\n client = MongoClient(os.getenv('MONGODB_URI', 'mongodb://localhost:27017'))\n database = client.get_database()\n content = {\n \"event\": request.headers['X-GitHub-Event'],\n \"payload\" : request.json,\n \"date\": datetime.utcnow()\n }\n log.info(\"Content Received - \", request.headers['X-GitHub-Delivery'])\n inserted_id = database.events.insert_one(content).inserted_id\n log.info(\"Content Inserted - \", inserted_id)\n return jsonify({\n \"message\": \"Okay!\"\n })",
"def run_trigger_command(self, workdir: str, args: argparse.Namespace):\n for response_line in self.stub.exec_command(\n on_device_tests_gateway_pb2.OnDeviceTestsCommand(\n workdir=workdir,\n token=args.token,\n test_type=args.test_type,\n platform=args.platform,\n archive_path=args.archive_path,\n config=args.config,\n tag=args.tag,\n labels=args.label,\n builder_name=args.builder_name,\n change_id=args.change_id,\n build_number=args.build_number,\n loader_platform=args.loader_platform,\n loader_config=args.loader_config,\n version=args.version,\n dry_run=args.dry_run,\n dimension=args.dimension or [],\n unittest_shard_index=args.unittest_shard_index,\n test_attempts=args.test_attempts,\n retry_level=args.retry_level,\n )):\n\n print(response_line.response)",
"def test_issue_post_issue_reaction(self):\n pass",
"def test_webhook_build_success(self):\n payload = json.dumps({\n 'matrix': [\n {\n 'config': {\n 'env': [\n 'REVIEWBOARD_STATUS_UPDATE_ID=%d'\n % self.status_update.pk,\n 'REVIEWBOARD_TRAVIS_INTEGRATION_CONFIG_ID=%d'\n % self.config.pk,\n ],\n },\n },\n ],\n 'build_url': 'https://example.com/build',\n 'state': 'passed',\n })\n self.spy_on(TravisCIWebHookView._validate_signature,\n owner=TravisCIWebHookView,\n call_fake=lambda self, request, integration_config: True)\n\n rsp = self.client.post(self.webhook_url, {'payload': payload})\n\n self.assertEqual(rsp.status_code, 200)\n\n self.status_update = StatusUpdate.objects.get(pk=self.status_update.pk)\n self.assertEqual(self.status_update.url, 'https://example.com/build')\n self.assertEqual(self.status_update.state,\n StatusUpdate.DONE_SUCCESS)",
"def _send_post_request(self, item):\n tc_name = get_tcname(item)\n try:\n env_prop = item.config.env.env_prop\n except AttributeError:\n buildname = self.UNDEFINED_BUILD\n else:\n buildname = self.buildname(env_prop)\n suite_name = get_suite_name(item.nodeid)\n info = {\"brief\": get_brief(item, tc_name), \"description\": get_steps(item, tc_name)}\n\n if self.post_queue:\n self._send_post_queue(item, buildname)\n self.server_cmd(\"post\", [self.self_name, buildname, suite_name, tc_name, \"Run\", \"\", info, self._get_build_info(item)])",
"def at_post_cmd(self):\n pass",
"def Trigger(self):\n\t\tpayload = { \"Arg1\": self.href }\n\t\treturn self._execute('trigger', payload=payload, response_object=None)",
"def test_valid_webhook(self, mock_send):\n send_notification(\"valid_webhook\", self.message)\n mock_send.assert_called()",
"def build_trigger(ctx, build_type_id, branch, comment, parameter, agent_id,\n open_build_log, wait_for_run):\n parameters = dict([p.split('=', 1) for p in parameter])\n data = ctx.obj.trigger_build(\n build_type_id=build_type_id,\n branch=branch,\n comment=comment,\n parameters=parameters,\n agent_id=agent_id)\n build_id = data['id']\n ctx.invoke(build_queue_show, args=[build_id])\n if open_build_log:\n url = data['webUrl'] + '&tab=buildLog'\n webbrowser.open(url)\n if not wait_for_run:\n return\n while data['state'] == 'queued':\n data = ctx.obj.get_queued_build_by_build_id(build_id)\n click.echo('state: %s' % data['state'])\n time.sleep(1)\n ctx.invoke(build_queue_show, args=[build_id])",
"def slackbuild_webhook(req: Request):\n global config\n global slack\n global cloudbuild\n\n # slack submits a POST\n if req.method != \"POST\":\n return abort(405)\n\n # not a true request from slack\n verified, err = slack.verify_webhook(req)\n if not verified:\n print(err)\n return abort(403)\n\n body = Slack.parse_request(req)\n argv = Slack.parse_command(body)\n msg = \"\"\n\n output, success = Command.run(argv, cloudbuild, config)\n\n if output is None:\n if success:\n # intentionaly not responding with a slack message\n return ('', 200)\n else:\n return abort(500)\n elif Slack.is_interactive_message(body):\n msg = slack.render_interactive_message(body, success, output)\n else:\n color = Colors.SUCCESS if success else Colors.FAILURE\n msg = slack.render_message({\"result\": output, \"color\": color}, \"command.json\")\n\n msg = json.dumps(msg)\n print(msg)\n return Response(response=msg, content_type=\"application/json\")",
"def webhook_sender(url=WEBHOOK_URL):\n data = runner()\n print(json.dumps(data))\n try:\n r = requests.post(url,json=data)\n print(r)\n except requests.exceptions.RequestException as e:\n raise SystemExit(e)",
"async def test_webhook_endpoint_generates_telegram_callback_event(\n hass: HomeAssistant,\n webhook_platform,\n hass_client: ClientSessionGenerator,\n update_callback_query,\n) -> None:\n client = await hass_client()\n events = async_capture_events(hass, \"telegram_callback\")\n\n response = await client.post(TELEGRAM_WEBHOOK_URL, json=update_callback_query)\n assert response.status == 200\n assert (await response.read()).decode(\"utf-8\") == \"\"\n\n # Make sure event has fired\n await hass.async_block_till_done()\n\n assert len(events) == 1\n assert events[0].data[\"data\"] == update_callback_query[\"callback_query\"][\"data\"]",
"def send(self):\n payload = self.format_payload()\n\n # Makes sure that the required fields are provided before\n # sending the payload.\n if not self.webhook_url:\n print ('Error: Webhook URL is required.')\n\n elif not payload:\n print ('Error: Message payload cannot be empty.')\n\n else:\n try:\n request = requests.post(self.webhook_url,\n data=json.dumps(payload),\n headers={'Content-Type': 'application/json'})\n\n request.raise_for_status()\n\n except requests.exceptions.RequestException as error:\n print('Error: %s' % error)",
"def test_bot_triggered_event(self):\n lh = LambdaHandler(\"tests.test_bot_handler_being_triggered\")\n # from : https://docs.aws.amazon.com/lambda/latest/dg/eventsources.html#eventsources-lex\n event = {\n \"messageVersion\": \"1.0\",\n \"invocationSource\": \"DialogCodeHook\",\n \"userId\": \"user-id specified in the POST request to Amazon Lex.\",\n \"sessionAttributes\": {\n \"key1\": \"value1\",\n \"key2\": \"value2\",\n },\n \"bot\": {\"name\": \"bot-name\", \"alias\": \"bot-alias\", \"version\": \"bot-version\"},\n \"outputDialogMode\": \"Text or Voice, based on ContentType request header in runtime API request\",\n \"currentIntent\": {\n \"name\": \"intent-name\",\n \"slots\": {\n \"slot-name\": \"value\",\n \"slot-name\": \"value\",\n \"slot-name\": \"value\",\n },\n \"confirmationStatus\": \"None, Confirmed, or Denied (intent confirmation, if configured)\",\n },\n }\n\n response = lh.handler(event, None)\n\n self.assertEqual(response, \"Success\")",
"def test_workflows_post(self):\n pass",
"def test_slackP_send(get_slackpost, capsys):\n s = get_slackpost\n s.send()\n out, err = capsys.readouterr()\n assert \"Message sent\" in out",
"def test_slackWH_send_good(get_slackwebhook, capsys):\n s = get_slackwebhook\n s.send()\n out, err = capsys.readouterr()\n assert \"Message sent\" in out",
"def POST(self):\n\t\t\n\t\tjson_data = web.data()\t\t# Get the POST data sent from Webex Teams\n\t\t#print(\"\\nWEBHOOK POST RECEIVED:\")\n\t\t#print(json_data, \"\\n\")\n\n\t\twebhook_obj = Webhook(json_data)\t\t\t\t\t# Create a Webhook object from the JSON data\n\t\troom = api.rooms.get(webhook_obj.data.roomId)\t\t# Get the room details\n\t\tmessage = api.messages.get(webhook_obj.data.id)\t\t# Get the message details\n\n\t\t# Ignore messages bot itself sent\n\t\tif message.personId == me.id:\n\t\t\treturn 'OK'\n\t\telse:\t# Message was sent by someone else; parse message and respond.\n\t\t\tperson = api.people.get(message.personId)\t\t\t# Get the sender's details\n\t\t\t\n\t\t\tprint(\"NEW MESSAGE IN ROOM '{}'\".format(room.title))\n\t\t\tprint(\"FROM '{}'\".format(person.displayName))\n\t\t\tprint(\"MESSAGE '{}'\\n\".format(message.text))\n\n\t\t\t#Test message sent\n\t\t\t#response = 'Message received {}'.format(mention(person.emails[0]))\t\t\n\t\t\t#api.messages.create(room.id, markdown=response)\n\t\t\tactionSelector(api, message, teams)\t\t\t\t#Depending on message defines action to perform\t\t\n\t\t\t\n\t\treturn 'OK'",
"def test_postMessage(self): #GIVEN the appropriate environment variables are configured\n testBot = bot.Bot(os.environ['bot_id'], os.environ['token'], os.environ['group_ID'])\n status = testBot.postMessage('Zygium') #WHEN the bot posts a message\n self.assertTrue(status == 202) # a status code of 202 should be returned",
"def post(self):\n send_slack_log('Entered /slack/post_msg')\n send_slack_log('Request info:')\n send_slack_log(str(request.form))\n # unknown request.form\n trigger_id = request.form['trigger_id']\n channel_id = request.form['channel_id']\n response = open_form(channel_id,\n trigger_id,\n config['slack_post_form_path'])\n send_slack_log('Response info:')\n send_slack_log(str(response))\n return 'Please enter the new msg information in the form'",
"def test_user_actions_post(self):\n pass",
"def trig_code(self, bot, source, target, trigger, argument):\n\t\treturn \"Hello, I'm a pyirkbot based on pynik. My code https://github.com/blueCommand/pyirkbot For feature requests use https://github.com/blueCommand/pyirkbot/issues beer is good also\"",
"def test_first_log_post(self):\n commands = self.conveyer.log(\"{message: \\\"test\\\"}\")\n self.assertEquals(len(commands), 2)\n creator, appender = commands\n self.assertEquals(type(creator), CreateLogCmd)\n self.assertEquals(type(appender), AppendLogCmd)\n self.assertEquals(creator.filename, \"testfile.dat\")\n self.assertEquals(appender.event, \"{message: \\\"test\\\"}\")",
"def webhook(event, context):\n bot = configure_telegram()\n logger.info('Event: {}'.format(event))\n\n if event.get('httpMethod') == 'POST' and event.get('body'):\n logger.info('Message received')\n update = telegram.Update.de_json(json.loads(event.get('body')), bot)\n chat_id = update.message.chat.id\n text = update.message.text\n\n if text == '/start' or text == 'help':\n reply = \"Hey 👋🏻, Aashutosh here!\" \\\n \"\\nTo start stalking, just enter username and we will fetch their profile for you.\\n\" \\\n \"Give us a star at https://github.com/aashutoshrathi/git-profiler-bot\\n\" \\\n \"You can reach out to me at: https://aashutosh.dev\"\n else:\n reply = stalk(text)\n bot.sendMessage(chat_id=chat_id, parse_mode='HTML', text=reply)\n logger.info('Message sent')\n return OK_RESPONSE\n\n return ERROR_RESPONSE",
"def test_webhook_build_pending(self):\n payload = json.dumps({\n 'matrix': [\n {\n 'config': {\n 'env': [\n 'REVIEWBOARD_STATUS_UPDATE_ID=%d'\n % self.status_update.pk,\n 'REVIEWBOARD_TRAVIS_INTEGRATION_CONFIG_ID=%d'\n % self.config.pk,\n ],\n },\n },\n ],\n 'build_url': 'https://example.com/build',\n 'state': 'started',\n })\n self.spy_on(TravisCIWebHookView._validate_signature,\n owner=TravisCIWebHookView,\n call_fake=lambda self, request, integration_config: True)\n\n rsp = self.client.post(self.webhook_url, {'payload': payload})\n\n self.assertEqual(rsp.status_code, 200)\n\n self.status_update = StatusUpdate.objects.get(pk=self.status_update.pk)\n self.assertEqual(self.status_update.url, 'https://example.com/build')\n self.assertEqual(self.status_update.state,\n StatusUpdate.PENDING)",
"def testServicePost(self):\n\n text = \"This is a test sentence. And another sentence to split.\"\n results = self.client.post(\"workflow\", json={\"name\": \"post\", \"elements\": [text]}).json()\n\n self.assertEqual(len(results), 1)\n self.assertEqual(len(results[0]), 2)",
"def __do_trigger(self, request):\n dmp_trigger.DmpTrigger().trigger(request)\n return defines.ReturnCode.SUCC",
"async def test_webhook_endpoint_generates_telegram_text_event(\n hass: HomeAssistant,\n webhook_platform,\n hass_client: ClientSessionGenerator,\n update_message_text,\n) -> None:\n client = await hass_client()\n events = async_capture_events(hass, \"telegram_text\")\n\n response = await client.post(TELEGRAM_WEBHOOK_URL, json=update_message_text)\n assert response.status == 200\n assert (await response.read()).decode(\"utf-8\") == \"\"\n\n # Make sure event has fired\n await hass.async_block_till_done()\n\n assert len(events) == 1\n assert events[0].data[\"text\"] == update_message_text[\"message\"][\"text\"]",
"def fire_trigger(self, trigger):\n if not self.exists():\n return\n if trigger in self.events:\n for action in self.events[trigger]:\n action(requestor=self)",
"def test_blog_manual_commit():",
"def light_post():\n return 'do some magic!'",
"def test_simple_message(self):\n messaging = {\n 'sender': {'id': '1331235'},\n 'recipient': {'id': '1111111'},\n 'message': {'text': 'Hello world.'}\n }\n event = self.create_message_event(messaging)\n c = Client()\n response = c.post(self.webhook, data=event, content_type='application/json')\n self.assertEqual(response.status_code, 200)",
"def post_hook(config, final=False):\n if config.post_hook:\n if final or config.verb != \"renew\":\n logger.info(\"Running post-hook command: %s\", config.post_hook)\n _run_hook(config.post_hook)",
"def post(self):\n send_slack_log('Entered /slack/submit')\n send_slack_log('Request info:')\n send_slack_log(str(request.form))\n if request.form.get('payload') is None:\n send_slack_log('Invalid request: no payload')\n return\n else:\n return handle_interaction(json.loads(request.form['payload']))",
"def test_message_post(self):\r\n\r\n submission_time = datetime.strftime(datetime.now(UTC), xqueue_interface.dateformat)\r\n\r\n feedback_post = {\r\n 'feedback': 'feedback text',\r\n 'submission_id': '1',\r\n 'grader_id': '1',\r\n 'score': 3\r\n }\r\n result = self.openendedmodule.message_post(feedback_post, self.test_system)\r\n self.assertTrue(result['success'])\r\n\r\n # make sure it's actually sending something we want to the queue\r\n mock_send_to_queue_body_arg = json.loads(self.mock_xqueue.send_to_queue.call_args[1]['body'])\r\n self.assertEqual(mock_send_to_queue_body_arg['feedback'], feedback_post['feedback'])\r\n self.assertEqual(mock_send_to_queue_body_arg['submission_id'], int(feedback_post['submission_id']))\r\n self.assertEqual(mock_send_to_queue_body_arg['grader_id'], int(feedback_post['grader_id']))\r\n self.assertEqual(mock_send_to_queue_body_arg['score'], feedback_post['score'])\r\n body_arg_student_info = json.loads(mock_send_to_queue_body_arg['student_info'])\r\n self.assertEqual(body_arg_student_info['anonymous_student_id'], self.test_system.anonymous_student_id)\r\n self.assertGreaterEqual(body_arg_student_info['submission_time'], submission_time)\r\n\r\n state = json.loads(self.openendedmodule.get_instance_state())\r\n self.assertEqual(state['child_state'], OpenEndedModule.DONE)",
"def push(self, trigger, action):\n self.queue.append((trigger, action))",
"def create_trigger(self, trigger, conditions=[], dampenings=[]):\n full_trigger = {'trigger': trigger, 'conditions': conditions, 'dampenings': dampenings}\n self._post(path='triggers/trigger', data=full_trigger)",
"def setup(bot):\n bot.add_cog(ResendPost(bot, bot.post_queue))",
"def slack_post(title=\"Test\", message=\"Hello world!\", color=\"#999999\"):\n attach = dict(fallback=message, title=title, text=message, color=color)\n r = client.chat_postMessage(\n channel=CHANNEL, attachments=[attach], username=f\"{HOSTNAME} DBA alert\"\n )\n return r",
"def post(self):\n created = post_tool(request.json)\n return created, 201",
"def send(data, webhook_url):\n dis_data = data\n url = webhook_url\n headers = {\"Content-Type\": \"application/json\"}\n discord_request = requests.post(url, data=json.dumps(dis_data), headers=headers)\n\n try:\n discord_request.raise_for_status()\n except requests.exceptions.HTTPError as err:\n print(err)\n else:\n print(\"Payload delivered successfully, code {}.\".format(discord_request.status_code))",
"def execute(cls, slack_wrapper, args, channel_id, user_id, user_is_admin):\n slack_wrapper.post_message(channel_id, \"Pong!\")",
"def trigger(self, journey_id, step_id, data):\n self.journey_id = journey_id\n self.step_id = step_id\n if 'email_address' not in data:\n raise KeyError('The automation email queue must have an email_address')\n\n check_email(data['email_address'])\n response = self._mc_client._post(\n url=self._build_path(\"journeys\", journey_id, 'steps', step_id, 'actions', \"trigger\"),\n data=data\n )\n\n return response",
"def runner():\n print(\"Webhook 模拟器启动,目前支持推送 Balkobot 和 Cybersole 的webhook\")\n botName = input(\"请输入需要使用的Bot 全称: e.g Balkobot/Cybersole: \\n\")\n while (botName != \"Balkobot\" and botName != \"Cybersole\"):\n print(botName)\n print(\"你输入的bot 名称有误\")\n botName = input(\"请输入需要使用的Bot 全称: e.g Balkobot/Cybersole: \")\n productData = {}\n productData['Product'] = input(\"请输入产品名称, 如果想调用后台已有产品信息,请直接回车: \")\n if not productData['Product']:\n return populating_template(template=template_builder(botName = botName))\n try:\n productData['url'] = input(\"请输入产品url, 如无请回车: \")\n productData[\"Size\"] = input(\"请输入尺码 (US): \") or \"N/A\"\n productData[\"Picture_url\"] = input(\"请输入产品图片url,如无请回车: \")\n productData[\"Store\"] = input(\"请输入商店名,如无请回车: \") or \"N/A\"\n productData[\"Profile\"] = input(\"请输入Profile名称,如无请回车: \") or \"N/A\"\n productData[\"Order\"] = input(\"请输入Order Number,如无请回车: \") or \"N/A\"\n productData[\"Proxy List\"] = input(\"请输入Proxy List名称,如无请回车: \") or \"N/A\"\n productData[\"Mode\"] = input(\"请输入Mode, 如无请回车: \") or \"N/A\"\n productData[\"Delay\"] = input(\"请输入Task Delay, 如无请回车: \") or \"N/A\"\n productData[\"Tasks\"] = input(\"请输入Task数量, 如无请回车: \") or \"N/A\" \n except:\n print(\"输入信息有误兄弟\")\n raise(sys.exit(e))\n return populating_template(template=template_builder(botName=botName), productData = productData)",
"def triggered(self, *args, **kwargs): # real signature unknown\n pass",
"def test_run_command(self):\n self.build()\n self.data_formatter_commands()",
"def test_command():\n\n dispatcher = ntelebot.dispatch.Dispatcher()\n dispatcher.add_command('command', lambda ctx: 'DISPATCHED')\n ctx = MockContext()\n ctx.type = 'message'\n ctx.command = None\n assert dispatcher(ctx) is False\n ctx.command = 'command'\n assert dispatcher(ctx) == 'DISPATCHED'\n ctx.type = 'callback_query'\n assert dispatcher(ctx) == 'DISPATCHED'\n ctx.type = 'inline_query'\n assert dispatcher(ctx) is False",
"def trigger(self, type, event):",
"def run_post(payload, response):\n message = FakeMessage()\n message.raw_payload = payload\n response_queue = queue.Queue()\n headers = {\"Content-Type\": \"application/json\"}\n with aioresponses() as mocked:\n mocked.post(\n TestData.JOB_TEMPLATE_POST_URL,\n status=200,\n body=json.dumps(response),\n headers=headers,\n )\n worker.execute(message, TestData.RECEPTOR_CONFIG, response_queue)\n\n return response_queue",
"def test_nth_log_post(self):\n self.conveyer.execute(self.conveyer.log(\"{message: \\\"first\\\"}\"))\n commands = self.conveyer.log(\"{message: \\\"second\\\"}\")\n self.assertEquals(len(commands), 1)\n self.assertEquals(type(commands[0]), AppendLogCmd)\n self.assertEquals(commands[0].event, \"{message: \\\"second\\\"}\")\n self.assertEquals(self.events_out.getvalue(), \"{message: \\\"first\\\"}\")",
"def test_webhook_build_error(self):\n payload = json.dumps({\n 'matrix': [\n {\n 'config': {\n 'env': [\n 'REVIEWBOARD_STATUS_UPDATE_ID=%d'\n % self.status_update.pk,\n 'REVIEWBOARD_TRAVIS_INTEGRATION_CONFIG_ID=%d'\n % self.config.pk,\n ],\n },\n },\n ],\n 'build_url': 'https://example.com/build',\n 'state': 'failed',\n })\n self.spy_on(TravisCIWebHookView._validate_signature,\n owner=TravisCIWebHookView,\n call_fake=lambda self, request, integration_config: True)\n\n rsp = self.client.post(self.webhook_url, {'payload': payload})\n\n self.assertEqual(rsp.status_code, 200)\n\n self.status_update = StatusUpdate.objects.get(pk=self.status_update.pk)\n self.assertEqual(self.status_update.url, 'https://example.com/build')\n self.assertEqual(self.status_update.state,\n StatusUpdate.DONE_FAILURE)",
"def test_post__staging(self, mock_emailmessage_constructor):\n params = {\n 'to': self.to,\n 'subject': self.subject,\n 'html': self.html,\n }\n with notifier.app.test_request_context(self.request_path, json=params):\n actual_response = self.handler.process_post_data()\n\n expected_to = '[email protected]'\n mock_emailmessage_constructor.assert_called_once_with(\n sender=self.sender, to=expected_to, subject=self.subject,\n html=self.html)\n mock_message = mock_emailmessage_constructor.return_value\n mock_message.check_initialized.assert_called_once_with()\n mock_message.send.assert_called_once_with()\n self.assertEqual({'message': 'Done'}, actual_response)",
"def handle_command(message, slack_config):\n\n message.react(\"+1\")\n\n handler = {\n \"schedule_job\": handle_schedule_job,\n \"cancel_job\": handle_cancel_job,\n \"schedule_suppression\": handle_schedule_suppression,\n \"cancel_suppression\": handle_cancel_suppression,\n }[slack_config[\"type\"]]\n\n handler(message, slack_config)",
"def test_workflows_change_stream_post(self):\n pass",
"def test_pre_post_hooks(self):\n os.makedirs('/tmp/localhost/pacha_pre')\n os.makedirs('/tmp/localhost/pacha_post')\n pre_script = open('/tmp/localhost/pacha_pre/foo.sh', 'w')\n pre_script.write('''touch /tmp/localhost/pre_got_executed.txt''')\n pre_script.close()\n post_script = open('/tmp/localhost/pacha_post/bar.sh', 'w')\n post_script.write('''touch /tmp/localhost/post_got_executed.txt''')\n post_script.close()\n run = rebuild.Rebuild(hostname='localhost') \n run.pre_hooks()\n run.post_hooks()\n self.assertTrue(os.path.isfile('/tmp/localhost/post_got_executed.txt'))\n self.assertTrue(os.path.isfile('/tmp/localhost/pre_got_executed.txt'))",
"def handle_command(ARGS, CLIENT, command, channel):\n message = '''Commands I know:\n list teams\n scores <optional week number>\n does Brandon suck\n '''\n message = \"\"\n attachments = \"\"\n if command == \"list teams\":\n message = '\\n'.join(map(lambda x: x.team_name, ARGS.league.teams))\n elif command == \"does brandon suck\":\n message = 'yes'\n elif 'scores' in command:\n pieces = command.split(' ')\n if len(pieces) == 1:\n message = 'Current Scoreboard'\n matchups = ARGS.league.scoreboard(projections=True)\n else:\n message = 'Scoreboard for week ' + pieces[1]\n matchups = ARGS.league.scoreboard(pieces[1], projections=True)\n\n attachments = [{\n 'fallback': 'A textual representation of your table data',\n 'fields': [\n {\n 'title': 'Home',\n 'value': '\\n'.join(map(lambda x: x.home_team.team_abbrev + \" \" + str(x.home_score) + \" (\" + str(x.home_projection) + \")\", matchups)),\n 'short': True\n },\n {\n 'title': 'Away',\n 'value': '\\n'.join(map(lambda x: x.away_team.team_abbrev + \" \" + str(x.away_score) + \" (\" + str(x.away_projection) + \")\", matchups)),\n 'short': True\n }\n ]\n }]\n CLIENT.api_call(\"chat.postMessage\", channel=channel, text=message, attachments=attachments, as_user=True)\n\n # CLIENT.api_call(\"chat.postMessage\", channel=channel, text=message, as_user=True)",
"def test_post(self):\n pass",
"def __init__(self):\r\n # Webhook URL\r\n self.WEBHOOK = DiscordWebhook(url=\"https://discord.com/api/webhooks/806990374996410368/QqilGNrBo652oBEnsuX-BMkU1e8_PGIO4ENiyiQF_V6qtuQLkT6Z_1-lFmzmatp9M8Mz\")\r\n \r\n # Executing Function\r\n # self.ExecuteEmbed()\r",
"def on_msg(bot, trigger):\n\tif hasattr(trigger, 'tags') and trigger.tags.get('intent') == 'ACTION':\n\t\tlog(bot, trigger.sender, '* {} {}', trigger.nick, trigger);\n\telif is_action(trigger):\n\t\tlog(bot, trigger.sender, '* {} {}', trigger.nick, action_message(trigger));\n\telse:\n\t\tlog(bot, trigger.sender, '<{}> {}', trigger.nick, trigger);",
"async def test_create_and_forget_post_on_target(fixture_account):\n _ = await create_and_forget_post(fixture_account, TARGET_NODE, REFERENCE_NODE)",
"def test_AAAA_WORKAROUND__edit(self):\n data1 = {'url': 'http://httpbin.org/post',\n 'secret': 'secret'}\n data2 = {'url': 'http://example.com/hook',\n 'secret': 'secret2'}\n self.create_webhook(data1).follow()\n self.create_webhook(data2).follow()\n assert M.Webhook.query.find().count() == 2\n wh1 = M.Webhook.query.get(hook_url=data1['url'])\n r = self.app.get(self.url + '/repo-push/%s' % wh1._id)\n form = r.forms[0]\n assert form['url'].value == data1['url']\n assert form['secret'].value == data1['secret']\n assert form['webhook'].value == str(wh1._id)\n form['url'] = 'http://host.org/hook'\n form['secret'] = 'new secret'\n msg = 'edit webhook repo-push\\n{} => {}\\n{}'.format(\n data1['url'], form['url'].value, 'secret changed')\n with td.audits(msg):\n r = form.submit()\n wf = json.loads(self.webflash(r))\n assert wf['status'] == 'ok'\n assert wf['message'] == 'Edited successfully'\n assert M.Webhook.query.find().count() == 2\n wh1 = M.Webhook.query.get(_id=wh1._id)\n assert wh1.hook_url == 'http://host.org/hook'\n assert wh1.app_config_id == self.git.config._id\n assert wh1.secret == 'new secret'\n assert wh1.type == 'repo-push'\n\n # Duplicates\n r = self.app.get(self.url + '/repo-push/%s' % wh1._id)\n form = r.forms[0]\n form['url'] = data2['url']\n r = form.submit()\n self.find_error(r, '_the_form',\n '\"repo-push\" webhook already exists for Git http://example.com/hook',\n form_type='edit')",
"def on_deployment_topic(self, client, userdata, msg):\n command = msg.payload.decode(message_string_encoding)\n if command == 'reboot':\n self.loop.create_task(deploy.reboot())\n elif command == 'shutdown':\n self.loop.create_task(deploy.shutdown())\n elif command == 'restart':\n self.loop.create_task(deploy.restart())\n elif command == 'git pull':\n self.loop.create_task(deploy.git_pull(\n self.pi_username, restart_afterwards=True\n ))\n elif command == 'stop':\n raise KeyboardInterrupt",
"def execute(self):\n return LOGGER.info(f\"{datetime.datetime.now()} - Sending notification in Slack\")",
"async def _add_command(self, cmd_type, cmd_trigger, *, cmd: str):\n if cmd_type == 'tts':\n is_tts = True\n else:\n is_tts = False\n cur = self.conn.cursor()\n # Postgres query\n cur.execute( \n \"INSERT INTO message_commands (invoke, message, istts, idk)\"\n \" VALUES (%s, %s, %s, %s) \"\n \"ON CONFLICT (invoke) \"\n \"DO UPDATE SET message = EXCLUDED.message\",\n [cmd_trigger, cmd, is_tts, True])\n self.conn.commit()\n cur.close()",
"def test_logging_trigger_event_into_rabbitmq(self,\n admin_node,\n k8scluster,\n elastic_client_public,\n show_step):\n show_step(1)\n ec = elastic_client_public\n k8sclient = k8scluster.api\n rabbitmq_pod = [pod for pod in\n k8sclient.pods.list(\n namespace=ext.Namespace.BASE_NAMESPACE)\n if 'rabbitmq' in pod.name][0]\n\n show_step(2)\n rabbitmq_id = str(uuid.uuid4()).replace('-', '')\n rabbitmq_template = \"=INFO REPORT==== {} ===\\n\" \\\n \"accepting AMQP connection <0.580.0> \" \\\n \"(10.233.83.7 -> 10.233.83.89:5672):\\n\" \\\n \"{}\".format(datetime.today()\n .strftime(\"%d-%b-%Y::%H:%M:%S\"),\n rabbitmq_id)\n admin_node.check_call(\n 'kubectl exec {} --namespace={} -- {}'.format(\n rabbitmq_pod.name,\n ext.Namespace.BASE_NAMESPACE,\n '\\'/bin/bash\\' -xc \\'(echo -e \\\"{}\\n\\\" >> '\n '/var/log/ccp/rabbitmq/rabbitmq.log)\\''.format(\n rabbitmq_template)),\n expected=[ext.ExitCodes.EX_OK])\n\n show_step(3)\n injected = ec.find('Payload', rabbitmq_id)\n assert injected.count == 1,\\\n \"New log message from mysql from {} not picked by heka\".format(\n rabbitmq_pod)",
"def test_post_sends_text(self, publish_mock: mock.Mock) -> None:\n\n def side_effect(*args: str, **_: str) -> Any:\n if args[0] == \"registry:first:value\":\n return [\"00:00:00:00:00\"]\n if args[0] == \"app_url\":\n return [\"/\"]\n return mock.DEFAULT\n\n publish_mock.side_effect = side_effect\n\n response = self.request(\n \"/\",\n method=\"POST\",\n accept=\"text\",\n host=\"host1\"\n )\n\n self.assertEqual(response.code, 200)\n self.assertEqual(response.body, \"WoL packet sent.\")",
"async def test_webhook_different_callback_id(doof, event_loop, mocker):\n finish_release_mock = mocker.patch(\n 'bot.finish_release', autospec=True\n )\n await doof.handle_webhook(\n loop=event_loop,\n webhook_dict={\n \"token\": \"token\",\n \"callback_id\": \"xyz\",\n \"channel\": {\n \"id\": \"doof\"\n },\n \"user\": {\n \"id\": \"doofenshmirtz\"\n },\n \"message_ts\": \"123.45\",\n \"original_message\": {\n \"text\": \"Doof's original text\",\n }\n },\n )\n\n assert finish_release_mock.called is False",
"def trigger(self, branch=\"master\", type=\"custom\", commit=None, pattern=None, variables=None):\n data = {\n \"target\": {\n \"ref_type\": \"branch\",\n \"type\": \"pipeline_ref_target\",\n \"ref_name\": branch,\n },\n }\n if commit is not None:\n data[\"target\"][\"commit\"] = {\n \"type\": type,\n \"hash\": commit,\n }\n if pattern is not None:\n data[\"target\"][\"selector\"] = {\n \"type\": \"custom\",\n \"pattern\": pattern,\n }\n if variables is not None:\n data[\"variables\"] = variables\n\n return self.__get_object(self.post(None, trailing=True, data=data))",
"def test_push(self):\n url = '/api/apps'\n body = {'formation': 'autotest'}\n response = self.client.post(url, json.dumps(body), content_type='application/json')\n self.assertEqual(response.status_code, 201)\n app_id = response.data['id']\n # prepare a push body\n body = {\n 'sha': 'df1e628f2244b73f9cdf944f880a2b3470a122f4',\n 'fingerprint': '88:25:ed:67:56:91:3d:c6:1b:7f:42:c6:9b:41:24:80',\n 'receive_user': 'autotest',\n 'receive_repo': 'repo.git',\n 'ssh_connection': '10.0.1.10 50337 172.17.0.143 22',\n 'ssh_original_command': \"git-receive-pack 'repo.git'\",\n }\n # post a request without the auth header\n url = \"/api/apps/{app_id}/push\".format(**locals())\n response = self.client.post(url, json.dumps(body), content_type='application/json')\n self.assertEqual(response.status_code, 403)\n # now try with the builder key in the special auth header\n response = self.client.post(url, json.dumps(body), content_type='application/json',\n HTTP_X_DEIS_BUILDER_AUTH=settings.BUILDER_KEY)\n self.assertEqual(response.status_code, 201)\n for k in ('owner', 'app', 'sha', 'fingerprint', 'receive_repo', 'receive_user',\n 'ssh_connection', 'ssh_original_command'):\n self.assertIn(k, response.data)",
"def post(self):\n args = parser.parse_args()",
"async def async_attach_trigger(\n hass: HomeAssistant,\n config: ConfigType,\n action: TriggerActionType,\n trigger_info: TriggerInfo,\n *,\n platform_type: str = \"event\",\n) -> CALLBACK_TYPE:\n trigger_data = trigger_info[\"trigger_data\"]\n variables = trigger_info[\"variables\"]\n\n template.attach(hass, config[CONF_EVENT_TYPE])\n event_types = template.render_complex(\n config[CONF_EVENT_TYPE], variables, limited=True\n )\n removes = []\n\n event_data_schema = None\n if CONF_EVENT_DATA in config:\n # Render the schema input\n template.attach(hass, config[CONF_EVENT_DATA])\n event_data = {}\n event_data.update(\n template.render_complex(config[CONF_EVENT_DATA], variables, limited=True)\n )\n # Build the schema\n event_data_schema = vol.Schema(\n {vol.Required(key): value for key, value in event_data.items()},\n extra=vol.ALLOW_EXTRA,\n )\n\n event_context_schema = None\n if CONF_EVENT_CONTEXT in config:\n # Render the schema input\n template.attach(hass, config[CONF_EVENT_CONTEXT])\n event_context = {}\n event_context.update(\n template.render_complex(config[CONF_EVENT_CONTEXT], variables, limited=True)\n )\n # Build the schema\n event_context_schema = vol.Schema(\n {\n vol.Required(key): _schema_value(value)\n for key, value in event_context.items()\n },\n extra=vol.ALLOW_EXTRA,\n )\n\n job = HassJob(action, f\"event trigger {trigger_info}\")\n\n @callback\n def filter_event(event: Event) -> bool:\n \"\"\"Filter events.\"\"\"\n try:\n # Check that the event data and context match the configured\n # schema if one was provided\n if event_data_schema:\n event_data_schema(event.data)\n if event_context_schema:\n event_context_schema(dict(event.context.as_dict()))\n except vol.Invalid:\n # If event doesn't match, skip event\n return False\n return True\n\n @callback\n def handle_event(event: Event) -> None:\n \"\"\"Listen for events and calls the action when data matches.\"\"\"\n hass.async_run_hass_job(\n job,\n {\n \"trigger\": {\n **trigger_data,\n \"platform\": platform_type,\n \"event\": event,\n \"description\": f\"event '{event.event_type}'\",\n }\n },\n event.context,\n )\n\n removes = [\n hass.bus.async_listen(event_type, handle_event, event_filter=filter_event)\n for event_type in event_types\n ]\n\n @callback\n def remove_listen_events() -> None:\n \"\"\"Remove event listeners.\"\"\"\n for remove in removes:\n remove()\n\n return remove_listen_events",
"def test_duo_application_post(self):\n pass",
"def test_post_user_post(self):\n pass",
"def create_webhooks():\n client = RequestManager()\n client.set_method(\"POST\")\n client.set_endpoint(\"/projects/{0}/webhooks\".format(STORED_ID['project_id']))\n name = \"\".join(choices(string.ascii_letters, k=6))\n body = {\"webhook_url\": 'https://' + name, \"webhook_version\": 'v5'}\n client.set_body(json.dumps(body))\n response = client.execute_request()\n try:\n STORED_ID['webhook_id'] = response.json()['id']\n except KeyError:\n LOGGER.info(response.json())",
"def post(self, *args, **kwargs):\n body = {}\n # This is setting the code to success by default\n status_code = 201\n\n body_data = json.loads(self.json_body)\n\n try:\n stack_name = body_data['stack_name']\n automation_name = body_data['automation_name']\n tenant_name = body_data['tenant_name']\n except KeyError as a_error:\n error_msg = 'Missing Required Data. Error: %s' % (str(a_error))\n raise GatewayAPIException(status_code=400, reason=error_msg)\n\n try:\n # Step One - Query Automation Database for Automation Provisioning Data\n if self.provision_provider:\n automation_data = yield self.provision_provider.get_automation_data(automation_name=automation_name)\n body = {\"results\": automation_data}\n # Step Two - Trigger Automation determined from Automation Provisioning data\n result = self.provision_provider.trigger_automation(stack_name=stack_name, automation_data=automation_data, tenant_name=tenant_name)\n if result['result']:\n msg = 'Successfully kicked off automation for stack: %s at tenant name: %s' % (stack_name, tenant_name,)\n body = {\"results\": msg}\n else:\n msg = 'Failed to kick off automation for stack: %s at tenant name: %s' % (stack_name, tenant_name,)\n body = {\"results\": msg}\n else:\n LOGGER.error('Provision Provider is None.')\n raise Exception('Internal Coding Error.')\n except Exception as an_error:\n error_msg = str(an_error)\n LOGGER.exception(error_msg)\n raise GatewayAPIException(status_code=400, reason=error_msg)\n\n self.set_status(status_code)\n self.write(body)\n self.finish()",
"def post(self, request, framework):\n self._logger.debug(\"WebhookScoreBot post entered.\")\n\n pr_data = request.data.get(\"pull_request\")\n if not pr_data:\n self._logger.error(f\"Invalid pull request data received: {pr_data}\")\n return response.Response(status=status.HTTP_400_BAD_REQUEST)\n\n pr_url = pr_data.get(\"html_url\")\n if not pr_url:\n self._logger.error(f\"Invalid pull request url received: {pr_data}\")\n return response.Response(status=status.HTTP_400_BAD_REQUEST)\n\n user = pr_data.get(\"user\")\n if user:\n login = user.get(\"login\")\n self._logger.info(f\"Username associated with PR: {login}\")\n\n # Drop excluded repos\n excluded_repos = ScorebotConfig.objects.filter(config=\"excluded_repos\")\n excluded_repos = excluded_repos.values()[0][\"value\"].split(\",\") if excluded_repos else []\n\n try:\n repo = pr_url.split(\"/pull\")[0]\n repo = repo[repo.rfind(\"/\")+1:]\n\n except Exception:\n self._logger.error(f\"Invalid pull request url received: {pr_url}\")\n return response.Response(status=status.HTTP_400_BAD_REQUEST)\n\n if repo in excluded_repos:\n self._logger.info(f\"Repo {repo} found in excluded repos list\")\n return response.Response(status=status.HTTP_200_OK)\n\n # Otherwise queue pull request data for processing\n self._logger.info(f\"Queuing action: {pr_url} for pull request\")\n response_status = self._queue_pull_request_data(pr_url, framework)\n\n return response.Response(status=response_status)",
"def test_zendesk_comment_and_resolve_ticket_command_closes_the_issue(\n post_message,\n create_ticket,\n close_ticket,\n get_ticket,\n add_comment,\n resolve_command,\n log,\n db\n):\n slack_client = MagicMock()\n zendesk_client = MagicMock()\n workspace_uri = 'https://s.l.a.c.k'\n zendesk_uri = 'https://z.e.n.d.e.s.k'\n user_id = '100000000004'\n group_id = '200000000005'\n\n slack_client.users_info.return_value = FakeUserResponse()\n get_ticket.return_value = None\n ticket = FakeTicket(ticket_id='77')\n create_ticket.return_value = ticket\n assert ZenSlackChat.objects.count() == 0\n\n def handle_message(payload):\n is_handled = handler(\n payload,\n our_channel='C0192NP3TFG',\n workspace_uri=workspace_uri,\n zendesk_uri=zendesk_uri,\n slack_client=slack_client,\n zendesk_client=zendesk_client,\n user_id=user_id,\n group_id=group_id,\n )\n assert is_handled is True\n\n # Create an issue\n #\n handle_message({\n 'channel': 'C0192NP3TFG',\n 'event_ts': '1602064330.001600',\n 'text': 'My 🖨 is on 🔥',\n 'ts': '1602064330.001600',\n 'user': 'UGF7MRWMS',\n })\n\n # There should now be one instance here:\n assert ZenSlackChat.objects.count() == 1\n assert len(ZenSlackChat.open_issues()) == 1\n\n # Verify what the stored issue should look like:\n issue = ZenSlackChat.get('C0192NP3TFG', '1602064330.001600')\n assert issue.active is True\n assert issue.opened is not None\n assert issue.closed is None\n assert issue.channel_id == 'C0192NP3TFG'\n assert issue.chat_id == '1602064330.001600'\n assert issue.ticket_id == '77'\n\n # Check a new comment is sent over to zendesk:\n #\n create_ticket.reset_mock()\n post_message.reset_mock()\n\n # Return the fake ticket instance this time\n get_ticket.return_value = ticket\n\n handle_message({\n 'channel': 'C0192NP3TFG',\n 'event_ts': '1602064330.001600',\n 'text': 'No wait, it was just a blinking red light',\n # This is a reply message so thread_ts refers to the parent chat id:\n 'thread_ts': issue.chat_id,\n # and the ts refers to the reply message id:\n 'ts': '1602065965.003200',\n 'user': 'UGF7MRWMS',\n })\n assert ZenSlackChat.objects.count() == 1\n assert len(ZenSlackChat.open_issues()) == 1\n\n # None of test should have changed yet:\n issue = ZenSlackChat.get('C0192NP3TFG', '1602064330.001600')\n assert issue.active is True\n assert issue.opened is not None\n assert issue.closed is None\n assert issue.channel_id == 'C0192NP3TFG'\n assert issue.chat_id == '1602064330.001600'\n assert issue.ticket_id == '77'\n\n # No ticket should be created here\n create_ticket.assert_not_called()\n\n # Check the comment was \"sent\" to Zendesk correctly:\n add_comment.assert_called_with(\n zendesk_client,\n ticket,\n \"Bob Sprocket (Slack): No wait, it was just a blinking red light\"\n )\n\n # No slack message should have been sent:\n post_message.assert_not_called()\n\n # Resolve the issue:\n #\n create_ticket.reset_mock()\n post_message.reset_mock()\n add_comment.reset_mock()\n\n handle_message({\n 'channel': 'C0192NP3TFG',\n 'event_ts': '1602064330.001600',\n 'text': resolve_command,\n # This is a reply message so thread_ts refers to the parent chat id\n 'thread_ts': '1602064330.001600',\n 'ts': '1602065965.003200',\n 'user': 'UGF7MRWMS',\n })\n\n # There should now be one instance here:\n assert ZenSlackChat.objects.count() == 1\n assert len(ZenSlackChat.open_issues()) == 0\n\n # Verify what the stored issue should look like:\n issue = ZenSlackChat.get('C0192NP3TFG', '1602064330.001600')\n assert issue.active is False\n assert issue.opened is not None\n assert 
issue.closed is not None\n assert issue.channel_id == 'C0192NP3TFG'\n assert issue.chat_id == '1602064330.001600'\n assert issue.ticket_id == '77'\n\n slack_client.users_info.assert_called_with(user='UGF7MRWMS')\n create_ticket.assert_not_called()\n add_comment.assert_not_called()\n\n # Check the message that should go to slack closing the issue:\n url = f'https://z.e.n.d.e.s.k/{ticket.id}'\n post_message.assert_called_with(\n slack_client,\n '1602064330.001600',\n 'C0192NP3TFG',\n f'🤖 Understood. Ticket {url} has been closed.'\n )",
"def __call__(self, trigger, type, event):",
"async def async_attach_trigger(hass, config, action, automation_info):\n trigger_data = automation_info.get(\"trigger_data\", {}) if automation_info else {}\n webhook_id = config.get(CONF_WEBHOOK_ID)\n job = HassJob(action)\n hass.components.webhook.async_register(\n automation_info[\"domain\"],\n automation_info[\"name\"],\n webhook_id,\n partial(_handle_webhook, job, trigger_data),\n )\n\n @callback\n def unregister():\n \"\"\"Unregister webhook.\"\"\"\n hass.components.webhook.async_unregister(webhook_id)\n\n return unregister",
"def trigger(builder, revision, files=[], dry_run=False, extra_properties=None):\n repo_name = query_repo_name_from_buildername(builder)\n return buildapi.trigger_arbitrary_job(repo_name, builder, revision, files, dry_run,\n extra_properties)",
"def post_activities():\n pass",
"def test_webhook_unkown_action(self):\n event = {\n \"body\": json.dumps({\n \"queryResult\": {\n \"action\": \"1manage_bmi\"\n }})\n }\n context = {}\n resp = webhook(event, context)\n self.assertEqual(resp[\"statusCode\"], 500)\n self.assertEqual(resp[\"body\"], json.dumps({}))",
"def test_trigger():\n # assume\n subject = Subject()\n observer = Observer()\n subject.handle('test', observer.on_test)\n\n # act\n subject.trigger('test', 1, 'one', True)\n\n # assert\n assert observer.invocations[0] == (1, 'one', True), \"observer did not receive event\"",
"def example_webhook(self, incoming_request):\n return \"Example\"",
"def example_webhook(self, incoming_request):\n return \"Example\"",
"def webhooks(request, deployment_hash):\n if request.method != \"POST\":\n return JsonResponse({'message': 'not found.'}, status=404)\n # Now we pull out the slug and try to preform a new build\n try:\n dep = Deployment.objects.get(webhook_text=deployment_hash)\n # First try to preform a git pull, since the deployment\n # has to exist we know that it was at least cloned\n pull_repo(dep.dir_text, dep.git_branch_text)\n # First check to see if the container is running\n if dep.is_running:\n # It is so stop the container\n stop_container(dep.container_id_text)\n # Now we should build and tag our new image\n build_container(dep.dir_text, dep.name_text)\n # Finally if it should be running we\n # should bring the container back up\n if dep.is_running:\n dep.container_id_text = start_container(dep)\n dep.save()\n\n return JsonResponse({'message': 'success'})\n except ObjectDoesNotExist as e:\n return JsonResponse({'message': 'not found'}, status=404)",
"def do_post(self, *args):\n raise NotImplementedError()",
"def webhook():\n if request.headers.get('content-type') == 'application/json':\n\n json_string = request.get_data().decode('utf-8')\n update = Update.de_json(json_string)\n bot.process_new_updates([update])\n return ''\n\n else:\n abort(403)",
"async def async_attach_trigger(\n hass, config, action, automation_info, *, platform_type=\"event\"\n):\n event_type = config.get(CONF_EVENT_TYPE)\n event_data_schema = None\n if config.get(CONF_EVENT_DATA):\n event_data_schema = vol.Schema(\n {\n vol.Required(key): value\n for key, value in config.get(CONF_EVENT_DATA).items()\n },\n extra=vol.ALLOW_EXTRA,\n )\n\n @callback\n def handle_event(event):\n \"\"\"Listen for events and calls the action when data matches.\"\"\"\n if event_data_schema:\n # Check that the event data matches the configured\n # schema if one was provided\n try:\n event_data_schema(event.data)\n except vol.Invalid:\n # If event data doesn't match requested schema, skip event\n return\n\n hass.async_run_job(\n action,\n {\n \"trigger\": {\n \"platform\": platform_type,\n \"event\": event,\n \"description\": f\"event '{event.event_type}'\",\n }\n },\n event.context,\n )\n\n return hass.bus.async_listen(event_type, handle_event)",
"def trigger(\n self, generative_design_execution_input: GenerativeDesignInput\n ) -> GenerativeDesignExecution:\n path = self._get_path()\n request_dict = generative_design_execution_input.dump()\n data = self.session.post_resource(path, request_dict)\n return self.build(data)",
"def handle_scheduled_command(\n self, command, channel, user, msg_type, args=None):\n if args:\n command = \" \".join([command, args])\n\n response = self.handle_command(command, channel, user, msg_type)\n self.slack_client.response_to_client(response)"
] | [
"0.6756358",
"0.63620687",
"0.6353586",
"0.6080397",
"0.60362595",
"0.5910809",
"0.59053904",
"0.58640003",
"0.58217233",
"0.57251173",
"0.5721428",
"0.5700421",
"0.5697234",
"0.5693077",
"0.56879246",
"0.5658715",
"0.5656193",
"0.5644521",
"0.5596793",
"0.5591541",
"0.55492896",
"0.5533883",
"0.55296797",
"0.5521012",
"0.54766965",
"0.54514354",
"0.54494166",
"0.5446674",
"0.54450625",
"0.54301065",
"0.54237705",
"0.5418888",
"0.53981775",
"0.5353109",
"0.534515",
"0.53450114",
"0.5342693",
"0.5339547",
"0.5300849",
"0.5291787",
"0.5291284",
"0.5283149",
"0.5274528",
"0.52715987",
"0.52691805",
"0.52687794",
"0.5246816",
"0.5245435",
"0.5238411",
"0.5232724",
"0.5232401",
"0.5231718",
"0.5224973",
"0.5219287",
"0.5218396",
"0.5216358",
"0.5215254",
"0.51896286",
"0.5185784",
"0.5181383",
"0.51758957",
"0.5166393",
"0.5165347",
"0.5157166",
"0.5154953",
"0.5152444",
"0.51471114",
"0.51283073",
"0.5120414",
"0.51168764",
"0.5113794",
"0.5089439",
"0.50848216",
"0.5080114",
"0.5078975",
"0.5075049",
"0.50669307",
"0.5065219",
"0.5064191",
"0.5063719",
"0.5060451",
"0.5056596",
"0.50517225",
"0.50494236",
"0.5045814",
"0.50267494",
"0.502388",
"0.5020428",
"0.50198585",
"0.5014348",
"0.50111127",
"0.5011109",
"0.5008035",
"0.5008035",
"0.5000864",
"0.49985275",
"0.4993311",
"0.49928686",
"0.49902236",
"0.49898556"
] | 0.88001287 | 0 |
Initialize adaptive histogram equalization | def __init__(self, active: bool):
self.clahe = cv2.createCLAHE(clipLimit=3.0, tileGridSize=(8, 8))
self.active = active | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def equalise_hist(image, bin_count=256):\n # TODO: your histogram equalization code\n #define arrays\n image = img_as_ubyte(image)\n row,col = image.shape\n new_image = np.zeros((row,col),dtype='uint8') \n\n # compute the value of each grayscale,and save in image_hist \n image_hist = np.bincount(image.flatten(), minlength=(bin_count))\n\n # normalise n[]\n norm_arr = (np.cumsum(image_hist)/(image.size))*(bin_count-1)\n norm_arr = norm_arr.astype('uint8')\n \n #Compute a normalized cumulative histogram\n for x in range(row):\n for y in range(col):\n new_image[x,y] = norm_arr[image[x,y]]\n \n return new_image",
"def setup_hist(self):\n self.x_min = {}\n self.x_max = {}\n self.x_max_minus_min = {}\n self.dx = {}\n self.n_bins = {}\n\n self.histogram_edges = {}\n self.histogram_values = {}\n self.histogram_cdf = {}",
"def histogram_equalize(im_orig):\n\n color_flag = False\n image = im_orig\n\n\n if len(im_orig.shape) == 3: #RGB image\n color_flag = True\n y_im = rgb2yiq(im_orig)\n image = y_im[:, :, 0]\n\n image *= NORMALIZE\n hist_orig, bins = np.histogram(image, range(BINS))\n hist_cum = np.cumsum(hist_orig) #cumulative distribution function\n\n cum = ((hist_cum - hist_cum.min()) / ( hist_cum.max() - hist_cum.min())) * NORMALIZE\n\n im_eq = cum[image.astype(np.uint8)]\n\n hist_eq, bins = np.histogram(im_eq, range(BINS)) #before getting back to float64 does the histogram)\n\n im_eq /= NORMALIZE\n im_eq = im_eq.astype(np.float64)\n\n\n if color_flag:\n y_im[:, :, 0] = im_eq\n im_eq = yiq2rgb(y_im)\n\n im_eq = im_eq.clip(0,1)\n return [im_eq, hist_orig, hist_eq]",
"def histogram_equalization(img):\n\n if len(img.shape) == 3:\n img_copy = np.copy(img)\n\n blue = img_copy[:,:,0]\n blue = histogram_equalize(blue)\n\n green = img_copy[:,:,1]\n green = histogram_equalize(green)\n\n red = img_copy[:,:,2]\n red = histogram_equalize(red)\n\n new_img = np.zeros(img_copy.shape)\n\n new_img[:,:,0] = blue\n new_img[:,:,1] = green\n new_img[:,:,2] = red\n\n return new_img\n\n else:\n return histogram_equalize(img)",
"def histogram_equalization_helper(im):\n\n im *= (255 / im.max())\n c_m = im.min()\n hist_orig, bins = np.histogram(im, bins=256, range=[0, 256])\n cumulative_hist = np.cumsum(hist_orig)\n cumulative_hist = (((cumulative_hist - c_m) * 255) /(im.size)).astype(int)\n im_eq = cumulative_hist[im.astype(int)]\n hist_eq, bins_eq = np.histogram(im_eq, bins=256, range=[0, 256])\n im_eq = im_eq/ 255\n\n # plt.plot((bins[:-1] + bins[1:]) / 2, hist_orig)\n # plt.hist(im.flatten(), bins=128)\n # plt.show()\n #\n # plt.plot((bins_eq[:-1] + bins_eq[1:]) / 2, hist_eq)\n # plt.hist(im.flatten(), bins=128)\n #\n # plt.show()\n return im_eq, hist_orig, hist_eq",
"def equalize_hist(input):\n return np.float32(skimage.exposure.equalize_hist(input.numpy()))",
"def histogram_equalize(im_orig):\n\n shape_len = len(im_orig.shape)\n if shape_len == 2: # grayscale\n return histogram_equalization_helper(im_orig)\n elif shape_len == 3 and im_orig.shape[2] == 3: # rgb\n im_yiq = rgb2yiq(im_orig)\n y = im_yiq[:, :, 0]\n y_eq, hist_orig, hist_eq = histogram_equalization_helper(y)\n im_yiq[:, :, 0] = y_eq\n im_eq = yiq2rgb(im_yiq)\n return im_eq, hist_orig, hist_eq\n\n else:\n print(\"error\")\n return",
"def histogram_equalize(im_orig):\n if im_orig.ndim == 3:\n return _histogram_equalize_rgb(im_orig)\n return _histogram_equalize_grayscale(im_orig)",
"def btn_equalize_hist_callback(self):\n self.show_as_waiting(True)\n self.image_proc_selected('Histogram Equalization')\n self.show_as_waiting(False)",
"def image_equalise_hist(image: np.ndarray):\n # Resize image to a shape of (48, 48)\n image = image_as_square(image)\n\n # Equalize the histogram of the image\n image = equalizeHist(image)\n\n # Resize the iamge back to a shape of (2304, )\n return image_as_array(image)",
"def adaptive_hist(image):\n mask = np.zeros(image.shape[:2], np.uint8)\n # spatially weighted by Gaussian distribtuion?\n mask = cv2.ellipse(mask, (image.shape[1] // 2,image.shape[0] // 2),\n (image.shape[1] // 2,image.shape[0] // 2), 0, 0, 360, 255, -1)\n\n # RGB color histogram\n hist1 = cv2.calcHist([image], [0], mask, [16], [0, 256]).reshape(1, -1)\n hist2 = cv2.calcHist([image], [1], mask, [16], [0, 256]).reshape(1, -1)\n hist3 = cv2.calcHist([image], [2], mask, [16], [0, 256]).reshape(1, -1)\n rgb_hist = np.concatenate((hist1, hist2, hist3), axis=1)\n cv2.normalize(rgb_hist, rgb_hist)\n\n # HSV color histogram\n img_hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)\n hist1 = cv2.calcHist([img_hsv], [0], mask, [16], [0, 256]).reshape(1, -1)\n hist2 = cv2.calcHist([img_hsv], [1], mask, [16], [0, 256]).reshape(1, -1)\n hsv_hist = np.concatenate((hist1, hist2), axis=1)\n cv2.normalize(hsv_hist, hsv_hist)\n\n # YCrCb color histogram\n img_YCrCb = cv2.cvtColor(image, cv2.COLOR_BGR2YCrCb)\n hist1 = cv2.calcHist([img_YCrCb], [1], mask, [16], [0, 256]).reshape(1, -1)\n hist2 = cv2.calcHist([img_YCrCb], [2], mask, [16], [0, 256]).reshape(1, -1)\n YCrCb_hist = np.concatenate((hist1, hist2), axis=1)\n cv2.normalize(YCrCb_hist, YCrCb_hist)\n\n # Lab color histogram\n img_lab = cv2.cvtColor(image, cv2.COLOR_BGR2Lab)\n hist1 = cv2.calcHist([img_lab], [1], mask, [16], [0, 256]).reshape(1, -1)\n hist2 = cv2.calcHist([img_lab], [2], mask, [16], [0, 256]).reshape(1, -1)\n lab_hist = np.concatenate((hist1, hist2), axis=1)\n cv2.normalize(lab_hist, lab_hist)\n\n # Hog\n #image_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n #image_gray = cv2.resize(image_gray, (200,200))\n #hog_hist = hog(image_gray, orientations=8, block_norm = 'L2-Hys', pixels_per_cell=(50,50), cells_per_block=(1,1), visualize=False).reshape(1, -1)\n #cv2.normalize(hog_hist, hog_hist)\n\n # type?\n #type_hist = np.zeros(8).reshape(1,8) + 0.5\n #type_hist[0, int(image_path[-5])] = 1\n #cv2.normalize(type_hist, type_hist)\n\n #thist = np.transpose(np.concatenate((3 * rgb_hist, hsv_hist, YCrCb_hist, lab_hist, hog_hist), axis=1))\n thist = np.transpose(np.concatenate((3 * rgb_hist, hsv_hist, YCrCb_hist, lab_hist), axis=1))\n thist = thist / sum(thist)\n\n return np.transpose(thist)[0]",
"def create_fixed_hist(self):\n hist = cv2.calcHist([self.obj], [0, 1, 2], None, [32, 8, 8],\n [0, 256, 0, 256, 0, 256])\n self.hist = cv2.normalize(hist).flatten()\n print self.hist",
"def normalize(histogram):\n nbins = histogram.GetNbinsX()\n integral = histogram.Integral(1,nbins)\n newhist = histogram.Clone()\n newhist.Reset()\n for bin in range(1,nbins+1):\n ibinY = histogram.GetBinContent(bin)\n newhist.SetBinContent(bin,ibinY/integral)\n return newhist",
"def __init__(self, param, lower, upper, binCount = 50,\n xscale = None, yweight = None, autoFollow = True):\n logging.debug('Hist init: {} [{}, {}]'\n .format(param.name(), lower, upper))\n super(Histogram, self).__init__(title = \"({0}, {1})\".format(lower, upper))\n # add it to the parameter here\n if isinstance(param, ParameterBase):\n self.param = param # parameter we belong to is mandatory\n self.binCount = int(binCount) # bin count is mandatory\n self.xrange = (float(lower), float(upper))\n # setter chose the first option available for invalid options\n self.xscale = xscale\n self.yweight = yweight\n if not isinstance(autoFollow, bool):\n autoFollow = (autoFollow.title() == \"True\")\n self.autoFollow = autoFollow",
"def histogram_equalize(im_orig):\n rgb, y, im_yiq = check_rgb(im_orig)\n # The algorithm of histogram equalization starts from here\n hist_orig, bin_edges = np.histogram(y*(BITS - 1), BITS, (0, BITS - 1)) # calculate original histogram\n cum_hist = np.cumsum(hist_orig)\n cum_hist_eq = np.rint(((cum_hist - cum_hist[np.nonzero(cum_hist)[0][0]]) / # formula from the class\n (cum_hist[BITS-1] - cum_hist[np.nonzero(cum_hist)[0][0]]))*(BITS-1))\n im_eq = cum_hist_eq[(y*(BITS - 1)).astype(int)] # get the equalized image\n hist_eq, bin_edges_eq = np.histogram(im_eq, BITS, (0, BITS - 1)) # calculate equalized histogram\n im_eq = gray2rgb(rgb, im_eq, im_yiq)\n if rgb:\n im_eq = np.clip(im_eq, 0, 1) # to avoid cases when the pixel value is less then 0 or more then 1\n return [im_eq, hist_orig, hist_eq]",
"def yieldhist(self):\n labels = [\"initial\"] + [f\"N - {i}\" for i in self._names] + [\"N\"]\n if not self._delayed_mode:\n h = hist.Hist(hist.axis.Integer(0, len(labels), name=\"N-1\"))\n h.fill(numpy.arange(len(labels)), weight=self._nev)\n\n else:\n h = hist.dask.Hist(hist.axis.Integer(0, len(labels), name=\"N-1\"))\n for i, weight in enumerate(self._masks, 1):\n h.fill(dask_awkward.full_like(weight, i, dtype=int), weight=weight)\n h.fill(dask_awkward.zeros_like(weight))\n\n return h, labels",
"def _histogram_equalize_image(image, hist_orig):\n cum_hist = np.cumsum(hist_orig)\n cum_hist = (cum_hist * 255) / cum_hist[-1]\n\n image = np.interp(image, np.linspace(0, 1, 256), np.round(cum_hist))\n\n return utils.normalize_image(image)",
"def parameters_histograms(w, dw, a, da, b, db):\n w = w.cpu()\n dw = dw.cpu()\n a = a.cpu()\n da = da.cpu()\n b = b.cpu()\n db = db.cpu()\n \n fig = plt.figure(figsize=(10,6))\n ax = fig.add_subplot(231)\n ax.hist(w.reshape(1, w.shape[0] * w.shape[1]))\n ax.set_title('Weights', fontsize = 11)\n ax.ticklabel_format(style='sci', axis='y', scilimits=(0,0))\n ax = fig.add_subplot(232)\n ax.hist(dw.reshape(1, dw.shape[0] * dw.shape[1]))\n ax.set_title('Weights variations', fontsize = 11)\n ax.ticklabel_format(style='sci', axis='y', scilimits=(0,0))\n ax = fig.add_subplot(233)\n ax.hist(a)\n ax.set_title('Visible bias', fontsize = 11)\n ax.ticklabel_format(style='sci', axis='y', scilimits=(0,0))\n ax = fig.add_subplot(234)\n ax.hist(da)\n ax.set_title('Visible bias variations', fontsize = 11)\n ax.ticklabel_format(style='sci', axis='y', scilimits=(0,0))\n ax = fig.add_subplot(235)\n ax.hist(b)\n ax.set_title('Hidden bias', fontsize = 11)\n ax.ticklabel_format(style='sci', axis='y', scilimits=(0,0))\n ax = fig.add_subplot(236)\n ax.hist(db)\n ax.set_title('Hidden bias variations', fontsize = 11)\n ax.ticklabel_format(style='sci', axis='y', scilimits=(0,0))\n plt.subplots_adjust(hspace=0.25)\n plt.show()\n plt.close('all')",
"def Init_Adaptive_Grid(self,):\n self.Indicator[0] = 0\n self.Old[0] = 0\n \n for i in range(self.num_dim):\n self.Active[i] = i+1\n self.Indicator[i+1] = 1\n self.N_Forward[i,0] = i+2\n self.N_Backward[i,i+1] = 1\n self.Idx[i,i+1] = 1",
"def fit_hist(self, h, iters = 20):\n N = np.sum(h)\n for iter in range(iters):\n\n term1, term2, term3, term4 = 0,0,0,0\n\n for i in range(h.shape[0]):\n term1 += (math.log(i+1) * h[i])\n term2 += h[i]*(math.pow(i/self.k_alp[1],self.k_alp[0]))*(math.log((i+1)/self.k_alp[1]))\n term3 += h[i]*(math.pow(i/self.k_alp[1],self.k_alp[0]))\n term4 += h[i]*(math.pow(i/self.k_alp[1],self.k_alp[0]))*((math.log((i+1)/self.k_alp[1]))**2)\n # print(term1,term2,term3,term4)\n\n dL_dk = (N / self.k_alp[0]) - (N * math.log(self.k_alp[1])) + term1 - term2\n dL_dalpha = (self.k_alp[0] / self.k_alp[1]) * (term3 - N)\n d2L_dk2 = -(N / (self.k_alp[0] ** 2)) - term4\n d2L_dalpha2 = (self.k_alp[0] / (self.k_alp[1] ** 2)) * (N - ((self.k_alp[0] + 1) * term3))\n d2L_dkdalpha = ((1 / self.k_alp[1]) * term3) + ((self.k_alp[0]/self.k_alp[1])*term2) - (N/self.k_alp[1])\n # print(dL_dk,dL_dalpha, d2L_dk2,d2L_dalpha2,d2L_dkdalpha)\n\n self.k_alp = self.k_alp + \\\n np.dot(np.linalg.inv(np.array([[d2L_dk2, d2L_dkdalpha],[d2L_dkdalpha, d2L_dalpha2]])) ,\n np.array([-dL_dk, -dL_dalpha]))",
"def weightHistogram(self, min=None, max=None, nbins=10):\n raise NotImplementedError",
"def histogram_equalize(img):\n\n img_copy = np.copy(img)\n\n elements,counts = np.unique(img_copy,return_counts=True)\n pdf = counts/counts.sum()\n cdf = np.cumsum(pdf)\n new_values = cdf * 255\n\n old_new_map = dict(zip(elements,new_values))\n\n img_new = np.zeros(img_copy.shape)\n for i in old_new_map:\n img_new[img_copy == i] = old_new_map[i]\n\n return img_new",
"def histogram_equalize(im_orig):\n img = get_gray_channel(im_orig)\n img = float2int(img)\n \n # step1: computing histogram\n hist_orig, bins = np.histogram(img, bins=np.arange(MAX_VALUE + 1))\n\n # step2: computing cumulative histogram\n cum_hist = np.cumsum(hist_orig)\n \n # step3+4: Normalizing cumulative histogram and multiplying by\n # the maximal gray level\n norm_factor = (MAX_VALUE - 1) / img.size\n cum_hist = np.multiply(cum_hist, norm_factor)\n \n # step5: Verifying values are in the right range\n if (int(np.amin(cum_hist)) != 0) or \\\n (int(np.amax(cum_hist)) != MAX_VALUE - 1):\n cum_hist = linear_stretch(cum_hist)\n\n # step6: Round values\n cum_hist = np.round(cum_hist)\n\n # step7: Map image intensity values using histogram\n im_eq = cum_hist[img]\n\n hist_eq = np.histogram(im_eq, bins=np.arange(MAX_VALUE + 1))[0]\n im_eq = int2float(im_eq)\n im_eq = update_gray_channel(im_orig, im_eq)\n\n return im_eq, hist_orig, hist_eq",
"def __init__(self, bin_edges, **kwargs):\n super().__init__(**kwargs)\n self.bin_edges = bin_edges\n self.num_bins = bin_edges.size + 1\n self.sigma = Parameter(1.0, transform=positive())",
"def histogramintegrals(self):\n return {}",
"def histogram_equalize(im_orig):\n if im_orig.ndim == RGB_DIM:\n im_yiq = rgb2yiq(im_orig) # Convert to YIQ space.\n\n # Histogram equalize only Y channel.\n result = __gray_histogram_equalize(im_yiq[:, :, Y_CHANNEL])\n im_yiq[:, :, Y_CHANNEL] = result[IMAGE_LOCATION]\n\n # Convert back to RGB.\n im_rgb = yiq2rgb(im_yiq)\n im_rgb_min = np.min(im_rgb)\n im_rgb = (im_rgb - im_rgb_min) / (np.max(im_rgb) - im_rgb_min) # Normalize.\n result[IMAGE_LOCATION] = im_rgb\n return result\n # Otherwise, just histogram equalize.\n return __gray_histogram_equalize(im_orig)",
"def _compute_histogram(self, x, momentum):\n num_bins = self.histogram.size(0)\n x_detached = x.detach()\n self.bin_width = (self._max_val - self._min_val) / (num_bins - 1)\n lo = torch.floor((x_detached - self._min_val) / self.bin_width).long()\n hi = (lo + 1).clamp(min=0, max=num_bins - 1)\n hist = x.new_zeros(num_bins)\n alpha = (\n 1.0\n - (x_detached - self._min_val - lo.float() * self.bin_width)\n / self.bin_width\n )\n hist.index_add_(0, lo, alpha)\n hist.index_add_(0, hi, 1.0 - alpha)\n hist = hist / (hist.sum() + 1e-6)\n self.histogram = (1.0 - momentum) * self.histogram + momentum * hist",
"def from_hist(cls, hist, **options):\n # For 1D hists derived from 2D hists\n if 'rebin' in options:\n hist.Rebin(options.pop('rebin'))\n if 'projectionx' in options:\n arg = options.pop('projectionx')\n bin1 = hist.GetYaxis().FindBin(arg[0])\n bin2 = hist.GetYaxis().FindBin(arg[1])\n hist = hist.ProjectionX(hist.GetName()+\"_px\",bin1,bin2,\"e\")\n if 'projectiony' in options:\n arg = options.pop('projectiony')\n bin1 = hist.GetXaxis().FindBin(arg[0])\n bin2 = hist.GetXaxis().FindBin(arg[1])\n hist = hist.ProjectionY(hist.GetName()+\"_py\",bin1,bin2,\"e\")\n if 'averagex' in options:\n arg = options.pop('averagex')\n bin1 = hist.GetYaxis().FindBin(arg[0])\n bin2 = hist.GetYaxis().FindBin(arg[1])\n avg_hist = hist.ProjectionX(hist.GetName()+\"_px\",bin1,bin1,\"e\")\n for i in range(1,avg_hist.GetNbinsX() + 1):\n avg_hist.SetBinContent(i,0)\n avg_hist.SetBinError(i,0)\n tmp_weights = avg_hist.Clone(hist.GetName()+\"_weights\")\n for i in range(bin1,bin2):\n tmp_hist = hist.ProjectionX(hist.GetName()+\"_px\",i,i+1,\"e\")\n for j in range(1,avg_hist.GetNbinsX() + 1):\n tmp_weights.SetBinContent(j,tmp_weights.GetBinContent(j) + (tmp_hist.GetBinError(j) ** -2))\n tmp_hist.SetBinContent(j,tmp_hist.GetBinContent(j) * (tmp_hist.GetBinError(j) ** -2))\n tmp_hist.SetBinError(j,tmp_hist.GetBinError(j) * (tmp_hist.GetBinError(j) ** -2))\n avg_hist.Add(tmp_hist)\n avg_hist.Divide(tmp_weights)\n hist = avg_hist\n if 'averagey' in options:\n arg = options.pop('averagey')\n bin1 = hist.GetXaxis().FindBin(arg[0])\n bin2 = hist.GetXaxis().FindBin(arg[1])\n avg_hist = hist.ProjectionY(hist.GetName()+\"_py\",bin1,bin1,\"e\")\n for i in range(1,avg_hist.GetNbinsX() + 1):\n avg_hist.SetBinContent(i,0)\n avg_hist.SetBinError(i,0)\n tmp_weights = avg_hist.Clone(hist.GetName()+\"_weights\")\n for i in range(bin1,bin2):\n tmp_hist = hist.ProjectionY(hist.GetName()+\"_py\",i,i+1,\"e\")\n for j in range(1,avg_hist.GetNbinsX() + 1):\n tmp_weights.SetBinContent(j,tmp_weights.GetBinContent(j) + (tmp_hist.GetBinError(j) ** -2))\n tmp_hist.SetBinContent(j,tmp_hist.GetBinContent(j) * (tmp_hist.GetBinError(j) ** -2))\n tmp_hist.SetBinError(j,tmp_hist.GetBinError(j) * (tmp_hist.GetBinError(j) ** -2))\n avg_hist.Add(tmp_hist)\n avg_hist.Divide(tmp_weights)\n hist = avg_hist\n if 'error' in options and options.pop('error'):\n for i in range(1, hist.GetNbinsX()+1):\n hist.SetBinContent(i,hist.GetBinError(i)/hist.GetBinContent(i))\n hist.SetBinError(i,0.0)\n nbins = hist.GetNbinsX()\n bin_edges = np.fromiter((hist.GetBinLowEdge(i) for i in xrange(1,nbins+2)), np.float, nbins+1)\n values = np.fromiter((hist.GetBinContent(i) for i in xrange(1,nbins+1)), np.float, nbins)\n yerr = np.fromiter((hist.GetBinError(i) for i in xrange(1,nbins+1)), np.float, nbins)\n return cls.from_bin_edges(bin_edges,values,yerr,**options)",
"def histogramEqualize(imgOrig: np.ndarray) -> (np.ndarray, np.ndarray, np.ndarray):\r\n\r\n if isRGB(imgOrig):\r\n imgYIQ = transformRGB2YIQ(imgOrig)\r\n Y=imgYIQ[:,:,0]\r\n unnormImg = unnormalize(Y).astype('int')\r\n else:\r\n unnormImg = unnormalize(imgOrig).astype('int')\r\n\r\n histOrig = calHist(unnormImg)\r\n cumSumOrig = calCumSum(histOrig)\r\n\r\n h, w = unnormImg.shape[:2]\r\n LUT=(np.ceil(cumSumOrig*255/(h*w))).astype('uint8')\r\n imgEq=np.zeros_like(unnormImg)\r\n for i in range(h):\r\n for j in range (w):\r\n imgEq[i,j]=LUT[unnormImg[i,j]]\r\n histEq=calHist(imgEq)\r\n imgEq = normalize(imgEq)\r\n\r\n if isRGB(imgOrig):\r\n imgYIQ[:,:,0] = imgEq\r\n imgEq = transformYIQ2RGB(imgYIQ)\r\n\r\n return imgEq,histOrig,histEq",
"def __init__(self, bins):\n self.bins = bins",
"def __init_af(self,i,h1,h2):\n self.params['W'+i]=np.random.randn(h1,h2)*self.weight_scale\n self.params['b'+i]=np.zeros(h2)\n if self.use_batchnorm:\n self.params['gamma'+i]=np.ones(h2)\n self.params['beta'+i]=np.zeros(h2)",
"def __init__(self, nbins):\n self.nbins = nbins\n # Since the kernel used to compute the Parzen histogram covers more\n # than one bin, we need to add extra bins to both sides of the\n # histogram to account for the contributions of the minimum and maximum\n # intensities. Padding is the number of extra bins used at each side\n # of the histogram (a total of [2 * padding] extra bins). Since the\n # support of the cubic spline is 5 bins (the center plus 2 bins at each\n # side) we need a padding of 2, in the case of cubic splines.\n self.padding = 2\n self.setup_called = False",
"def equalizeHist_color(img):\n image = np.empty(img.shape)\n for c in range(img.shape[2]):\n channel = img[:, :, c]\n channel = channel.astype(np.uint8)\n\n # CLAHE\n clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(2, 2))\n channel = clahe.apply(channel)\n\n # http://docs.opencv.org/3.1.0/d5/daf/tutorial_py_histogram_equalization.html\n channel = cv2.equalizeHist(channel)\n try:\n image[:, :, c] = channel\n except Exception as e:\n print(str(e))\n return image",
"def initializeDistribution(self):\n self.convertToDistrDict['Jacobi'] = self.convertJacobiToBeta\n self.convertToQuadDict ['Jacobi'] = self.convertBetaToJacobi\n self.measureNormDict ['Jacobi'] = self.stdProbabilityNorm\n #this \"if\" section can only be called if distribution not generated using readMoreXML\n if (not self.upperBoundUsed) and (not self.lowerBoundUsed):\n self._distribution = distribution1D.BasicBetaDistribution(self.alpha,self.beta,self.high-self.low,self.low)\n else:\n if self.lowerBoundUsed == False:\n a = 0.0\n else:\n a = self.lowerBound\n if self.upperBoundUsed == False:\n b = sys.float_info.max\n else:\n b = self.upperBound\n self._distribution = distribution1D.BasicBetaDistribution(self.alpha,self.beta,self.high-self.low,a,b,self.low)\n self.preferredPolynomials = 'Jacobi'\n self.compatibleQuadrature.append('Jacobi')\n self.compatibleQuadrature.append('ClenshawCurtis')",
"def adaptiveEqHist(img, clipLimit=2.0, tileGridSize=(8,8)):\n\tgray = grayscale(img)\n\tclahe = cv2.createCLAHE(clipLimit=clipLimit, tileGridSize=tileGridSize)\n\tcl1 = clahe.apply(gray)\n\treturn cl1",
"def _make_hist(self, oned_arr):\n hist_ = np.histogram(\n a=oned_arr,\n bins=self.null_distributions_[\"histogram_bins\"],\n range=(\n np.min(self.null_distributions_[\"histogram_bins\"]),\n np.max(self.null_distributions_[\"histogram_bins\"]),\n ),\n density=False,\n )[0]\n return hist_",
"def initializeDistribution(self):\n if (self.lowerBoundUsed == False and self.upperBoundUsed == False):\n self._distribution = distribution1D.BasicWeibullDistribution(self.k,self.lambdaVar,self.low)\n self.lowerBound = self.low\n self.upperBound = sys.float_info.max\n else:\n if self.lowerBoundUsed == False:\n self.lowerBound = self.low\n if self.upperBoundUsed == False:\n self.upperBound = sys.float_info.max\n self._distribution = distribution1D.BasicWeibullDistribution(self.k,self.lambdaVar,self.lowerBound,self.upperBound,self.low)",
"def compute_histogram(im, block_factor=3, color_space='HSV'):\n\n # Shape = rows and columns\n remainder_rows = im.shape[0] % block_factor\n remainder_cols = im.shape[1] % block_factor\n\n im_block = cv2.copyMakeBorder(im, block_factor - remainder_rows, 0, block_factor - remainder_cols, 0,\n cv2.BORDER_CONSTANT)\n\n windowsize_r = int(im_block.shape[0] / block_factor)\n windowsize_c = int(im_block.shape[1] / block_factor)\n\n # print(im_block.shape)\n # print(str(windowsize_r)+' '+str(windowsize_c))\n # cv2.imshow(\"fullImg\", im_block)\n\n hist = []\n for r in range(0, im_block.shape[0], windowsize_r):\n for c in range(0, im_block.shape[1], windowsize_c):\n hist_blocks = []\n window = im_block[r:r + windowsize_r, c:c + windowsize_c]\n if color_space == 'GRAY':\n window_gray = cv2.cvtColor(window, cv2.COLOR_BGR2GRAY)\n hist_block = cv2.calcHist([window_gray], [0], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n elif color_space == 'RGB':\n hist_block = cv2.calcHist([window], [0], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n hist_block = cv2.calcHist([window], [1], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n hist_block = cv2.calcHist([window], [2], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n elif color_space == 'HSV':\n window = cv2.cvtColor(window, cv2.COLOR_BGR2HSV)\n hist_block = cv2.calcHist([window], [0], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n hist_block = cv2.calcHist([window], [1], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n hist_block = cv2.calcHist([window], [2], None, [256], [0, 256])\n cv2.normalize(hist_block, hist_block, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)\n hist_blocks.append(hist_block)\n \n hist.append(hist_blocks)\n\n return hist",
"def __init__(self, array, compute_histogram=True):\n\n self.data = array\n self.histogram = np.array([])\n self.dim_x = array.shape[0]\n self.dim_y = array.shape[1]\n self.dim_z = array.shape[2]\n\n if compute_histogram:\n self.compute_histogram()",
"def makeHistogram(values, numBins, xLabel, yLabel, title=None):",
"def test_make_histograms(self):\r\n raw_lengths = [90, 100, 110, 110, 130, 135]\r\n pre_lengths = [100, 110, 105, 130, 135]\r\n post_lengths = [130, 135]\r\n raw_hist, pre_hist, post_hist, bin_edges = \\\r\n make_histograms(raw_lengths, pre_lengths, post_lengths)\r\n assert_almost_equal(pre_hist, array([0, 2, 1, 0, 2]))\r\n assert_almost_equal(post_hist, array([0, 0, 0, 0, 2]))\r\n assert_almost_equal(bin_edges, array([90, 100, 110, 120, 130, 140]))",
"def _histogram_with_spread(self):\n complexity_hist = np.bincount(\n self.epoch.array_annotations['complexity'])\n num_bins = (self.t_stop - self.t_start).rescale(\n self.bin_size.units).item() / self.bin_size.item()\n num_bins = round_binning_errors(num_bins, tolerance=self.tolerance)\n time_hist = np.zeros(num_bins, dtype=int)\n\n start_bins = (self.epoch.times - self.t_start).rescale(\n self.bin_size.units).magnitude / self.bin_size.item()\n stop_bins = (self.epoch.times + self.epoch.durations - self.t_start\n ).rescale(self.bin_size.units\n ).magnitude / self.bin_size.item()\n\n if self.sampling_rate is not None:\n shift = (.5 / self.sampling_rate / self.bin_size).simplified.item()\n # account for the first bin not being shifted in the epoch creation\n # if the shift would move it past t_start\n if self.epoch.times[0] == self.t_start:\n start_bins[1:] += shift\n else:\n start_bins += shift\n stop_bins += shift\n\n start_bins = round_binning_errors(start_bins, tolerance=self.tolerance)\n stop_bins = round_binning_errors(stop_bins, tolerance=self.tolerance)\n\n for idx, (start, stop) in enumerate(zip(start_bins, stop_bins)):\n time_hist[start:stop] = \\\n self.epoch.array_annotations['complexity'][idx]\n\n time_hist = neo.AnalogSignal(\n signal=np.expand_dims(time_hist, axis=1),\n sampling_period=self.bin_size, units=pq.dimensionless,\n t_start=self.t_start)\n\n empty_bins = (self.t_stop - self.t_start - self.epoch.durations.sum())\n empty_bins = empty_bins.rescale(self.bin_size.units\n ).magnitude / self.bin_size.item()\n empty_bins = round_binning_errors(empty_bins, tolerance=self.tolerance)\n complexity_hist[0] = empty_bins\n\n return time_hist, complexity_hist",
"def __init__(self, iterable=None):\n super(Histogram, self).__init__()\n self.types = 0 # the number of distinct item types in this histogram\n self.tokens = 0 # the total count of all item tokens in this histogram\n if iterable:\n self.update(iterable)",
"def yieldhist(self):\n labels = [\"initial\"] + list(self._names)\n\n if not self._delayed_mode:\n honecut = hist.Hist(hist.axis.Integer(0, len(labels), name=\"onecut\"))\n hcutflow = honecut.copy()\n hcutflow.axes.name = (\"cutflow\",)\n honecut.fill(numpy.arange(len(labels)), weight=self._nevonecut)\n hcutflow.fill(numpy.arange(len(labels)), weight=self._nevcutflow)\n\n else:\n honecut = hist.dask.Hist(hist.axis.Integer(0, len(labels), name=\"onecut\"))\n hcutflow = honecut.copy()\n hcutflow.axes.name = (\"cutflow\",)\n\n for i, weight in enumerate(self._masksonecut, 1):\n honecut.fill(\n dask_awkward.full_like(weight, i, dtype=int), weight=weight\n )\n honecut.fill(dask_awkward.zeros_like(weight))\n for i, weight in enumerate(self._maskscutflow, 1):\n hcutflow.fill(\n dask_awkward.full_like(weight, i, dtype=int), weight=weight\n )\n hcutflow.fill(dask_awkward.zeros_like(weight))\n\n return honecut, hcutflow, labels",
"def fillHistograms(self, params, hists, mode = INTENS):\n\t\tif mode.IS_THEO and not self.hasTheo:\n\t\t\tprint \"No theory loaded, cannot fill histogram\"\n\t\tif not len(hists) == self.nSect:\n\t\t\traise IndexError(\"Histogram number mismatch\")\n\t\tcorrAmp = self.getCorrectedAmplitudes(params)\n\t\tfor s in range(self.nSect):\n\t\t\tcount = 0\n\t\t\tstart = self.borders[s ]\n\t\t\tstop = self.borders[s+1]\n\t\t\tfor i in range(start, stop):\n\t\t\t\tampl = corrAmp[2*i] + 1.j * corrAmp[2*i+1]\n\t\t\t\tnorm = self.norms[i]\n\t\t\t\tcoma = np.zeros((2,2))\n\t\t\t\tjac = np.zeros((2))\n\t\t\t\tcoma[0,0] = self.coma[2*i ,2*i ]\n\t\t\t\tcoma[0,1] = self.coma[2*i ,2*i+1]\n\t\t\t\tcoma[1,0] = self.coma[2*i+1,2*i ]\n\t\t\t\tcoma[1,1] = self.coma[2*i+1,2*i+1]\n\t\t\t\tif mode == INTENS:\n\t\t\t\t\tval = abs(ampl)**2\n\t\t\t\t\tjac[0] = 2*ampl.real\n\t\t\t\t\tjac[1] = 2*ampl.imag\n\t\t\t\telif mode == INTENSNORM:\n\t\t\t\t\tval = abs(ampl)**2/norm\n\t\t\t\t\tjac[0] = 2*ampl.real/norm\n\t\t\t\t\tjac[1] = 2*ampl.imag/norm\n\t\t\t\telif mode == REAL:\n\t\t\t\t\tval = ampl.real\n\t\t\t\t\tjac[0] = 1.\n\t\t\t\telif mode == IMAG:\n\t\t\t\t\tval = ampl.imag\n\t\t\t\t\tjac[1] = 1.\n\t\t\t\telif mode == REIMCORRELATION:\n\t\t\t\t\tval = coma[0,1]\n\t\t\t\telif mode == PHASE:\n\t\t\t\t\tval = phase(ampl)\n\t\t\t\t\tif ampl.real == 0.:\n\t\t\t\t\t\tif ampl.imag > 0.:\n\t\t\t\t\t\t\tjac[0] = -1./ampl.imag\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tjac[0] = 1./ampl.imag\n\t\t\t\t\telse:\n\t\t\t\t\t\tcommon = 1. + ampl.imag**2/ampl.real**2\n\t\t\t\t\t\tjac[0] = -ampl.imag/ampl.real**2/common\n\t\t\t\t\t\tjac[1] = 1./ampl.real/common\n\t\t\t\telif mode == INTENSTHEO:\n\t\t\t\t\tval = abs(self.theo[i])**2\n\t\t\t\telif mode == REALTHEO:\n\t\t\t\t\tval = self.theo[i].real\n\t\t\t\telif mode == IMAGTHEO:\n\t\t\t\t\tval = self.theo[i].imag\n\t\t\t\telif mode == PHASETHEO:\n\t\t\t\t\tval = phase(self.theo[i])\n\t\t\t\telse:\n\t\t\t\t\traise ValueError(\"Unknown mode '\" + mode + \"'\")\n\t\t\t\terr = np.dot(jac, np.dot(coma,jac))**.5\n\t\t\t\thists[s].SetBinContent(self.bin3pi+1, count + 1, val)\n\t\t\t\thists[s].SetBinError(self.bin3pi+1, count + 1, err)\n\t\t\t\tcount += 1",
"def initializeDistribution(self):\n if (self.lowerBoundUsed == False and self.upperBoundUsed == False):\n self._distribution = distribution1D.BasicExponentialDistribution(self.lambdaVar,self.low)\n self.lowerBound = self.low\n self.upperBound = sys.float_info.max\n else:\n if self.lowerBoundUsed == False:\n self.lowerBound = self.low\n if self.upperBoundUsed == False:\n self.upperBound = sys.float_info.max\n self._distribution = distribution1D.BasicExponentialDistribution(self.lambdaVar,self.lowerBound,self.upperBound,self.low)",
"def histo ( self ,\n xbins = 20 , xmin = None , xmax = None ,\n ybins = 20 , ymin = None , ymax = None ,\n hpars = () , \n histo = None ,\n integral = False ,\n errors = False , \n density = False ) :\n \n \n histos = self.make_histo ( xbins = xbins , xmin = xmin , xmax = xmax ,\n ybins = ybins , ymin = ymin , ymax = ymax ,\n hpars = hpars ,\n histo = histo )\n\n # loop over the historgam bins \n for ix,iy,x,y,z in histo.items() :\n\n xv , xe = x.value() , x.error()\n yv , ye = y.value() , y.error()\n \n # value at the bin center \n c = self ( xv , yv , error = errors ) \n\n if not integral : \n histo[ix,iy] = c\n continue\n\n # integral over the bin \n v = self.integral( xv - xe , xv + xe , yv - ye , yv + ye )\n \n if errors :\n if 0 == c.cov2 () : pass\n elif 0 != c.value() and 0 != v : \n v = c * ( v / c.value() )\n \n histo[ix,iy] = v \n\n ## coovert to density historgam, if requested \n if density : histo = histo.density()\n \n return histo",
"def __init__(self, convergence_threshold: float = 0.001, max_iterations: typing.List[int] = (50, 50, 50, 50),\n fullwidth_at_halfmax: float = 0.15, filter_noise: float = 0.01,\n histogram_bins: int = 200, control_points: typing.List[int] = (4, 4, 4),\n spline_order: int = 3):\n super().__init__()\n self.convergence_threshold = convergence_threshold\n self.max_iterations = max_iterations\n self.fullwidth_at_halfmax = fullwidth_at_halfmax\n self.filter_noise = filter_noise\n self.histogram_bins = histogram_bins\n self.control_points = control_points\n self.spline_order = spline_order",
"def _histogram_equalize_rgb(im_orig):\n imYIQ = utils.rgb2yiq(im_orig)\n\n hist_orig = utils.get_histogram(imYIQ[:, :, 0])\n\n imYIQ[:, :, 0] = _histogram_equalize_image(imYIQ[:, :, 0], hist_orig)\n\n hist_eq = utils.get_histogram(imYIQ[:, :, 0])\n\n im_eq = utils.yiq2rgb(imYIQ)\n\n return [im_eq, hist_orig, hist_eq]",
"def histeq( im, nbr_bins = 256):\n\t# get image histogram \n\timhist, bins = histogram( im.flatten(), nbr_bins, normed = True) \n\tcdf = imhist.cumsum() \n\t# cumulative distribution function cdf = 255 * cdf / cdf[-1] \n\t# normalize \n\t# use linear interpolation of cdf to find new pixel values \n\tim2 = interp( im.flatten(), bins[:-1], cdf) \n\treturn im2.reshape( im.shape), cdf",
"def __init__(self, bandits: List[Bandit]):\n self.bandits = bandits\n # mu_pri and var_pri are hypermaterers for prior distribution\n self.mu_pri = np.zeros(len(self.bandits)) # 5 for each\n self.var_pri = np.ones(len(self.bandits))*5\n # mu and var are hyperparameters for posterior distribution\n self.mu = self.mu_pri\n self.var = self.var_pri\n self.var0 = 1 # 1 is taken but any can be taken no prob upto a limit constant of inintial distribution\n self.logging = Logging()",
"def addHistogram1D(self, name, title, n_bins, minimum, maximum):\n\t\tself.histograms[ name ] = ROOT.TH1F(name, title, n_bins, minimum, maximum)",
"def BICHistogram(self):\n if not self._bicHistogram is 0:\n return self._bicHistogram\n hsvimg = self.HsvImage()\n #Note that in OpenCV hsv uses the ranges [0,179], [0,255] and [0,255] respectively\n histogram = numpy.zeros(56, dtype=numpy.float32)\n [width, height, depth] = hsvimg.shape\n swidth = width-1\n sheight = height-1\n for y in xrange(height):\n for x in xrange(width):\n #index = self.HsvBin(hsvimg[x][y])\n #if index != self.HsvBin(hsvimg[min(x+1, swidth)][min(y+1, sheight)]) or index != self.HsvBin(hsvimg[min(x+1, swidth)][max(y-1, 0)]) or index != self.HsvBin(hsvimg[max(x-1, 0)][min(y+1, sheight)]) or index != self.HsvBin(hsvimg[max(x-1, 0)][max(y-1, 0)]):\n index=self.HsvBin(x, y)\n if index != self.HsvBin(min(x+1, swidth),min(y+1, sheight)) or index != self.HsvBin(min(x+1, swidth),max(y-1, 0)) or index != self.HsvBin(max(x-1, 0),min(y+1, sheight)) or index != self.HsvBin(max(x-1, 0),max(y-1, 0)):\n histogram[28+index] += 1\n else:\n histogram[index] += 1\n histogram /= width*height\n sHistogram = numpy.zeros(56, dtype=numpy.float32)\n sHistogram[0] = 0.25 * histogram[20] + 0.5 * histogram[0] + 0.25 * histogram[1]\n sHistogram[20] = 0.5 * histogram[20] + 0.25 * histogram[0] + 0.25 * histogram[19]\n \n for i in xrange(1, 19):\n sHistogram[i] = 0.25 * histogram[i-1] + 0.5 * histogram[i] + 0.25 * histogram[i+1]\n \n sHistogram[28] = 0.25 * histogram[48] + 0.5 * histogram[28] + 0.25 * histogram[29]\n sHistogram[48] = 0.5 * histogram[48] + 0.25 * histogram[28] + 0.25 * histogram[47]\n \n for i in xrange(29, 47):\n sHistogram[i] = 0.25 * histogram[i-1] + 0.5 * histogram[i] + 0.25 * histogram[i+1]\n self._bicHistogram = sHistogram\n return sHistogram",
"def initialize_histogram(xx, alpha=1.0, colorV=None, facecolor='#80D080', nbins=75,\n fontsize=8, linewidth=1, xlabel=None, ylabel=None, label=None):\n fig = plt.figure(figsize=(10 * 0.6, 5 * 0.6))\n hist_ax = plt.axes([0.15, 0.25, 0.8, 0.65]) # axes constructor axes([left, bottom, width, height])\n draw_histogram(xx, hist_ax, alpha=alpha, colorV=colorV, facecolor=facecolor, nbins=nbins,\n fontsize=fontsize, linewidth=linewidth, xlabel=xlabel, ylabel=ylabel)\n return fig, hist_ax",
"def _histogram_equalize_grayscale(im_orig):\n image = im_orig.copy()\n\n hist_orig = utils.get_histogram(image)\n\n im_eq = _histogram_equalize_image(image, hist_orig)\n\n hist_eq = utils.get_histogram(im_eq)\n\n return [im_eq, hist_orig, hist_eq]",
"def update_histo_frame():\n min_histo.text = str(MIN_RANGE_F) # Display the legend\n max_histo.text = str(MAX_RANGE_F)\n\n histogram = np.zeros(GRID_AXIS) # Clear histogram accumulation array\n # Collect camera data and calculate the histogram\n for _row in range(0, GRID_AXIS):\n for _col in range(0, GRID_AXIS):\n histo_index = int(map_range(GRID_DATA[_col, _row], 0, 1, 0, GRID_AXIS - 1))\n histogram[histo_index] = histogram[histo_index] + 1\n\n histo_scale = np.max(histogram) / (GRID_AXIS - 1)\n if histo_scale <= 0:\n histo_scale = 1\n\n # Display the histogram\n for _col in range(0, GRID_AXIS):\n for _row in range(0, GRID_AXIS):\n if histogram[_col] / histo_scale > GRID_AXIS - 1 - _row:\n image_group[((_row * GRID_AXIS) + _col)].fill = index_to_rgb(\n round((_col / GRID_AXIS), 3)\n )\n else:\n image_group[((_row * GRID_AXIS) + _col)].fill = BLACK",
"def hog_histograms(*args, **kwargs): # real signature unknown\n pass",
"def histeq(im, nbr_bins = 256):\n\t# get image histogram\n\timhist, bins = pl.histogram(im.flatten(), nbr_bins, normed = True)\n\tcdf = imhist.cumsum() # cumulative distribution function\n\tcdf = 255 * cdf / cdf[-1] # normalize\n\t# use linear interpolation of cdf to find new pixel values\n\tim2 = pl.interp(im.flatten(), bins[:-1], cdf)\n\treturn im2.reshape(im.shape)",
"def init_weights(self) -> None:\n super().init_weights()\n kaiming_init(self.convs_all_levels, a=1, distribution='uniform')\n kaiming_init(self.conv_branch, a=1, distribution='uniform')\n kaiming_init(self.conv_pred, a=1, distribution='uniform')",
"def tabulate_histogram(self):\n\n # Generate a table of uniform variates\n from mitsuba.core import Float, Vector2f, Vector2u, Float32, \\\n UInt64, PCG32\n\n rng = PCG32(initseq=ek.arange(UInt64, self.sample_count))\n\n samples_in = getattr(mitsuba.core, 'Vector%if' % self.sample_dim)()\n for i in range(self.sample_dim):\n samples_in[i] = rng.next_float32() if Float is Float32 \\\n else rng.next_float64()\n\n self.pdf_start = time.time()\n\n # Invoke sampling strategy\n samples_out = self.sample_func(samples_in)\n\n if type(samples_out) is tuple:\n weights_out = samples_out[1]\n samples_out = samples_out[0]\n else:\n weights_out = Float(1.0)\n\n # Map samples into the parameter domain\n xy = self.domain.map_backward(samples_out)\n\n # Sanity check\n eps = self.bounds.extents() * 1e-4\n in_domain = ek.all((xy >= self.bounds.min - eps) &\n (xy <= self.bounds.max + eps))\n if not ek.all(in_domain):\n self._log('Encountered samples outside of the specified '\n 'domain: %s' % str(ek.compress(xy, ~in_domain)))\n self.fail = True\n\n # Normalize position values\n xy = (xy - self.bounds.min) / self.bounds.extents()\n xy = Vector2u(ek.clamp(xy * Vector2f(self.res), 0,\n Vector2f(self.res - 1)))\n\n # Compute a histogram of the positions in the parameter domain\n self.histogram = ek.zero(Float, ek.hprod(self.res))\n\n ek.scatter_add(\n target=self.histogram,\n index=xy.x + xy.y * self.res.x,\n source=weights_out\n )\n\n self.pdf_end = time.time()\n\n histogram_min = ek.hmin(self.histogram)\n if not histogram_min >= 0:\n self._log('Encountered a cell with negative sample '\n 'weights: %f' % histogram_min)\n self.fail = True\n\n self.histogram_sum = ek.hsum(self.histogram) / self.sample_count\n if self.histogram_sum > 1.1:\n self._log('Sample weights add up to a value greater '\n 'than 1.0: %f' % self.histogram_sum)\n self.fail = True",
"def __init__(self, histogram_levels: int = 256, match_points: int = 1, threshold_mean_intensity: bool = True):\n super().__init__()\n self.histogram_levels = histogram_levels\n self.match_points = match_points\n self.threshold_mean_intensity = threshold_mean_intensity",
"def test_num_bins(self):\n with Pandas() as pd:\n if pd is None:\n return\n with Numpy() as np: # noqa\n if numpy is None:\n return\n sys.stderr.write(\"\\n\")\n\n df1 = pd.DataFrame({'A': [0, 2, 4, 5, 7, 9, 11, 13, 13, 15]})\n df2 = pd.DataFrame({'A': [2, 4, 4, 6, 8, 7, 10, 14, 17, 19]})\n\n # building 1d-, 2d-, and 3d-histogram (iteratively)\n hist2 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist3 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist4 = hg.Bin(num=20, low=0.0, high=20., quantity=unit('A'))\n hist5 = hg.Bin(num=20, low=0.0, high=20., quantity=unit('A'))\n hist6 = hg.Bin(num=201, low=0.0, high=1.005)\n\n # fill them\n hist2.fill.numpy(df1)\n hist3.fill.numpy(df2)\n hist4.fill.numpy(df1)\n hist5.fill.numpy(df2)\n\n assert hist2.num_bins() == 16\n assert hist3.num_bins() == 18\n assert hist4.num_bins() == 20\n assert hist5.num_bins() == 20\n assert hist6.num_bins() == 201\n\n assert hist2.num_bins(low=10, high=25) == 15\n assert hist3.num_bins(low=10, high=25) == 15\n assert hist4.num_bins(low=10, high=25) == 10\n assert hist5.num_bins(low=10, high=25) == 10\n assert hist6.num_bins(low=0.2089, high=0.9333) == 146\n\n assert hist2.num_bins(low=-10, high=28) == 38\n assert hist3.num_bins(low=-10, high=28) == 38\n assert hist4.num_bins(low=-10, high=28) == 20\n assert hist5.num_bins(low=-10, high=28) == 20\n assert hist6.num_bins(low=0.205, high=0.935) == 146",
"def add_histogram(self, tag, values, global_step=None, bins='tensorflow'):\n values = make_np(values)\n self.vis.histogram(make_np(values), opts={'title': tag})",
"def __init__(self,name, histogram):\n\n\n assert isinstance(histogram,Histogram), \"input must be a 3ML histogram\"\n\n self._histogram = histogram #type: Histogram\n\n\n super(HistLike, self).__init__(name=name,\n x=self._histogram.mid_points,\n y=self._histogram.contents,\n yerr=self._histogram.errors,\n poisson_data=self._histogram.is_poisson)",
"def __init__(self, x, bin_edges, Nsamp):\n raw_vals, bin_edges = np.histogram(x, bins=bin_edges, normed=False)\n self.bin_edges = bin_edges\n self.bin_widths = np.diff(self.bin_edges)\n self.bin_centers = 0.5*(self.bin_edges[:-1] + self.bin_edges[1:])\n \n P, low, high = np.array([BinomialErrors(v, Nsamp) for v in raw_vals]).T\n self.raw_vals = P\n self.raw_low = low\n self.raw_high = high\n self.complete_vals = None\n self.malm_vals = None\n return",
"def histogram(self):\r\n channel = self.ui.channel_selection.itemData(self.ui.channel_selection.currentIndex())\r\n\r\n #create a window, the reference must be stored, because the window\r\n #gets destroyed when its reference is garbage collected\r\n #make plotWindow a list and append to that if multiple windows should be possible\r\n title = \"histogram of {:s} channel\".format(self.ui.channel_selection.currentText())\r\n self.plotWindow = pyguitools.SimplePlotWindow(name = title)\r\n self.plotWindow.ax1.hist(self.npImg[self.ui.y0.value():self.ui.y1.value(),\r\n self.ui.x0.value():self.ui.x1.value(), \r\n channel].flatten(),\r\n bins=self.settings[\"histogramm bins\"],\r\n range=(self.settings[\"histogramm min\"],self.settings[\"histogramm max\"]))\r\n self.plotWindow.ax1.set_xlim(self.settings[\"histogramm min\"],self.settings[\"histogramm max\"]) \r\n self.plotWindow.show()",
"def create_general_hist(self, obj):\n hist = cv2.calcHist([obj], [0, 1, 2], None, [32, 8, 8],\n [0, 256, 0, 256, 0, 256])\n print cv2.normalize(hist).flatten()\n return cv2.normalize(hist).flatten()",
"def __init__(self):\n self.counts = [0] * 10\n self.values = [0] * 10\n self.ucb_values = [0] * 10\n self.minmax = 0",
"def histogram_equalisation(image: Image, n_bins: int =256) -> (Image, array):\n\n # get image histogram\n image_hist, bins = histogram(image.flatten(), n_bins, density=True)\n cdf = image_hist.cumsum() # cumulative distribution function\n cdf = 255 * cdf / cdf[-1] # normalise\n\n # use linear interpolation of cdf to find new pixel values\n new_image = interp(image.flatten(), bins[:-1], cdf)\n\n return new_image.reshape(image.shape), cdf",
"def __init__(self, x0, P0, hist, sigma2, num):\n self.A = np.matrix([[1,0,0,0,1,0,0,0],\n [0,1,0,0,0,1,0,0],\n [0,0,1,0,0,0,1,0],\n [0,0,0,1,0,0,0,1],\n [0,0,0,0,1,0,0,1],\n [0,0,0,0,0,1,0,0],\n [0,0,0,0,0,0,1,0],\n [0,0,0,0,0,0,0,1]])\n self.dim=8\n self.num=num\n self.hist_ref=hist_ref\n self.Q = np.eye(self.dim) # Measurement noise covariance\n self.sigma2 = sigma2 # Process noise covariance\n self.x0=x0\n self.P0 = P0 # Estimated covariance\n self.states = np.random.multivariate_normal(self.x0,self.P0,self.num) # Estimated state\n self.weights=np.ones(num)/np.float(num)\n self.threshold=.5",
"def init_weight(self):\n init_bn(self.norm0)",
"def __init_finalaf(self, i,h1,classes):\n self.params['W'+i]=np.random.randn(h1,classes)*self.weight_scale\n self.params['b'+i]=np.zeros(classes)",
"def initializeDistribution(self):\n self.convertToDistrDict['Legendre'] = self.convertLegendreToUniform\n self.convertToQuadDict ['Legendre'] = self.convertUniformToLegendre\n self.measureNormDict ['Legendre'] = self.stdProbabilityNorm\n self.convertToDistrDict['ClenshawCurtis'] = self.convertLegendreToUniform\n self.convertToQuadDict ['ClenshawCurtis'] = self.convertUniformToLegendre\n self.measureNormDict ['ClenshawCurtis'] = self.stdProbabilityNorm\n self._distribution = distribution1D.BasicUniformDistribution(self.lowerBound,self.lowerBound+self.range)",
"def histeq(im,nbr_bins=256):\r\n # Calculate histogram of images\r\n imhist,bins = histogram(im.flatten(),nbr_bins,normed=True)\r\n cdf = imhist.cumsum() # cumulative distribution function\r\n cdf = 255 * cdf / cdf[-1] # 归一化\r\n # Using the linear interpolation of cumulative distribution function, the new pixel value is calculated.\r\n im2 = interp(im.flatten(),bins[:-1],cdf)\r\n return im2.reshape(im.shape), cdf",
"def __init__(self, i, h, o):\n self.Wh = np.random.randn(i + h, h)\n self.Wy = np.random.randn(h, o)\n self.bh = np.zeros((1, h))\n self.by = np.zeros((1, o))",
"def histeq(im, nbr_bins=256):\n # get image histogram\n imhist, bins = np.histogram(im.flatten(), nbr_bins, normed=True)\n cdf = imhist.cumsum() # cumulative distribution function\n cdf = 255 * cdf / cdf[-1] # normalize\n # use linear interpolation of cdf to find new pixel values\n im2 = np.interp(im.flatten(), bins[:-1], cdf)\n return im2.reshape(im.shape), cdf",
"def initialize(self, k, stats):\n\n k = k + 5\n\n qbin_sizes = 0.5 / k # Quantile sizes\n qbin_edges = 0.25 + qbin_sizes*np.arange(0, k+1) # Edge locations (in quantile terms)\n\n bin_edges = np.interp(qbin_edges, stats['quantile_basis'], stats['quantiles'])\n\n self.k = k\n self.n_bins = k + 2\n self.classes = list(range(1, self.n_bins + 2))\n self.edges = [-np.Inf] + [edge for edge in bin_edges] + [np.Inf]\n self.chi = np.zeros((2, self.n_bins + 1))\n\n dist = np.linspace(2, 1, self.n_bins) # Bins captured by observations\n scaled_dist = 0.9 * dist / dist.sum() # Scaling by 0.9 to allow for 0.1 emission prob of NaN\n self.chi[1, :-1] = scaled_dist # Paired emission dist\n self.chi[0, :-1] = np.flip(scaled_dist) # Unpaired emission dist\n self.chi[1, -1] = 0.1 # NaN observations\n self.chi[0, -1] = 0.1 # NaN observations\n\n self.n_params = 2*(self.n_bins-2)",
"def __init__(self, i, h, o):\r\n self.Wh = np.random.randn(h + i, h)\r\n self.Wy = np.random.randn(h, o)\r\n self.bh = np.zeros((1, h))\r\n self.by = np.zeros((1, o))",
"def __init__(self, nBin, realHists, imagHists, normHists, indexHists, coma, integralReal = None, integralImag = None):\n\t\tself.bin3pi = nBin\n\t\tself.binCenter = 0.52 + .04*nBin\n\t\tif not len(realHists) == len(imagHists) or not len(imagHists) == len(normHists) or not len(normHists) == len(indexHists):\n\t\t\tprint \"Numbers of histogams do not match:\"\n\t\t\tprint \" real:\",len(realHists)\n\t\t\tprint \" imag:\",len(imagHists)\n\t\t\tprint \" norm:\",len(normHists)\n\t\t\tprint \" index:\",len(indexHists)\n\t\t\traise ValueError(\"Histogram size mismatch\")\n\t\tself.nSect = len(realHists)\n\t\tif self.nSect == 0:\n\t\t\traise ValueError(\"No histograms given.\")\n\t\tself.nBins = [ ]\n\t\tself.totalBins = 0\n\t\tself.sectors = [ ]\n\t\tfor s in range(self.nSect):\n\t\t\tbinMax = 0\n\t\t\tfor bin in range(realHists[s].GetNbinsY()):\n\t\t\t\tm2Pi = realHists[s].GetYaxis().GetBinCenter( bin+1)\n\t\t\t\tm3Pi = realHists[s].GetXaxis().GetBinCenter(nBin+1)\n\t\t\t\tif utils.isValidPhaseSpace(m3Pi, m2Pi):\n#\t\t\t\tif realHists[s].GetBinContent(nBin + 1, bin+1) != 0.:\n\t\t\t\t\tbinMax = bin\n\t\t\tself.nBins.append(binMax+1)\n\t\t\tself.totalBins += binMax+1\n\t\t\tself.sectors.append(realHists[s].GetTitle().split('_')[0])\n\t\tself.reals = np.zeros((self.totalBins))\n\t\tself.imags = np.zeros((self.totalBins))\n\t\tself.norms = np.zeros((self.totalBins))\n#\t#\tCMwrite(\"__init__\")\n\t\tself.coma = np.zeros((2*self.totalBins,2*self.totalBins))\n\t\tself.hasIntegralMatrix = False\n\t\tif integralReal and integralImag:\n\t\t\tself.hasIntegralMatrix = True\n\t\t\tself.integralMatrix = np.zeros((self.totalBins, self.totalBins), dtype = complex)\n\t\telif integralReal:\n\t\t\traise RuntimeError(\"Cannot handle real integral matrix only, need also imaginary\")\n\t\telif integralImag:\n\t\t\traise RuntimeError(\"Cannot handle imaginary integral matrix only, need also real\")\n\t\tself.binCenters = np.zeros((self.totalBins))\n\t\tself.numLim = 2.e-8\n\t\tself.ownPinv = True\n\t\tcount = 0\n\t\tfor s in range(self.nSect):\n\t\t\tfor bin in range(self.nBins[s]):\n\t\t\t\tself.reals[count] = realHists[s].GetBinContent(nBin + 1, bin + 1)\n\t\t\t\tself.imags[count] = imagHists[s].GetBinContent(nBin + 1, bin + 1)\n\t\t\t\tself.norms[count] = normHists[s].GetBinContent(nBin + 1, bin + 1)\n\t\t\t\tself.binCenters[count] = realHists[s].GetYaxis().GetBinCenter(bin + 1)\n\t\t\t\tcomaIndex = int(round(indexHists[s].GetBinContent(nBin + 1, bin + 1)))\n\t\t\t\tcount2 = 0\n\t\t\t\tfor s2 in range(self.nSect):\n\t\t\t\t\tfor bin2 in range(self.nBins[s2]):\n\t\t\t\t\t\tcomaIndex2 = int(round(indexHists[s2].GetBinContent(nBin + 1, bin2 + 1)))\n\t\t\t\t\t\tself.coma[2*count , 2*count2 ] = coma.GetBinContent(2*comaIndex+1, 2*comaIndex2+1)\n\t\t\t\t\t\tself.coma[2*count , 2*count2+1] = coma.GetBinContent(2*comaIndex+1, 2*comaIndex2+2)\n\t\t\t\t\t\tself.coma[2*count+1, 2*count2 ] = coma.GetBinContent(2*comaIndex+2, 2*comaIndex2+1)\n\t\t\t\t\t\tself.coma[2*count+1, 2*count2+1] = coma.GetBinContent(2*comaIndex+2, 2*comaIndex2+2)\n\t\t\t\t\t\tif self.hasIntegralMatrix:\n\t\t\t\t\t\t\tval = integralReal.GetBinContent(comaIndex+1, comaIndex2+1) + 1.j*integralImag.GetBinContent(comaIndex+1, comaIndex2+1)\n\t\t\t\t\t\t\tself.integralMatrix[count,count2] = val\n\t\t\t\t\t\tcount2 += 1\n\t\t\t\tcount +=1\n\t\tself.hasMassRange = False\n\t\tself.makeComaInv()\n\t\tself.borders = [0]\n\t\tfor i in range(self.nSect):\n\t\t\tself.borders.append(self.borders[-1] + self.nBins[i])\n\t\tself.nZero = 0\n\t\tself.zeroModes = [ 
]\n\t\tself.zeroModeNumbers = [ ]\n\t\tself.zeroModeTitles = [ ]\n\t\tself.zeroEigenvalues = [ ]\n\t\tself.hasTheo = False\n\t\tself.chi2init = False\n\t\tself.zeroModesRemovedFromComa = False\n\t\tself.globalPhaseRemoved = False\n\t\tself.specialCOMAs = { }\n\t\tself.hasZMP = False\n\t\tself.zeroModeParameters = None\n\t\tself.hasRandomizedAmplitudes = False",
"def initializeDistribution(self):\n self.minVal = min(math.exp(self.upperBound),math.exp(self.lowerBound))\n self.maxVal = max(math.exp(self.upperBound),math.exp(self.lowerBound))",
"def equalize_color(img):\n imgeq = numpy.zeros_like(img, dtype='float')\n for i in xrange(img.shape[2]):\n imgeq[:,:,i] = exposure.equalize_hist(img[:,:,i])\n return imgeq",
"def hist_bin_opt (x, minbin=20, maxbin=600, spacing=10, N_trials=1):\n bin_checks = np.arange(minbin, maxbin, spacing)\n # bin_checks = np.linspace(150, 300, 16)\n costs = np.zeros(len(bin_checks))\n i = 0\n # this might be vectorizable in np\n for n_bins in bin_checks:\n # use np.histogram to do the numerical minimization\n pdf, bin_edges = np.histogram(x, n_bins)\n # calculate bin width\n # some discrepancy here but should be fine\n w_bin = np.unique(np.diff(bin_edges))\n if len(w_bin) > 1: w_bin = w_bin[0]\n # calc mean and var\n kbar = np.mean(pdf)\n kvar = np.var(pdf)\n # calc cost\n costs[i] = (2.*kbar - kvar) / (N_trials * w_bin)**2.\n i += 1\n # find the bin size corresponding to a minimization of the costs\n bin_opt_list = bin_checks[costs.min() == costs]\n bin_opt = bin_opt_list[0]\n return bin_opt",
"def get_histogram(self):\n\n for bin in range(self.bins.size):\n bin_inf = self.bins[bin]\n try: bin_sup = self.bins[bin + 1]\n except IndexError: bin_sup = self.vmax\n self.hist[bin] = np.sum(\n (self.values >= bin_inf)*(self.values < bin_sup))\n\n binned_values = np.sum(self.hist)\n if binned_values == 0: return self.hist # no binned value\n else: self.hist /= np.sum(self.hist)\n return self.hist",
"def __init__(self, bandits: List[Bandit], epsilon:float = None):\n self.bandits = bandits\n self.epsilon = epsilon\n # mu_pri and var_pri are hypermaterers for prior distribution\n self.mu_pri = np.zeros(len(self.bandits))\n self.var_pri = np.ones(len(self.bandits))*5\n # mu and var are hyperparameters for posterior distribution\n self.mu = self.mu_pri\n self.var = self.var_pri\n self.var0 = 1 # 1 is taken but any can be taken no prob upto a limit constant of inintial distribution\n self.logging = Logging()",
"def __init__(self, **kwargs): \n self.kwargs = kwargs\n\n # pretty figure up\n prettyplot() \n pretty_colors = prettycolors() \n \n self.fig = plt.figure(1) \n self.sub = self.fig.add_subplot(111) \n\n self.hist_max = 0.0",
"def compareHistograms(reference,model,name):\n# comparison = TH1D('comparison'+name,'', reference.GetNbinsX(),\n# reference.GetBinLowEdge(1),reference.GetBinLowEdge(reference.GetNbinsX())+reference.GetBinWidth(1))\n comparison = reference.Clone('comparison'+name)\n\n maxY,minY=2,0\n #maxY,minY=5,-5\n content, uncertainty = {}, {} \n for bin in range(1,reference.GetNbinsX()+1):\n reference_content= reference.GetBinContent(bin)\n reference_error = reference.GetBinError(bin)**2 # squared\n model_content = 0.0\n model_error = 0.0\n if model.Class_Name()=='THStack':\n for h in model.GetHists():\n model_content+=h.GetBinContent(bin)\n model_error+=h.GetBinError(bin)**2 # squared\n else:\n model_content= model.GetBinContent(bin)\n model_error = model.GetBinError(bin)**2 # squared\n\n #### Data/MC ###\n if True:\n try: \n comparison.SetBinContent(bin,min(max(reference_content/model_content, minY),maxY))\n comparison.SetBinError(bin,(reference_content/model_content)*math.sqrt(float(reference_error)/(reference_content**2) + float(model_error)/(model_content**2)))\n except: \n comparison.SetBinContent(bin,1)\n comparison.SetBinError(bin,0)\n\n #### Chi ###\n if False:\n try: \n error = math.sqrt(model_error+reference_error)\n comparison.SetBinContent(bin,min(max((reference_content - model_content)/error, minY),maxY))\n comparison.SetBinError(bin, 1 )\n except: \n comparison.SetBinContent(bin,0)\n comparison.SetBinError(bin,1)\n\n #comparison.SetAxisRange(minY,maxY,'Y')\n comparison.SetAxisRange(0.5,1.5,'Y')\n return comparison",
"def init_bn(bn):\n \n bn.bias.data.fill_(0.)\n bn.running_mean.data.fill_(0.)\n bn.weight.data.fill_(1.)\n bn.running_var.data.fill_(1.)",
"def __init__(self):\n self.buckets = 1009\n self.table = [{} for _ in range(self.buckets)]",
"def __init__(self, i, h, o):\n self.Wh = np.random.normal(size=(i+h, h))\n self.Wy = np.random.normal(size=(h, o))\n self.bh = np.zeros((1, h))\n self.by = np.zeros((1, o))",
"def __init__(self, size, parameters):\n\n self.weights = self.init_weights(size)\n self.alpha = parameters['alpha']\n self.epsilon = parameters['epsilon']\n self.gamma = parameters['gamma']\n self.value = 0.0 #np.random.random()",
"def test_bin_entries(self):\n with Pandas() as pd:\n if pd is None:\n return\n with Numpy() as np:\n if numpy is None:\n return\n sys.stderr.write(\"\\n\")\n\n df1 = pd.DataFrame(\n {'A': [0, 1, 2, 3, 4, 3, 2, 1, 1, 1], 'C': ['f1', 'f3', 'f4', 'f3', 'f4', 'f2', 'f2', 'f1', 'f3', 'f4']})\n df2 = pd.DataFrame(\n {'A': [2, 3, 4, 5, 7, 4, 6, 5, 7, 8], 'C': ['f7', 'f3', 'f5', 'f8', 'f9', 'f2', 'f3', 'f6', 'f7', 'f7']})\n\n # building 1d-, 2d-, and 3d-histogram (iteratively)\n hist0 = hg.Categorize(unit('C'))\n hist1 = hg.Categorize(unit('C'))\n hist2 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist3 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist4 = hg.Bin(num=10, low=0.0, high=10., quantity=unit('A'))\n hist5 = hg.Bin(num=10, low=0.0, high=10., quantity=unit('A'))\n\n # fill them\n hist0.fill.numpy(df1)\n hist1.fill.numpy(df2)\n hist2.fill.numpy(df1)\n hist3.fill.numpy(df2)\n hist4.fill.numpy(df1)\n hist5.fill.numpy(df2)\n\n labels0 = hist0.bin_labels()\n labels1 = hist1.bin_labels()\n centers2 = hist2.bin_centers()\n centers3 = hist3.bin_centers()\n centers = hist4.bin_centers()\n\n import numpy as np\n np.testing.assert_array_equal(hist0.bin_entries(), [2., 2., 3., 3.])\n np.testing.assert_array_equal(hist1.bin_entries(), [1., 2., 1., 1., 3., 1., 1.])\n np.testing.assert_array_equal(hist0.bin_entries(labels=labels1), [2., 3., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist1.bin_entries(labels=labels0), [0., 1., 2., 0.])\n\n np.testing.assert_array_equal(hist2.bin_entries(), [1., 4., 2., 2., 1.])\n np.testing.assert_array_equal(hist3.bin_entries(), [1., 1., 2., 2., 1., 2., 1.])\n np.testing.assert_array_equal(hist4.bin_entries(), [1., 4., 2., 2., 1., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist5.bin_entries(), [0., 0., 1., 1., 2., 2., 1., 2., 1., 0.])\n\n np.testing.assert_array_equal(hist2.bin_entries(xvalues=centers3), [2., 2., 1., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist3.bin_entries(xvalues=centers2), [0., 0., 1., 1., 2.])\n np.testing.assert_array_equal(hist2.bin_entries(xvalues=centers), [\n 1., 4., 2., 2., 1., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist3.bin_entries(xvalues=centers), [\n 0., 0., 1., 1., 2., 2., 1., 2., 1., 0.])\n\n np.testing.assert_array_equal(hist2.bin_entries(low=2.1, high=11.9), [\n 2., 2., 1., 0., 0., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist3.bin_entries(low=1.1, high=5.4), [0., 1., 1., 2., 2.])\n np.testing.assert_array_equal(hist4.bin_entries(low=2.1, high=11.9), [2., 2., 1., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist5.bin_entries(low=1.1, high=5.4), [0., 1., 1., 2., 2.])",
"def hist_equ(raw_img=None, file_name=None):\n\n if raw_img is None:\n raw_img = cv.imread(file_name, cv.IMREAD_GRAYSCALE)\n\n norm = Normalize(vmin=0, vmax=255)\n L = 2 ** 8\n bins = range(L + 1)\n # row, col = raw_img.shape\n\n # input_hist = np.zeros(L, int)\n # for i in raw_img.flat:\n # input_hist[i] += 1\n\n # input_hist = histogram(raw_img)\n input_hist, _ = np.histogram(raw_img.flat, bins=bins, density=True)\n # print(file_name, 'raw', np.count_nonzero(input_hist))\n\n # s = np.zeros(L, int)\n # for k in range(L):\n # s[k] = (L - 1) * sum(input_hist[:k + 1])\n\n s = np.array([(L - 1) * sum(input_hist[:k + 1]) for k in range(L)])\n\n out_img = np.array([s[r] for r in raw_img], int).reshape(raw_img.shape)\n # output_hist = histogram(out_img)\n output_hist, _ = np.histogram(out_img.flat, bins=bins, density=True)\n # print(file_name, 'equalized', np.count_nonzero(output_hist))\n\n # %% plots\n '''\n plt.subplot(121)\n plt.imshow(raw_img, cmap='gray', norm=norm)\n plt.title(\"Raw \" + file_name)\n\n plt.subplot(122)\n plt.imshow(out_img, cmap='gray', norm=norm)\n plt.title(\"Equalized \" + file_name)\n # plt.savefig(file_name + \"_comparison.png\")\n plt.show()\n\n plt.title(\"Histogram of \" + file_name)\n plt.bar(range(L), input_hist)\n plt.bar(range(L), output_hist)\n plt.legend(('raw image', 'equalized image'))\n # plt.savefig(file_name + \"_histogram.png\")\n plt.show()\n\n plt.plot(range(L), s)\n plt.title(\"Histogram equalization transformation for \" + file_name)\n plt.xlabel('$r_k$')\n plt.ylabel('$s_k$')\n plt.show()\n '''\n\n return out_img, output_hist, input_hist, s",
"def init_bn(bn):\n bn.bias.data.fill_(0.0)\n bn.weight.data.fill_(1.0)",
"def Fill(self, *args, **kwargs):\n self._varexp = kwargs.get(\"varexp\")\n self._cuts = kwargs.get(\"cuts\", [])\n self._weight = kwargs.get(\"weight\", \"1\")\n if len(args) == 1 and isinstance(args[0], (str, unicode)):\n IOManager.FillHistogram(self, args[0], **kwargs)\n if not kwargs.get(\"append\", False):\n self._errorband.Reset()\n self._errorband.Add(self)\n else:\n super(Histo1D, self).Fill(*args)",
"def eqHist(img):\n\tgray = grayscale(img)\n\tequ = cv2.equalizeHist(gray)\n\treturn equ",
"def test_irregular(self):\n import numpy as np\n import histogrammar\n\n h = histogrammar.IrregularlyBin([0, 10, 20, 40, 100])\n h.fillnumpy([-5, 5, 5, 50, 10, 100, 1000, 50, 50])\n\n np.testing.assert_array_equal(h.bin_entries(), [1., 2., 1., 0., 3., 2.])\n np.testing.assert_array_equal(h.bin_edges(), [float('-inf'), 0., 10., 20., 40., 100., float('inf')])\n np.testing.assert_array_equal(h.bin_centers(), [float('-inf'), 5., 15., 30., 70., float('inf')])\n assert h.num_bins() == 6\n assert h.n_bins == 6\n np.testing.assert_almost_equal(h.mpv, 70.)\n\n np.testing.assert_array_equal(h.bin_entries(10, 40), [1., 0.])\n np.testing.assert_array_equal(h.bin_edges(10, 40), [10., 20., 40.])\n np.testing.assert_array_equal(h.bin_centers(10, 40), [15., 30.])\n assert h.num_bins(10, 40) == 2\n\n np.testing.assert_array_equal(h.bin_entries(5, 110), [2., 1., 0., 3., 2.])\n np.testing.assert_array_equal(h.bin_edges(5, 110), [0., 10., 20., 40., 100., float('inf')])\n np.testing.assert_array_equal(h.bin_centers(5, 110), [5., 15., 30., 70., float('inf')])\n assert h.num_bins(5, 110) == 5",
"def histo_repro(h):\n k = np.size(h) - 1\n if k == 1:\n return 0.\n nf = np.dot(h, np.arange(k + 1)) / k\n if nf == 0:\n return 0.\n n1k = np.arange(1, k + 1)\n res = 1.0 * np.dot(h[1:], n1k * (n1k - 1)) / (k * (k - 1))\n return res / nf",
"def initializeDistribution(self):\n if self.lowerBoundUsed == False and self.upperBoundUsed == False:\n self._distribution = distribution1D.BasicBinomialDistribution(self.n,self.p)\n else:\n self.raiseAnError(IOError,'Truncated Binomial not yet implemented')",
"def setup_bins(self):\n width = int((self.max - self.min) / self.bin_size)\n bins = {\n i * width + self.min: (idx, idx + self.bin_size)\n for i, idx in enumerate(range(0, len(self.nums), self.bin_size))\n }\n return bins",
"def test_bin_edges(self):\n with Pandas() as pd:\n if pd is None:\n return\n with Numpy() as np:\n if numpy is None:\n return\n sys.stderr.write(\"\\n\")\n\n df1 = pd.DataFrame({'A': [0, 1, 2, 3, 4, 3, 2, 1, 1, 1]})\n df2 = pd.DataFrame({'A': [2, 3, 4, 5, 7, 4, 6, 5, 7, 8]})\n\n # building test histograms\n hist2 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist3 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist4 = hg.Bin(num=10, low=0.0, high=10., quantity=unit('A'))\n hist5 = hg.Bin(num=10, low=0.0, high=10., quantity=unit('A'))\n hist6 = hg.Bin(num=201, low=0.0, high=1.005)\n\n # fill them\n hist2.fill.numpy(df1)\n hist3.fill.numpy(df2)\n hist4.fill.numpy(df1)\n hist5.fill.numpy(df2)\n\n import numpy as np\n np.testing.assert_array_equal(hist2.bin_edges(), [0., 1., 2., 3., 4., 5.])\n np.testing.assert_array_equal(hist3.bin_edges(), [2., 3., 4., 5., 6., 7., 8., 9.])\n np.testing.assert_array_equal(hist4.bin_edges(), [0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])\n np.testing.assert_array_equal(hist5.bin_edges(), [0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])\n\n np.testing.assert_array_equal(hist2.bin_edges(low=2.1, high=11.9), [\n 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12.])\n np.testing.assert_array_equal(hist3.bin_edges(low=1.1, high=6), [1., 2., 3., 4., 5., 6.])\n np.testing.assert_array_equal(hist4.bin_edges(low=2.1, high=11.9), [\n 2., 3., 4., 5., 6., 7., 8., 9., 10.])\n np.testing.assert_array_equal(hist5.bin_edges(low=1.1, high=5.4), [1., 2., 3., 4., 5., 6.])\n\n assert len(hist6.bin_edges()) == 202\n assert len(hist6.bin_edges(low=0.2089, high=0.9333)) == 147\n assert len(hist6.bin_edges(low=0.205, high=0.935)) == 147",
"def _new_hist(self, _hist, memo=NOTHING):\n\n other = self.__class__(_hist)\n for item in self.__dict__:\n if item not in [\"axes\", \"_hist\"]:\n if memo is NOTHING:\n other.__dict__[item] = self.__dict__[item]\n else:\n other.__dict__[item] = copy.deepcopy(self.__dict__[item], memo)\n other.axes = other._generate_axes_()\n for ax in other.axes:\n if memo is NOTHING:\n ax._ax.metadata = copy.copy(ax._ax.metadata)\n else:\n ax._ax.metadata = copy.deepcopy(ax._ax.metadata, memo)\n return other"
] | [
"0.64742666",
"0.64495724",
"0.623169",
"0.6206092",
"0.6172548",
"0.615209",
"0.60600775",
"0.6026051",
"0.60240495",
"0.5974756",
"0.594645",
"0.5923977",
"0.59031487",
"0.5899694",
"0.5876003",
"0.5854515",
"0.58174914",
"0.5769434",
"0.57519495",
"0.5749652",
"0.57370484",
"0.5731348",
"0.57181156",
"0.57170993",
"0.57167625",
"0.5683541",
"0.5681447",
"0.5666983",
"0.5661015",
"0.5654981",
"0.5622323",
"0.5621234",
"0.561892",
"0.56064343",
"0.5591328",
"0.5550777",
"0.5541232",
"0.5534435",
"0.553298",
"0.5501136",
"0.5500356",
"0.5477847",
"0.547769",
"0.5455211",
"0.54531586",
"0.54509956",
"0.544339",
"0.5431249",
"0.5428208",
"0.5424752",
"0.541449",
"0.5391119",
"0.53827244",
"0.53814983",
"0.5380655",
"0.53718597",
"0.53674924",
"0.53553414",
"0.53461045",
"0.53432095",
"0.5333419",
"0.5329049",
"0.5327296",
"0.53221196",
"0.53168094",
"0.5307338",
"0.53052133",
"0.5295389",
"0.5285928",
"0.5284079",
"0.5280755",
"0.5278444",
"0.52692676",
"0.52683413",
"0.52670115",
"0.5260525",
"0.5256549",
"0.525603",
"0.5252531",
"0.5251188",
"0.52460533",
"0.5244062",
"0.5240301",
"0.5231792",
"0.52300656",
"0.523",
"0.5227507",
"0.5222407",
"0.5222214",
"0.5214568",
"0.52134496",
"0.5208323",
"0.52066475",
"0.5201278",
"0.5194714",
"0.5188723",
"0.5184175",
"0.51779056",
"0.5177205",
"0.5176705",
"0.51721776"
] | 0.0 | -1 |
Generator function that returns collection members. | def _get_collection(self, collection_uri, request_headers=None):
# get the collection
status, headers, thecollection = self._rest_get(collection_uri)
if status != 200:
msg = self._get_extended_error(thecollection)
raise exception.IloError(msg)
while status < 300:
# verify expected type
# Don't limit to version 0 here as we will rev to 1.0 at some
# point hopefully with minimal changes
ctype = self._get_type(thecollection)
if (ctype not in ['Collection.0', 'Collection.1']):
raise exception.IloError("collection not found")
# if this collection has inline items, return those
# NOTE: Collections are very flexible in how they represent
# members. They can be inline in the collection as members
# of the 'Items' array, or they may be href links in the
# links/Members array. They could actually be both. Typically,
# iLO implements the inline (Items) form only when the collection
# is read-only. We have to render it with the href links when an
# array contains PATCHable items because it's complex to PATCH
# inline collection members.
if 'Items' in thecollection:
# iterate items
for item in thecollection['Items']:
# if the item has a self uri pointer,
# supply that for convenience.
memberuri = None
if 'links' in item and 'self' in item['links']:
memberuri = item['links']['self']['href']
yield 200, None, item, memberuri
# else walk the member links
elif ('links' in thecollection and
'Member' in thecollection['links']):
# iterate members
for memberuri in thecollection['links']['Member']:
# for each member return the resource indicated by the
# member link
status, headers, member = self._rest_get(memberuri['href'])
yield status, headers, member, memberuri['href']
# page forward if there are more pages in the collection
if ('links' in thecollection and
'NextPage' in thecollection['links']):
next_link_uri = (collection_uri + '?page=' + str(
thecollection['links']['NextPage']['page']))
status, headers, thecollection = self._rest_get(next_link_uri)
# else we are finished iterating the collection
else:
break | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __iter__(self) -> Generator[str, None, None]:\n\n yield from self.__dict__[\"members\"]",
"def get_members():",
"def getMembers():",
"def getMembers():",
"def getMembers():",
"def getMembers():",
"def members(self) -> Generator[discord.Member, None, None]:\n for guild in self.guilds:\n for member in guild.members:\n yield member",
"def items(self):\n for name in self.fields:\n yield name, getattr(self, name)",
"def _named_members(self, get_members_fn, prefix='', recurse=True):\n memo = set()\n modules = self.named_modules(prefix=prefix) if recurse else [(prefix, self)]\n for module_prefix, module in modules:\n members = get_members_fn(module)\n for k, v in members:\n if v is None or v in memo:\n continue\n memo.add(v)\n name = module_prefix + ('.' if module_prefix else '') + k\n # translate name to ori_name\n if name in self.node_name_map:\n name = self.node_name_map[name]\n yield name, v",
"def __iter__(self):\n yield self._extent.members()\n yield self._intent.members()",
"def items(self):\n for attr in ('name', 'birthDate', 'identifier'):\n value = getattr(self, attr, None)\n if value:\n yield attr, value",
"def get_members(self):\n return self._members",
"def __iter__(self):\n for metatag in self.meta.findall(CN('meta:user-defined')):\n yield (metatag.get(CN('meta:name')), metatag.text)",
"def __iter__(self):\n for name, field in self.iterate_over_fields():\n yield name, field",
"def members(self):\r\n return Members(self)",
"def members(self):\n return self._members",
"def members(self) -> object:\n return self._members",
"def test_get_members(self):\n pass",
"def items(self):\n for metakey in self:\n yield metakey, self[metakey]",
"def get_named_members(model, get_members_fn, prefix='', recurse=True):\n memo = set()\n modules = model.named_modules(prefix=prefix) if recurse else [(prefix, model)]\n for i, (module_prefix, module) in enumerate(modules):\n members = get_members_fn(module)\n for k, v in members:\n if v is None or v in memo:\n print(\"WARNING: reused module parameter\")\n continue\n memo.add(v)\n name = module_prefix + ('.' if module_prefix else '') + k\n yield name, v, i",
"def members(self):\n return self.properties.get('members',\n DirectoryObjectCollection(self.context, ResourcePath(\"members\", self.resource_path)))",
"def __iter__(self):\n for col in self.columns:\n yield self.fields[col]",
"def members(self) -> \"List[str]\":\n return self._attrs.get(\"members\")",
"def members(self) -> \"List[str]\":\n return self._attrs.get(\"members\")",
"def members(self) -> \"List[str]\":\n return self._attrs.get(\"members\")",
"def members(cls) -> Mapping[str, Member]:\n return cls.__atom_members__",
"def iterate(cls):\n for name, value in vars(cls).iteritems():\n if name.startswith('__'):\n continue\n yield (name, value)",
"def __iter__(self):\n return (x for x in vars(self))",
"def __iter__(self):\n cursor = 0\n while cursor < len(self):\n yield self._items[cursor]\n cursor += 1",
"def __iter__(self):\n return ((field, getattr(self, field)) for (field, _) in self.fields)",
"def __iter__(self):\n return (self.get_node(node_id) for node_id in self._collection.all_keys())",
"def __iter__(self):\n cursor=0\n while cursor<len(self):\n yield self._item[cursor].key\n cursor+=1",
"def items(self):\r\n for column in self.table.columns:\r\n yield (column, self[column.name])",
"def __getitem__(self, i):\n return self.members[i]",
"def member_names(self) -> Iterator[str]:\n return yield_column_names(self.schema)",
"def items(self):\n return list(self.items_generator())",
"def __iter__(self):\n\t\tfields = 'fieldname', 'text', 'docfreq', 'indexfreq'\n\t\tcur = self.index.collection.find(fields=fields).sort('fieldname')\n\t\treturn (tuple(rec[field] for field in fields) for rec in cur)",
"def children_of(self, member_name):\n for relationship in self.relationships[member_name]:\n yield self.members[relationship.child]",
"def __iter__(self):\n attr = gv.firstattr(self.handle)\n while gv.ok(attr):\n yield gv.nameof(attr), \\\n decode_page(gv.getv(self.handle, attr))\n attr = gv.nextattr(self.handle, attr)",
"def members_with_status(self,\n status: Status) -> 'typing.Generator[dt_member.Member, None, None]':\n for member in self.members.values():\n if member.status == status:\n yield member",
"def items(self):\n return self.__items(())",
"def items(self):\n for ts in self:\n yield ts, self[ts]",
"def __iter__(self):\n for v in self._items:\n yield v",
"def members(self, items):\n pass",
"def get(self, *args):\n return _libsbml.ListOfMembers_get(self, *args)",
"def test_list_members(self):\n pass",
"def __iter__(self):\n\t\treturn self.keys()",
"def collectionContainer(): # This name cannot be changed\n return [sampleMethodObject(), sampleMethodObject()]",
"def items(self, deep=False):\n for var in self.vars(deep):\n yield var, self[var]",
"def all(cls):\n return [(k, v) for k, v in cls.__members__.items()]",
"def __iter__(self) -> Generator:\n\t\treturn (article for article in self._articles)",
"def lmembers(self):\n return self.lrange(0, -1)",
"def __iter__(self):\n items = self._fetch()\n for item in items:\n yield item",
"def __call__(self):\n for name in self:\n try:\n yield getattr(self, name)\n except AttributeError:\n raise KeyError(name)",
"def items(self):\n\t\tfor k, vs in self.multiple.items():\n\t\t\tfor v in vs: yield k, v",
"def __iter__():",
"def __iter__():",
"def __iter__():",
"def __iter__():",
"def __iter__(self) -> Tuple[str, Any]:\n for attr_name, attr_val in self.__dict__.items():\n yield attr_name, attr_val",
"def members(self):\r\n return GroupMembers(self)",
"def __call__(self):\n return self.get_items()",
"def __iter__(self):\n for datum in self.data[self.name]:\n yield datum",
"def items():",
"def list(self):\n sql = \"\"\"SELECT\n rowid,\n first,\n last,\n introducedDate\n FROM members\n ORDER BY rowid\"\"\"\n rows = list(self.cursor.execute(sql))\n return [self.buildMember(row) for row in rows]",
"def __iter__(self):\n for item in (self[i] for i in range(len(self))):\n yield item",
"def __iter__(self):\n for item in (self[i] for i in range(len(self))):\n yield item",
"def __iter__(self):\n return self.keys()",
"def __iter__(self):\n for item in (self[i] for i in range(len(self))):\n yield item",
"def __iter__(self):\n for index in range(len(self)):\n yield self[index]",
"def ldap_get_intro_members():\n return _ldap_get_group_members('intromembers')",
"def __iter__(self):\r\n for column_id in self._columns.keys():\r\n yield column_id",
"def iterall(self):\r\n return (column for name, column in self.iteritems())",
"def _members_of(self, full_path, include, exclude, should_include_private):\n def rst_for(doclet):\n renderer = (AutoFunctionRenderer if doclet.get('kind') in ['function', 'typedef']\n else AutoAttributeRenderer)\n # Pass a dummy arg list with no formal param list so\n # _formal_params() won't find an explicit param list in there and\n # override what it finds in the code:\n return renderer(self._directive, self._app, arguments=['dummy']).rst(\n [doclet['name']],\n 'dummy_full_path',\n doclet,\n use_short_name=False)\n\n def doclets_to_include(include):\n \"\"\"Return the doclets that should be included (before excludes and\n access specifiers are taken into account).\n\n This will either be the doclets explicitly listed after the\n ``:members:`` option, in that order; all doclets that are\n members of the class; or listed members with remaining ones\n inserted at the placeholder \"*\".\n\n \"\"\"\n doclets = self._app._sphinxjs_doclets_by_class[tuple(full_path)]\n if not include:\n # Specifying none means listing all.\n return sorted(doclets, key=lambda d: d['name'])\n included_set = set(include)\n\n # If the special name * is included in the list, include\n # all other doclets, in sorted order.\n if '*' in included_set:\n star_index = include.index('*')\n not_included = sorted(d['name'] for d in doclets if d['name'] not in included_set)\n include = include[:star_index] + not_included + include[star_index + 1:]\n included_set.update(not_included)\n\n # Even if there are 2 doclets with the same short name (e.g. a\n # static member and an instance one), keep them both. This\n # prefiltering step should make the below sort less horrible, even\n # though I'm calling index().\n included_doclets = [d for d in doclets if d['name'] in included_set]\n # sort()'s stability should keep same-named doclets in the order\n # JSDoc spits them out in.\n included_doclets.sort(key=lambda d: include.index(d['name']))\n return included_doclets\n\n return '\\n\\n'.join(\n rst_for(doclet) for doclet in doclets_to_include(include)\n if (doclet.get('access', 'public') in ('public', 'protected')\n or (doclet.get('access') == 'private' and should_include_private))\n and doclet['name'] not in exclude)",
"def members(self):\n return self.find_users_by_rel('member')",
"def iterProperties(cls):\n meta = cls.staticMetaObject\n for i in range(meta.propertyCount()):\n yield meta.property(i).name()",
"def __iter__(self):\n return self.words",
"def __iter__(self):\n return iter(vars(self.obj))",
"def iteritems(self):\n for item in self.features.iteritems():\n yield item",
"def __iter__(self):\n\t\tfor key, value in self.__dict__.iteritems():\n\t\t\tif key[0] != '_':\n\t\t\t\tyield value",
"def __iter__(self):\n while True:\n for item in (self[i] for i in range(len(self))):\n yield item",
"def items():\n for i in self._iter_restrict(zeros, ones):\n yield self.pcdata[i]",
"def generate_plain_collection(self):\n raise NotImplementedError",
"def iter(self):\n return []",
"def iteritems(self):\r\n for name in self.table.sequence:\r\n if name not in self.table.exclude:\r\n yield (name, self.columns[name])",
"def deep_get_members(name, predicate=None, private=False):\r\n seen = set()\r\n for name in deep_iter_modules(name):\r\n mod = import_dotted_name(name)\r\n name = name.rsplit('.', 1)[-1]\r\n if not private and name.startswith('_'):\r\n continue\r\n for name, value in getmembers(mod, predicate):\r\n if id(value) in seen or not private and name.startswith('_'):\r\n continue\r\n yield value\r\n seen.add(id(value))",
"def __iter__(self):\r\n for attr, value in self.__dict__.items():\r\n a = getattr(self, attr)\r\n if type(a) is list:\r\n if len(a) > 0:\r\n yield attr, a",
"def __iter__(self):\n for i in range(len(self)):\n yield self[i]",
"def __iter__(self):\n for i in range(len(self)):\n yield self[i]",
"def iteritems(self):",
"def iterator(self):\n yield",
"def _elements(self):\n return list(islice(self.generate(), None))",
"def __iter__(self): # pragma: no cover\r\n return ((k, v) for k, v in vars(self).items() if not k.startswith(\"_\"))",
"def __iter__(self):\n for value in self.__dict__.values():\n yield value",
"def items(self):\n for element, value in self.focals.items():\n yield (element, value)",
"def group_get_members(self,groupname):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_group_get_members_query+\" ORDER BY $username_field$\",{'groupname':groupname,'username_field':self.sql_username_field,'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: group_get_members: %s\" % (query,))\n\n cursor.execute(query)\n desc=[i[0] for i in cursor.description]\n for row in cursor:\n dictrow=dict(zip(desc,row))\n yield dictrow[self.sql_username_field]",
"def __iter__(self):\n yield from self.gen",
"def __iter__(self):\n\n for each in list(self.keys()):\n yield each",
"def __iter__(self):\n return iter(self._items)\n # to use a generator, it would look like this...\n # for item in self._items: yield item",
"def __iter__(self):\n return self.iter_sections()",
"def __iter__(self) -> Iterator[UUID]:\n yield from self.__mutants.keys()"
] | [
"0.78000563",
"0.7432869",
"0.68755776",
"0.68755776",
"0.68755776",
"0.68755776",
"0.67106056",
"0.6568998",
"0.6439682",
"0.63878435",
"0.637143",
"0.63655144",
"0.6334077",
"0.6326549",
"0.6291822",
"0.6231462",
"0.6227288",
"0.62206596",
"0.6170464",
"0.6161556",
"0.6084516",
"0.60759306",
"0.6062099",
"0.6062099",
"0.6062099",
"0.6049794",
"0.6025096",
"0.60208267",
"0.5994059",
"0.5985374",
"0.5982684",
"0.5969102",
"0.5945053",
"0.5923145",
"0.5917384",
"0.58929783",
"0.58876646",
"0.58613306",
"0.5859887",
"0.58552384",
"0.58436906",
"0.5843496",
"0.5827585",
"0.58246267",
"0.58219296",
"0.58042896",
"0.57951105",
"0.57934767",
"0.57873774",
"0.5784002",
"0.5781163",
"0.57706",
"0.5759164",
"0.5746722",
"0.5732538",
"0.5730671",
"0.5730671",
"0.5730671",
"0.5730671",
"0.5729628",
"0.572172",
"0.5711257",
"0.5710669",
"0.5703973",
"0.5694751",
"0.5693896",
"0.5693896",
"0.56892884",
"0.5688168",
"0.56724554",
"0.56657225",
"0.56605214",
"0.56603175",
"0.5655466",
"0.5655194",
"0.5643727",
"0.56327444",
"0.5631626",
"0.56196356",
"0.56157506",
"0.5615221",
"0.56143683",
"0.5612559",
"0.56118554",
"0.5611187",
"0.5608633",
"0.56069714",
"0.5606245",
"0.5606245",
"0.55994403",
"0.5587111",
"0.5586008",
"0.558232",
"0.5575789",
"0.5573189",
"0.55703104",
"0.5565289",
"0.5551481",
"0.55501276",
"0.5549008",
"0.553805"
] | 0.0 | -1 |
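
A minimal usage sketch for the _get_collection generator in the record above. The client object, the example URI, and the 'Name' property lookup are illustrative assumptions, not part of the dataset entry; only the (status, headers, member, memberuri) yield shape comes from the code itself.

def list_member_names(client, collection_uri='/rest/v1/Systems'):
    # Iterate every member the generator yields; when members are
    # fetched via href links, each one carries its own GET status.
    names = []
    for status, headers, member, memberuri in client._get_collection(collection_uri):
        if status != 200:
            continue  # skip members whose individual fetch failed
        names.append(member.get('Name', memberuri))
    return names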
Return the type of an object. | def _get_type(self, obj):
typever = obj['Type']
typesplit = typever.split('.')
return typesplit[0] + '.' + typesplit[1] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_type ( self, object ):\n return self.type",
"def get_type ( self, object ):\n return self.type",
"def object_type(self):\n return self._object_type",
"def object_type(self):\n return self._object_type",
"def object_type(self):\n return self._object_type",
"def object_type(self):\n return self._object_type",
"def object_type(self):\n return self._object_type",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return self._object_type",
"def object_type(self) -> Optional[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> Optional[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return self._event.get('object_type')",
"def fl_get_object_type(ptr_flobject):\n _fl_get_object_type = library.cfuncproto(\n library.load_so_libforms(), \"fl_get_object_type\", \\\n cty.c_int, [cty.POINTER(xfdata.FL_OBJECT)], \\\n \"\"\"int fl_get_object_type(FL_OBJECT * obj) \"\"\")\n library.check_if_flinitialized()\n library.verify_flobjectptr_type(ptr_flobject)\n library.keep_elem_refs(ptr_flobject)\n retval = _fl_get_object_type(ptr_flobject)\n return retval",
"def getMetaType(self, object) :\n\t\tif callable(object.meta_type) :\n\t\t\t# at least ZClasses\n\t\t\treturn object.meta_type()\n\t\telse :\n\t\t\t# the rest\n\t\t\treturn object.meta_type",
"def kind_of(obj):\n # why don't I use isinstance - it saves us big time\n\n # dict, list, and tuple are differianted from str, unicode, int, bool, and float\n # because they have special treatment and simple `==` or `is` is not enough to\n # prove them valid.\n obj_type = type(obj)\n if obj_type is dict:\n return TYPE_DICTIONARY\n elif obj_type is list:\n return TYPE_LIST\n elif obj_type is tuple:\n return TYPE_TUPLE\n elif obj in ATOMIC_TYPES:\n return TYPE_TYPE\n elif obj is object:\n return TYPE_OBJECT\n elif getattr(obj, \"__class__\", False) and issubclass(obj.__class__, BaseValidator):\n return TYPE_VALIDATOR\n elif callable(obj):\n return TYPE_FUNCTION\n # this f##king SRE_Pattern, why can't I f##king kill it\n elif getattr(obj, \"match\", False) and getattr(obj, \"search\", False):\n return TYPE_REGEX\n else:\n return TYPE_UNKNOWN",
"def object_type(self):\n if not self.Flags & gdef.ACE_OBJECT_TYPE_PRESENT:\n return None\n return self.ObjectType",
"def get_type(self):\n return self._type_obj",
"def get_object_type(self, ref):\n ws = Workspace(self.ws_url)\n info = ws.get_object_info3({\"objects\": [{\"ref\": ref}]})\n obj_info = info.get(\"infos\", [[]])[0]\n if len(obj_info) == 0:\n raise RuntimeError(\"An error occurred while fetching type info from the Workspace. \"\n \"No information returned for reference {}\".format(ref))\n return obj_info[2]",
"def get_obj_class(self, obj_type: str) -> Type[TgnObject]:\n pass",
"def object_type_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"object_type_name\")",
"def get_object_type(schema_obj):\n\n if isinstance(schema_obj, schema.Field):\n raise exception.InvalidArgument(schema_obj)\n return schema_obj.full_name",
"def is_type(obj):\n return type(obj) is type or type(obj) is types.ClassType",
"def get_drs_object_type(object_info: dict) -> DRSObjectType:\n if \"form\" in object_info:\n if object_info[\"form\"] is None:\n return DRSObjectType.object\n return DRSObjectType(object_info[\"form\"])\n\n if \"contents\" in object_info and len(object_info[\"contents\"]) > 0:\n return DRSObjectType.bundle\n else:\n return DRSObjectType.object",
"def GetType(self, obj_name):\n for type, objects in self.objects.iteritems():\n if obj_name in objects:\n return type\n raise NoSuchObjectError(\"Object %s doesn't exist.\" % obj_name)",
"def get_python_type(obj, format_type):\n t = type(obj)\n\n return t if format_type is None else t.__name__",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def print_type(obj: object) -> None:\n print(f'{type(obj)}')",
"def typename(obj):\n return obj.__name__ if hasattr(obj, '__name__') else type(obj).__qualname__",
"def determine_object_type(obj):\n type_obj = \"Ethernet\"\n if isinstance(obj, dom_kvm.LinuxBridge):\n type_obj = BRIDGE\n elif isinstance(obj, dom_kvm.EthernetBond):\n type_obj = ETH_BOND\n elif isinstance(obj, dom_kvm.PhysicalPort):\n type_obj = ETHERNET\n elif isinstance(obj, dom_kvm.OpenVSwitch):\n type_obj = OVS_BR\n return type_obj",
"def get_type(self):\n return self._type",
"def get_type(self):\n return self._type",
"def typename ( o ) :\n return type ( o ) .__name__",
"def fl_get_object_boxtype(ptr_flobject):\n _fl_get_object_boxtype = library.cfuncproto(\n library.load_so_libforms(), \"fl_get_object_boxtype\", \\\n cty.c_int, [cty.POINTER(xfdata.FL_OBJECT)], \\\n \"\"\"int fl_get_object_boxtype(FL_OBJECT * obj) \"\"\")\n library.check_if_flinitialized()\n library.verify_flobjectptr_type(ptr_flobject)\n library.keep_elem_refs(ptr_flobject)\n retval = _fl_get_object_boxtype(ptr_flobject)\n return retval",
"def type(self):\n return self.__type",
"def type(self):\r\n return self.__type",
"def get_type(self):\n return self.type",
"def get_type(self):\n return self.type",
"def getType(self,):\n\t\treturn self.type;"
] | [
"0.8628719",
"0.8628719",
"0.8257007",
"0.8257007",
"0.8257007",
"0.8257007",
"0.8257007",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.82011956",
"0.8145945",
"0.79864955",
"0.79864955",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.79691756",
"0.7898051",
"0.7898051",
"0.7898051",
"0.7898051",
"0.78362143",
"0.75354356",
"0.7515423",
"0.7446857",
"0.7424326",
"0.74203354",
"0.7369579",
"0.7231573",
"0.71980494",
"0.71343184",
"0.70426154",
"0.7038192",
"0.70044583",
"0.6986426",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.69820476",
"0.6886853",
"0.688152",
"0.6848774",
"0.68183106",
"0.68183106",
"0.6795202",
"0.67877954",
"0.67805916",
"0.67773527",
"0.6748413",
"0.6748413",
"0.6739524"
] | 0.69202507 | 88 |
Checks if the specified operation is allowed on the resource. | def _operation_allowed(self, headers_dict, operation):
if 'allow' in headers_dict:
if operation in headers_dict['allow']:
return True
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_allowed(self, role, operation, resource):\r\n assert not role or role in self._roles\r\n assert not resource or resource in self._resources\r\n\r\n roles = set(get_family(self._roles, role))\r\n operations = set([None, operation])\r\n resources = set(get_family(self._resources, resource))\r\n\r\n is_allowed = None\r\n default_assertion = lambda *args: True\r\n\r\n for permission in itertools.product(roles, operations, resources):\r\n if permission in self._denied:\r\n assertion = self._denied[permission] or default_assertion\r\n if assertion(self, role, operation, resource):\r\n return False # denied by rule immediately\r\n\r\n if permission in self._allowed:\r\n assertion = self._allowed[permission] or default_assertion\r\n if assertion(self, role, operation, resource):\r\n is_allowed = True # allowed by rule\r\n\r\n return is_allowed",
"def supports_operation(self, operation: str) -> bool:\n return True",
"def supports_operation(self, operation: str) -> bool:\n return operation in OPERATION_SUPPORT_BY_TYPE[self.backing_type]",
"def is_any_allowed(self, roles, operation, resource):\r\n is_allowed = None # there is not matching rules\r\n for role in roles:\r\n is_current_allowed = self.is_allowed(role, operation, resource)\r\n if is_current_allowed is False:\r\n return False # denied by rule\r\n elif is_current_allowed is True:\r\n is_allowed = True\r\n return is_allowed",
"def _is_valid_fetch_operation(operation):\n if operation in FetchQuantity._supported_fetch_operations():\n return True\n else:\n return False",
"def permit_required(self):\n return \"permission\" in self.description.lower()",
"async def contains(self, operation: Operation) -> bool:\n return operation.instance_name in self.operations",
"def check_zone_operation(self, zone, operation):\n assert is_valid_zone(zone), zone\n return self.call_api('/zones/%s/operations/%s' % (zone, operation))",
"def check_rights(self, resources, request=None):\r\n if not self.auth:\r\n return True\r\n\r\n try:\r\n if not self.auth.test_rights(resources, request=request):\r\n raise AssertionError()\r\n\r\n except AssertionError, e:\r\n raise HttpError(\r\n \"Access forbiden. {0}\".format(e),\r\n status=status.HTTP_403_FORBIDDEN\r\n )",
"def has_access(self, action_name: str, resource_name: str, user=None) -> bool:\n if not user:\n user = g.user\n if (action_name, resource_name) in user.perms:\n return True\n\n if self.is_dag_resource(resource_name):\n if (action_name, permissions.RESOURCE_DAG) in user.perms:\n return True\n return (action_name, resource_name) in user.perms\n\n return False",
"def acl_check_entity(self, entity, auth_context, op, obj):\n acl_check = (\n entity.acl_check(auth_context, op, obj)\n if entity.has_acl()\n else self.default_acl.acl_check(auth_context, op, obj))\n if not acl_check:\n raise exceptions.AclError(\n 'unauthorized change to %s' % (\n entity.name,))",
"def check_allowed(self):\n if self.state_model.op_state in [\n DevState.FAULT,\n DevState.UNKNOWN,\n DevState.DISABLE,\n ]:\n return False\n\n return True",
"def allow(self, role, operation, resource, assertion=None):\r\n assert not role or role in self._roles\r\n assert not resource or resource in self._resources\r\n self._allowed[role, operation, resource] = assertion",
"def check_action_permissions(self, request, action, obj=None):\n if action is None:\n self.permission_denied(request)\n\n for permission in self.get_permissions():\n if not permission.has_action_permission(request, self, action, obj):\n self.permission_denied(request)",
"def check_permission(self, operation, resource, **exception_kwargs):\r\n exception = exception_kwargs.pop(\"exception\", PermissionDenied)\r\n checker = functools.partial(self._docheck, operation=operation,\r\n resource=resource)\r\n return PermissionContext(checker, exception, **exception_kwargs)",
"def test_allow(self) -> None:\n response = self.request(\"/\", method=\"HEAD\")\n self.assert_allowed(response, (\"GET\", \"POST\"))",
"def __CheckOpen(self, operation):\n if self.__closed:\n raise ValueError('%s() on a closed stream is not permitted' %\n operation)",
"def __CheckOpen(self, operation):\n if self.__closed:\n raise ValueError('%s() on a closed stream is not permitted' %\n operation)",
"def has_permission(self, request, view):\n if settings.ENHANCED_ORG_ADMIN and request.user.admin:\n return True\n\n if not request.user.access:\n return False\n\n if request.method in permissions.SAFE_METHODS:\n rates_read = request.user.access.get(\"cost_model\", {}).get(\"read\", [])\n if rates_read:\n return True\n else:\n rates_write = request.user.access.get(\"cost_model\", {}).get(\"write\", [])\n if \"*\" in rates_write:\n return True\n if self.get_uuid_from_url(request) in rates_write:\n return True\n return False",
"def validate(self, raw_op):\n log.info(\"validating @%s op %s\", self.actor, raw_op)\n\n try:\n # validate basic structure\n self._validate_raw_op(raw_op)\n self.action = raw_op[0]\n self.op = raw_op[1]\n self.actor_id = Accounts.get_id(self.actor)\n\n # validate and read schema\n self._read_schema()\n\n # validate permissions\n self._validate_permissions()\n\n self.valid = True\n\n except AssertionError as e:\n payload = str(e)\n Notify('error', dst_id=self.actor_id,\n when=self.date, payload=payload).write()\n\n return self.valid",
"def check_perms(resource):\r\n stmode = os.stat(resource).st_mode\r\n return (getattr(stat, 'S_IROTH') & stmode) > 0",
"def check_perms(resource):\r\n stmode = os.stat(resource).st_mode\r\n return (getattr(stat, 'S_IROTH') & stmode) > 0",
"def is_allowed(self) -> bool:\n return self.effect == ALLOW_ACCESS",
"def is_allowed_to_do(cls, db_tuple, action, target, actor, should_raise_insufficent_priv_ex=True):\n action_check_fn = cls.get_action_check_fn(action)\n \n if action_check_fn is None:\n raise cls.UnrecognizedActionException('unrecognized action: %s' % action)\n \n # i do what i want!\n if actor.metaspace_privileges.has_privilege(MetaspacePrivilegeSet.SUPER):\n return True\n \n can_do_action = action_check_fn(db_tuple, target, actor)\n if should_raise_insufficent_priv_ex and not can_do_action:\n raise cls.InsufficientPrivilegesException('%s (user_id=%i) is not allowed to perform %s' % (actor.email_addr, actor.user_id, action))\n else:\n return can_do_action",
"def check_permission(user, action_name, app_label, model_name):\r\n p = '%s.%s_%s' % (app_label, action_name, model_name)\r\n return user and user.is_active and user.has_perm(p)",
"async def permission_valid_check(cls):\n pass",
"def isOp(self):\n return True",
"def can_retry(self, opname):\n\n if self.retry_deny and opname in self.retry_deny:\n return False\n\n if self.retry_allow and opname not in self.retry_allow:\n return False\n\n return True",
"def is_Scan_allowed(self):\n handler = self.get_command_object(\"Scan\")\n return handler.check_allowed()",
"def mask_pass(owned_permissions: int, requested_operation: int,) -> bool:\n return bool(owned_permissions & requested_operation)",
"def check_supported(self, op):\n if op == \"series\":\n return (self.cgi_show_series is not None) or (self.cgi_show_series_wrapper is not None)\n elif op == \"info\":\n return self.cgi_jsoc_info is not None\n elif op == \"query\":\n return self.cgi_jsoc_info is not None\n elif op == \"email\":\n return self.cgi_check_address is not None\n elif op == \"export\":\n return (self.cgi_jsoc_info is not None) and (self.cgi_jsoc_fetch is not None)\n else:\n raise ValueError(f\"Unknown operation: {op!r}\")",
"def is_insufficient_permissions(self):\n return self._tag == 'insufficient_permissions'",
"def has_permission(self, request, view):\n if request.method in permissions.SAFE_METHODS:\n return True\n return False",
"def check_method_allowed(cls, request):\r\n if not request.method in cls._meta.allowed_methods:\r\n raise HttpError(\r\n 'Method \\'%s\\' not allowed on this resource.' % request.method,\r\n status=status.HTTP_405_METHOD_NOT_ALLOWED)",
"def _is_action_legal(self, action):\n loading_position = self.end_of_lanes[self.current_Lane]\n length_of_vehicle = self.vehicle_data[4][action]\n\n # Check if the corresponding lane has sufficient capacity for cargo\n if loading_position + length_of_vehicle <= self.rows:\n # Check if still vehicle are due to be loaded or infinite vehicle are in harbour yard to load\n if self.number_of_vehicles_loaded[action] < self.vehicle_data[1][action] or \\\n self.vehicle_data[1][action] == -1:\n # Check if cargo type is a reefer that it can be placed in chosen position\n if self.vehicle_data[5][action] == 1:\n designated_loading_area = self.grid_reefer.T[self.current_Lane][\n loading_position:(loading_position + length_of_vehicle)]\n return np.all(designated_loading_area == 1)\n else:\n return True\n else:\n return False\n else:\n return False",
"def _op_easy(self, op, reg_list, param_list=None): # pylint: disable-msg=invalid-name\n\n has_op = hasattr(self.circuit, op)\n\n if has_op:\n if param_list:\n # DEBUG\n # print(\"********** op {} param_list {} reg_list {}\".format(op, param_list, reg_list)) # pylint: disable-msg=line-too-long\n # END-DEBUG\n getattr(self.circuit, op)(*param_list, *reg_list)\n else:\n getattr(self.circuit, op)(*reg_list)\n\n return has_op",
"def has_permission(self):\n return super().has_permission()",
"def evaluate_resource_for_permission(statement: Dict, resource_arn: str) -> bool:\n if 'resource' not in statement:\n return False\n for clause in statement['resource']:\n if evaluate_clause(clause, resource_arn):\n return True\n return False",
"def has_object_permission(self, request, view, obj):\n if request.method == \"GET\":\n return self.model_admin_config.has_view_permission(self, request, obj=obj)\n if request.method == \"PUT\":\n return self.model_admin_config.has_change_permission(self, request, obj=obj)\n if request.method == \"DELETE\":\n return self.model_admin_config.has_delete_permission(self, request, obj=obj)",
"def allow_method (self, method, user, ** kw) :\n if self.auth_required and not \\\n (user and user.authenticated and user.active) :\n return False\n if isinstance (method, pyk.string_types) :\n try :\n method = GTW.RST.HTTP_Method.Table [method]\n except KeyError :\n raise self.Status.Method_Not_Allowed \\\n ( _T (\"Unknown HTTP method `%s` requested\") % (method, )\n , valid_methods = sorted (self.SUPPORTED_METHODS)\n )\n if method and not (user and user.superuser) :\n pn = method.mode + \"_permissions\"\n permissions = getattr (self, pn)\n for p in permissions :\n if not p (user, self, ** kw) :\n if p.auth_required :\n return False\n else :\n raise self.Status.Forbidden \\\n (p.message (user, self, ** kw))\n return True",
"def is_method_allowed(self):\n if self.request.method.upper() not in self.http_methods:\n raise exceptions.MethodNotAllowed(self.http_methods)\n return True",
"def can(user, action):\n\n v = bitvector.BitVector(user.access_level)\n return v.is_set(EVERYTHING) or v.is_set(action)",
"def validate_complaint_document(self, operation):\n if operation == \"update\" and self.request.authenticated_role != self.context.author:\n self.request.errors.add(\"url\", \"role\", \"Can update document only author\")\n self.request.errors.status = 403\n raise error_handler(self.request.errors)\n if self.request.validated[\"tender_status\"] not in [\"active.qualification\", \"active.awarded\"]:\n raise_operation_error(\n self.request,\n \"Can't {} document in current ({}) tender status\".format(\n operation, self.request.validated[\"tender_status\"]\n ),\n )\n if any(\n [\n i.status != \"active\"\n for i in self.request.validated[\"tender\"].lots\n if i.id == self.request.validated[\"award\"].lotID\n ]\n ):\n raise_operation_error(self.request, \"Can {} document only in active lot status\".format(operation))\n if self.request.validated[\"complaint\"].status not in STATUS4ROLE.get(self.request.authenticated_role, []):\n raise_operation_error(\n self.request,\n \"Can't {} document in current ({}) complaint status\".format(\n operation, self.request.validated[\"complaint\"].status\n ),\n )\n return True",
"def permits(identity, obj, permission):\n return False",
"def test_commandRaisesIllegalOperationResponse(self):\n self.assertCommandExceptionResponse(\n imap4.IllegalOperation(\"operation\"),\n b\"001\", b\"NO Illegal operation: operation\\r\\n\",\n )",
"def require_operator_permission(self, user, carrier, region):\n if user.is_anonymous():\n raise PermissionDenied()\n elif self.is_admin(user):\n return\n carrier = (carrier if isinstance(carrier, (int, long)) else\n CARRIER_MAP[carrier].id)\n region = (region if isinstance(region, (int, long)) else\n REGIONS_DICT[region].id)\n passes = OperatorPermission.objects.filter(\n user=user, carrier=carrier, region=region).exists()\n if not passes:\n raise PermissionDenied()",
"def is_Abort_allowed(self):\n handler = self.get_command_object(\"Abort\")\n return handler.check_allowed()",
"def can(self, unused_perm):\n return False",
"def require_object_permission(self, user, obj):\n self.require_operator_permission(user, obj.carrier, obj.region)",
"def check_permission(self, action, username, resource, perm):\n if not resource:\n return\n if resource.realm == 'blog' and resource.id:\n the_post = BlogPost(self.env, resource.id, resource.version)\n for category in the_post.category_list:\n if category in self.draft and the_post.author != username:\n # Block all access regardless\n return False",
"def opOk(op, validRegs):\n for operand in op.operands:\n if not operand in reversed(validRegs):\n return False\n # If we make it here, they're all valid\n return True",
"def _idempotent_acl_check(self):\n\n if self.host.os in {'iosxr'}:\n if self.parent is not self.root:\n acl = ('ipv4 access-list ', 'ipv6 access-list ')\n if self.parent.text.startswith(acl):\n return True\n return False",
"def ValidateCheckOperationTest(op_type_name, is_last, allow_signature,\n allow_unhashed, fail_src_extents,\n fail_dst_extents,\n fail_mismatched_data_offset_length,\n fail_missing_dst_extents, fail_src_length,\n fail_dst_length, fail_data_hash,\n fail_prev_data_offset, fail_bad_minor_version):\n op_type = _OpTypeByName(op_type_name)\n\n # REPLACE/REPLACE_BZ/REPLACE_XZ operations don't read data from src\n # partition. They are compatible with all valid minor versions, so we don't\n # need to check that.\n if (op_type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ,\n common.OpType.REPLACE_XZ) and (fail_src_extents or\n fail_src_length or\n fail_bad_minor_version)):\n return False\n\n # MOVE and SOURCE_COPY operations don't carry data.\n if (op_type in (common.OpType.MOVE, common.OpType.SOURCE_COPY) and (\n fail_mismatched_data_offset_length or fail_data_hash or\n fail_prev_data_offset)):\n return False\n\n return True",
"def has_permission(self, request, view):\n return False",
"def test_func(self):\n return self.request.user.has_permission(\"core.view_staffer\")",
"def _is_valid_delete_operation(session, row):\n # Check for any pending or processing create or update\n # ops on the row itself\n if db.check_for_pending_or_processing_ops(\n session, row.object_uuid, operation=[odl_const.ODL_UPDATE,\n odl_const.ODL_CREATE]):\n return False\n\n # Check for dependent operations\n dependent_resource_types = _DELETE_DEPENDENCIES.get(row.object_type)\n if dependent_resource_types is not None:\n for resource_type in dependent_resource_types:\n if db.check_for_pending_delete_ops_with_parent(\n session, resource_type, row.object_uuid):\n return False\n return True",
"def action_allowed_for(user, permission):\n if user is None or not user.is_authenticated:\n return False\n\n assert permission in amo.permissions.PERMISSIONS_LIST # constants only.\n return any(\n match_rules(group.rules, permission.app, permission.action)\n for group in user.groups_list\n )",
"def can_act(self, **kwargs):\n return True",
"def is_resource_enabled(resource):\n return use_resources is not None and resource in use_resources",
"async def ensure_contains(self, operation: Operation):\n # Check that our network contains the operation\n if not await self.contains(operation):\n if not await self.instantiable(operation):\n raise OperationImplementationNotInstantiable(operation.name)\n else:\n raise OperationImplementationNotInstantiated(\n operation.instance_name\n )",
"def is_valid_command(command):\n return is_get(command) or is_insert(command) or is_update(command) or is_delete(command) or is_showall(command) or is_search(command)",
"def can_edit(self):\n return self.state not in (\n 'scanning', 'resulted', 'cancelled', 'aborted')",
"def verify_blob_permissions(self, blob):\n path = self.csum_to_path(blob)\n return is_readonly(path)",
"def can(self, perm):\n return self.role.has_permissions(perm)",
"def isValidrequest(cls, mgr, fid, op, tmpcls, slot, session=None):\n ormop = clsmanager.getConfigOperation(op)\n if session is not None:\n cls.getclsoptions(tmpcls, session)\n if ormop in optionsdict[tmpcls]['OPTIONS']:\n if cls.getClsStageSupported(tmpcls, op, slot) is True:\n inputs = mgr.get(fid, tmpcls, op, slot, session)\n if len(inputs) > 0:\n return True\n return False",
"def _check_ops(self):\n required_ops = ['san_ip', 'san_login', 'san_password']\n for attr in required_ops:\n if not getattr(self.configuration, attr, None):\n raise exception.InvalidInput(reason=_('%s is not set.') % attr)\n\n replica = self.configuration.safe_get('replication_device')\n if replica and isinstance(replica, list):\n replica_ops = ['backend_id', 'login', 'password', 'rpo']\n for attr in replica_ops:\n if attr not in replica[0]:\n msg = _('replication_device %s is not set.') % attr\n raise exception.InvalidInput(reason=msg)\n self.replica = Replication(replica[0])",
"def _is_item_allowed(resource, item, resourcesalloweddict, resourcesuseddict):\n\n if item in resourcesalloweddict[resource]:\n # this is semi nonsensical, but allows us to indicate which ports are used\n # through get_resource_information()\n resourcesuseddict[resource].add(item)\n return True\n\n else:\n return False",
"def authorize(self, action, author_id=None):\n if action not in CHANGE_TYPES:\n return False\n return True",
"def has_permission(self, request, view):\n return True",
"def has_object_permission(self, request, view, obj):\n # if the user is trying to retrieve to create a item.. it will return true\n if request.method in permissions.SAFE_METHODS:\n return True\n # check if the user is trying to don't do a SAFE_METHODS, put,patch,delete and if the feed owner is doing it or another different user.. and it will return true if match or false if not\n return obj.user_profile.id == request.user.id",
"def has_object_permission(self, request, view, obj):\n if request.method in permissions.SAFE_METHODS:\n return True\n\n return is_owner_or_privileged_user(obj.user, request)",
"def check_authorization(\n self,\n perms: Sequence[tuple[str, str]] | None = None,\n dag_id: str | None = None,\n ) -> bool:\n if not perms:\n return True\n\n for perm in perms:\n if perm in (\n (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),\n (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),\n (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG),\n ):\n can_access_all_dags = self.has_access(*perm)\n if can_access_all_dags:\n continue\n\n action = perm[0]\n if self.can_access_some_dags(action, dag_id):\n continue\n return False\n\n elif not self.has_access(*perm):\n return False\n\n return True",
"def _is_valid_update_operation(session, row):\n # Check if there are older updates in the queue\n if db.check_for_older_ops(session, row):\n return False\n\n # Check for a pending or processing create operation on this uuid\n if db.check_for_pending_or_processing_ops(\n session, row.object_uuid, operation=odl_const.ODL_CREATE):\n return False\n return True",
"def can(self, perm):\n return self.role is not None and self.role.has_permission(perm)",
"def get_feature_permission(request, feature, operation=None):\n feature_info = FEATURE_MAP.get(feature)\n if not feature_info:\n raise ValueError(\"The requested feature '%(feature)s' is unknown. \"\n \"Please make sure to specify a feature defined \"\n \"in FEATURE_MAP.\")\n\n # Check dashboard settings\n feature_config = feature_info.get('config')\n if feature_config:\n if not setting_utils.get_dict_config('OPENSTACK_NEUTRON_NETWORK',\n feature_config['name']):\n return False\n\n # Check policy\n feature_policies = feature_info.get('policies')\n if feature_policies:\n policy_name = feature_policies.get(operation)\n if not policy_name:\n raise ValueError(\"The 'operation' parameter for \"\n \"get_feature_permission '%(feature)s' \"\n \"is invalid. It should be one of %(allowed)s\"\n % {'feature': feature,\n 'allowed': ' '.join(feature_policies.keys())})\n role = (('network', policy_name),)\n if not policy.check(role, request):\n return False\n\n # Check if a required extension is enabled\n feature_extension = feature_info.get('extension')\n if feature_extension:\n try:\n return is_extension_supported(request, feature_extension)\n except Exception:\n LOG.info(\"Failed to check Neutron '%s' extension is not supported\",\n feature_extension)\n return False\n\n # If all checks are passed, now a given feature is allowed.\n return True",
"def is_valid_operator(self, operator):\n if operator in self.operators_dict.keys():\n return True\n else:\n return False",
"def _enforce(self, req, action, target=None):\n if target is None:\n target = {}\n try:\n self.policy.enforce(req.context, action, target)\n except exception.Forbidden as e:\n LOG.debug(\"User not permitted to perform '%s' action\", action)\n raise webob.exc.HTTPForbidden(explanation=e.msg, request=req)",
"def test_only_add_perm(self):\n self.assertStatusCode(self.url, 403)",
"def IsAllowed(self):\r\n\r\n return self.notify.IsAllowed()",
"def IsAllowed(self):\r\n\r\n return self.notify.IsAllowed()",
"def IsAllowed(self):\r\n\r\n return self.notify.IsAllowed()",
"def can_edit_or_403(self, user):\n if self.get_permission_level(user) < self.OWNER_PERMISSION:\n raise PermissionDenied\n return True",
"def check_permissions(permission, payload):\n if 'permissions' not in payload:\n abort(401)\n\n if permission not in payload['permissions']:\n abort(401)\n\n return True",
"def test_has_permission(self):\n self.assertStatusCode(self.url, 200)",
"def test_has_permission(self):\n self.assertStatusCode(self.url, 200)",
"def test_has_permission(self):\n self.assertStatusCode(self.url, 200)",
"def test_has_permission(self):\n self.assertStatusCode(self.url, 200)",
"def has_object_permission(self, request, view, obj):\n\n \"\"\" Check the HTTP method being used for the request\"\"\"\n \"\"\" 'SAFE' methods are those methods that don't make any change to the object e.g. PUT\"\"\"\n \"\"\" users should only be able to make changes to their own profile - being checked below -> whether object ID = user id\"\"\"\n if request.method in permissions.SAFE_METHODS:\n return True\n\n return obj.id == request.user.id",
"def can_handle(self, rsm_ctx):\n return rsm_ctx.instance.type == NODE_TYPE_QUOTA",
"def has_permission(self, request, view):\n if request.method == \"POST\":\n return self.model_admin_config.has_add_permission(self, request)\n return True",
"def _resource_name_check(self, resource_name):\n return self._name_check(resource_name, 'resources')",
"def has_permission(self, request, view):\n user = request.user\n if (\n isinstance(user, TokenUser)\n and LTI_ROLES[self.__class__.role]\n & set(user.token.payload.get(\"roles\", []))\n and user.token.payload.get(\"permissions\", {}).get(\"can_update\", False)\n is True\n ):\n return True\n\n return False",
"def has_object_permission(self, request, view, obj):\n if request.user.is_superuser:\n return True\n if request.user.profile.role == UserRole.CLIENT and obj.owner != request.user:\n return False\n if request.user.profile.role == UserRole.EXECUTOR and obj.executor != request.user:\n return False\n return True",
"def _has_permission(self, user, user_is_mod, command, db_session):\n\n if command[1] == 'for_all':\n return True\n if command[1] == 'for_mods' and user_is_mod:\n return True\n if type(command[1]) == db.Command:\n db_command = command[1]\n if bool(db_command.permissions) is False:\n return True\n elif user in [permission.user_entity for permission in db_command.permissions]:\n return True\n return False",
"def is_telescope_on_allowed(self):\n handler = self.get_command_object(\"TelescopeOn\")\n return handler.check_allowed()",
"def _is_user_defined_permission(self, perm: Model) -> bool:\n\n return perm.permission.name in self.OBJECT_SPEC_PERMISSIONS",
"def has_change_permission(self, request, obj=None):\n return False",
"def isSetOperation(self):\n return _libsbml.FluxBound_isSetOperation(self)",
"def has_object_permission(self, request, view, obj):\n if request.method in permissions.SAFE_METHODS:\n return True\n \n \"\"\"Check if the user has the permission to edit their profile. If True it will allow PUT, PATCH & DELETE operations\"\"\"\n return obj.id == request.user.id # returns True or False",
"def can_read(self, auth_param: str) -> bool:\n perms = self._get_workspace_permissions([auth_param])\n return self._has_read_perm(perms.get(auth_param, WorkspacePermission.NONE))"
] | [
"0.735948",
"0.72206527",
"0.68997526",
"0.6572741",
"0.64294475",
"0.63755685",
"0.63672787",
"0.6200207",
"0.61940044",
"0.61875075",
"0.6171609",
"0.61679983",
"0.60811335",
"0.607854",
"0.604403",
"0.60305333",
"0.60210794",
"0.60210794",
"0.60156304",
"0.6002486",
"0.59915817",
"0.59915817",
"0.599041",
"0.59650993",
"0.5944636",
"0.59318346",
"0.5909353",
"0.59068096",
"0.5903858",
"0.58919746",
"0.5847682",
"0.5837207",
"0.5834272",
"0.5819325",
"0.58036745",
"0.5802878",
"0.57962537",
"0.5770759",
"0.5765103",
"0.5761626",
"0.57428604",
"0.57354426",
"0.5730529",
"0.57303673",
"0.57161784",
"0.57017416",
"0.57000077",
"0.5696499",
"0.56914157",
"0.5682622",
"0.56799215",
"0.5678169",
"0.56649095",
"0.56523365",
"0.5650446",
"0.5617676",
"0.560557",
"0.5601695",
"0.55978245",
"0.5597505",
"0.5587524",
"0.55816925",
"0.5570838",
"0.55688244",
"0.55656207",
"0.5549613",
"0.55483556",
"0.5542991",
"0.55384135",
"0.55352944",
"0.55239147",
"0.55140376",
"0.55125207",
"0.55032426",
"0.55010766",
"0.54985464",
"0.5498526",
"0.54969805",
"0.54910433",
"0.54910433",
"0.54910433",
"0.5487658",
"0.5483935",
"0.54832816",
"0.54832816",
"0.54832816",
"0.54832816",
"0.548126",
"0.5473995",
"0.5472051",
"0.54673433",
"0.5460058",
"0.54594284",
"0.5457564",
"0.5447304",
"0.5445699",
"0.54374665",
"0.5436299",
"0.54343337",
"0.5425616"
] | 0.77897847 | 0 |
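Many of the permission-check negatives in the record above follow Django REST Framework's object-permission pattern. A minimal, self-contained sketch of that pattern, assuming DRF's permissions API; the IsOwnerOrReadOnly name and the owner field are illustrative and not taken from any snippet above:

from rest_framework import permissions


class IsOwnerOrReadOnly(permissions.BasePermission):
    """Allow read-only access to anyone; writes only to the object's owner."""

    def has_object_permission(self, request, view, obj):
        # SAFE_METHODS is GET, HEAD and OPTIONS -- requests that never
        # mutate state, so they are always allowed through.
        if request.method in permissions.SAFE_METHODS:
            return True
        # 'owner' is a hypothetical field on the model being protected.
        return obj.owner == request.user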
Parse the ExtendedError object and return the message. Build a list of decoded messages from the extended_error using the message registries. An ExtendedError JSON object is a response from the server with its own schema. This function knows how to parse the ExtendedError object and, using any loaded message registries, render an array of plain language strings that represent the response. | def _render_extended_error_message_list(self, extended_error):
    messages = []
    if isinstance(extended_error, dict):
        if ('Type' in extended_error and
                extended_error['Type'].startswith('ExtendedError.')):
            for msg in extended_error['Messages']:
                message_id = msg['MessageID']
                x = message_id.split('.')
                registry = x[0]
                msgkey = x[-1]

                # if the correct message registry is loaded,
                # do string resolution
                if (registry in self.message_registries and msgkey in
                        self.message_registries[registry]['Messages']):
                    rmsgs = self.message_registries[registry]['Messages']
                    msg_dict = rmsgs[msgkey]
                    msg_str = message_id + ': ' + msg_dict['Message']

                    for argn in range(msg_dict['NumberOfArgs']):
                        subst = '%' + str(argn + 1)
                        m = str(msg['MessageArgs'][argn])
                        msg_str = msg_str.replace(subst, m)

                    if ('Resolution' in msg_dict and
                            msg_dict['Resolution'] != 'None'):
                        msg_str += ' ' + msg_dict['Resolution']

                    messages.append(msg_str)
                else:
                    # no message registry, simply return the msg object
                    # in string form
                    messages.append(str(message_id))

    return messages | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_extended_error(self, extended_error):\n return self._render_extended_error_message_list(extended_error)",
"def _get_resp_body_errors(self):\n\n if self._resp_body_errors and len(self._resp_body_errors) > 0:\n return self._resp_body_errors\n\n errors = []\n warnings = []\n resp_codes = []\n\n if self.verb is None:\n return errors\n\n dom = self.response.dom()\n if dom is None:\n return errors\n\n for e in dom.findall('Errors'):\n eSeverity = None\n eClass = None\n eShortMsg = None\n eLongMsg = None\n eCode = None\n\n try:\n eSeverity = e.findall('SeverityCode')[0].text\n except IndexError:\n pass\n\n try:\n eClass = e.findall('ErrorClassification')[0].text\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n except IndexError:\n pass\n\n try:\n eShortMsg = smart_encode(e.findall('ShortMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eLongMsg = smart_encode(e.findall('LongMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n if int(eCode) not in resp_codes:\n resp_codes.append(int(eCode))\n except IndexError:\n pass\n\n msg = str(\"Class: {eClass}, Severity: {severity}, Code: {code}, {shortMsg} {longMsg}\") \\\n .format(eClass=eClass, severity=eSeverity, code=eCode, shortMsg=eShortMsg,\n longMsg=eLongMsg)\n\n # from IPython import embed; embed()\n\n if eSeverity == 'Warning':\n warnings.append(msg)\n else:\n errors.append(msg)\n\n self._resp_body_warnings = warnings\n self._resp_body_errors = errors\n self._resp_codes = resp_codes\n\n if self.config.get('warnings') and len(warnings) > 0:\n log.warn(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(warnings)))\n\n if self.response.reply.Ack == 'Failure':\n if self.config.get('errors'):\n log.error(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(errors)))\n\n return errors\n\n return []",
"def testExtendedErrorMessage(self):\n\n json_message = current.xml.json_message\n\n msg = json_message(False, 405, message=\"Test\")\n msg = json.loads(msg)\n self.assertEqual(len(msg), 3)\n self.assertEqual(msg[\"status\"], \"failed\")\n self.assertEqual(msg[\"statuscode\"], \"405\")\n self.assertEqual(msg[\"message\"], \"Test\")",
"def load_xcat_resp(message):\n resp_list = jsonloads(message)['data']\n keys = ('info', 'data', 'node', 'errorcode', 'error')\n\n resp = {}\n\n for k in keys:\n resp[k] = []\n\n for d in resp_list:\n for k in keys:\n if d.get(k) is not None:\n resp[k].append(d.get(k))\n\n err = resp.get('error')\n if err != []:\n for e in err:\n if _is_warning(str(e)):\n # ignore known warnings or errors:\n continue\n else:\n raise ZVMException(message)\n\n _log_warnings(resp)\n\n return resp",
"def json(self):\n d = [err.json for err in self.errors]\n return d",
"def odata_error(self, request, environ, start_response, sub_code,\n message='', code=400):\n response_headers = []\n e = core.Error(None)\n e.add_child(core.Code).set_value(sub_code)\n e.add_child(core.Message).set_value(message)\n response_type = self.content_negotiation(\n request, environ, self.ErrorTypes)\n if response_type is None:\n # this is an error response, default to text/plain anyway\n response_type = params.MediaType.from_str(\n 'text/plain; charset=utf-8')\n elif response_type == \"application/atom+xml\":\n # even if you didn't ask for it, you get application/xml in this\n # case\n response_type = \"application/xml\"\n if response_type == \"application/json\":\n data = str(''.join(e.generate_std_error_json()))\n else:\n data = str(e)\n data = data.encode('utf-8')\n response_headers.append((\"Content-Type\", str(response_type)))\n response_headers.append((\"Content-Length\", str(len(data))))\n start_response(\"%i %s\" % (code, sub_code), response_headers)\n return [data]",
"def report_transaction_error_messages(self):\n response = self.__get_transaction_response()\n\n # get response data from response object\n response_data = response.json()\n\n # get error messages\n response_error = response_data['Error']\n response_error_messages = response_error['messages']\n\n # add all error messages to the report\n error_messages_to_report = []\n for response_error_message in response_error_messages:\n error_description = response_error_message['description']\n error_messages_to_report.append(error_description)\n\n return error_messages_to_report",
"def parsed_error_msg(self):\r\n # Translates the category names and messages into something more human readable\r\n message_dict = {\r\n (\"photoIdReasons\", \"Not provided\"): _(\"No photo ID was provided.\"),\r\n (\"photoIdReasons\", \"Text not clear\"): _(\"We couldn't read your name from your photo ID image.\"),\r\n (\"generalReasons\", \"Name mismatch\"): _(\"The name associated with your account and the name on your ID do not match.\"),\r\n (\"userPhotoReasons\", \"Image not clear\"): _(\"The image of your face was not clear.\"),\r\n (\"userPhotoReasons\", \"Face out of view\"): _(\"Your face was not visible in your self-photo\"),\r\n }\r\n\r\n try:\r\n msg_json = json.loads(self.error_msg)\r\n msg_dict = msg_json[0]\r\n\r\n msg = []\r\n for category in msg_dict:\r\n # find the messages associated with this category\r\n category_msgs = msg_dict[category]\r\n for category_msg in category_msgs:\r\n msg.append(message_dict[(category, category_msg)])\r\n return u\", \".join(msg)\r\n except (ValueError, KeyError):\r\n # if we can't parse the message as JSON or the category doesn't\r\n # match one of our known categories, show a generic error\r\n log.error('PhotoVerification: Error parsing this error message: %s', self.error_msg)\r\n return _(\"There was an error verifying your ID photos.\")",
"def get_aggregated_exceptions(self) -> Payload:\n return Payload(aggregated_errors=list(self._aggregated_exceptions.values()))",
"def formatErrors(self):\n errorlist = []\n xepsWithErrors = sorted(\n set(self.getParseErrors() + self.getBuildErrors()),\n key=lambda x: str(x))\n if self.getErrors() or xepsWithErrors:\n if self.getErrors():\n errorlist.append(\"********** Read errors **********\")\n for error in self.getErrors():\n errorlist.append(error)\n for xep in xepsWithErrors:\n errorlist.append(\n \"********** Error report for {} **********\".format(str(xep)))\n if xep.parseErrors:\n errorlist.append(\"********** Parsing Errors **********\")\n errors = list(set(xep.parseErrors))\n for error in errors:\n errorlist.append(error)\n if xep.buildErrors:\n errorlist.append(\"********** Build Errors **********\")\n for error in xep.buildErrors:\n if len(error.splitlines()) > 4:\n error = ''.join(error.splitlines()[:4])\n errorlist.append(error)\n return '\\n'.join(errorlist)\n else:\n return None",
"def testExtendedErrorMessageWithTree(self):\n\n json_message = current.xml.json_message\n\n msg = json_message(False, 405, message=\"Test\", tree='{\"test\": \"value\"}')\n msg = json.loads(msg)\n self.assertEqual(len(msg), 4)\n self.assertEqual(msg[\"status\"], \"failed\")\n self.assertEqual(msg[\"statuscode\"], \"405\")\n self.assertEqual(msg[\"message\"], \"Test\")\n self.assertTrue(isinstance(msg[\"tree\"], dict))\n tree = msg[\"tree\"]\n self.assertEqual(len(tree), 1)\n self.assertEqual(tree[\"test\"], \"value\")",
"def extended(self) -> List:\n return List([String.build(self.maintype), String.build(self.subtype),\n _ParamsList(self.content_type_params),\n String.build(self.content_id),\n String.build(self.content_description),\n String.build(self.content_transfer_encoding,\n fallback=b'7BIT'),\n Number(self.size),\n self.envelope_structure,\n self.body_structure.extended,\n Number(self.lines),\n String.build(self.body_md5),\n String.build(self.content_disposition),\n String.build(self.content_language),\n String.build(self.content_location)])",
"def get_messages(self):\n other_user_email = request.args.get('other_user_email')\n page = request.args.get('page')\n per_page = request.args.get('per_page')\n if not other_user_email or not page or not per_page:\n self.logger.debug(messages.MISSING_FIELDS_ERROR % \"query params\")\n return messages.ERROR_JSON % messages.MISSING_FIELDS_ERROR % \"query params\", 400\n email_token = auth.current_user()[0]\n page = int(page)\n per_page = int(per_page)\n # App sends starting with 1 but we start at 0\n page -= 1\n try:\n message_list, pages = self.friend_database.get_conversation(email_token, other_user_email, per_page, page)\n except NoMoreMessagesError:\n self.logger.debug(messages.NO_MORE_PAGES_ERROR)\n return messages.NO_MORE_PAGES_ERROR, 404\n message_list = [{k:v for k,v in m._asdict().items() if k != \"hidden_to\"} for m in message_list]\n for i in range(len(message_list)):\n message_list[i][\"timestamp\"] = message_list[i][\"timestamp\"].isoformat()\n return json.dumps({\"messages\": message_list, \"pages\": pages}), 200",
"def get_response(self):\n return self.messages",
"def deserialize_known_exception(error):\n message = error['message']\n\n known_exception_type_kwargs = error['known_exception_type_kwargs']\n known_exception_type = getattr(exceptions, error['known_exception_type'])\n known_exception_type_args = error['known_exception_type_args']\n\n if error['append_message']:\n known_exception_type_args.append(message)\n else:\n known_exception_type_args.insert(0, message)\n return known_exception_type(\n *known_exception_type_args,\n **known_exception_type_kwargs\n )",
"def get_error(self) -> List[str]:\n return []",
"def get_error(self) -> List[str]:\n return []",
"def process_sub_serializer_errors(self, serializer_error_dict, error_type):\n sub_serializer_errors = serializer_error_dict.get('errors', [])\n sub_serializer_non_field_errors = serializer_error_dict.get('non_field_errors', None)\n result = []\n for sub_error in sub_serializer_errors:\n if sub_error['field'] is None:\n sub_error['field'] = error_type\n result.append(sub_error)\n if sub_serializer_non_field_errors is not None:\n result.extend(\n self.get_non_field_error_entries(sub_serializer_non_field_errors)\n )\n return result",
"def extract_messages(self,msg_list):\n msgs = []\n for m in msg_list:\n msgs.append(json.loads(str(m)))\n return msgs",
"def parsed_error_msg(self):\r\n return self.error_msg",
"def _process_message(self, response):\n message = str()\n try:\n message = response.json()\n except (simplejson.JSONDecodeError, ValueError) as e:\n message = response.text\n return message",
"def get_errors(response):\n errors = response.get(\"error\")\n if errors:\n return [e.get(\"message\") for e in errors]\n return None",
"def extended(self) -> List:\n parts = [part.extended for part in self.parts]\n return List([_Concatenated(parts), String.build(self.subtype),\n _ParamsList(self.content_type_params),\n String.build(self.content_disposition),\n String.build(self.content_language),\n String.build(self.content_location)])",
"def _get_errors(exc):\n if hasattr(exc, 'message'):\n errors = exc.messages\n else:\n errors = [str(exc)]\n return errors",
"def list(self, query_params=None, **kwargs):\n # type: (WebhookListQueryParams, dict) -> Webhook\n\n return self.api_client.get(\n '/notifications/webhooks/encoding/encodings/error',\n query_params=query_params,\n pagination_response=True,\n type=Webhook,\n **kwargs\n )",
"def _decode(self, message):\n raise NotImplementedError(\"_decode needs to be implemented in {} subclass\".format(type(self).__name__))",
"def __call__(self, environ, start_response):\n start_response(self.status, self.headers)\n return [self.message] if not isinstance(self.message, list) else self.message",
"def msgs_from_bytes(self, b):\n msgs = []\n # User remainder bytes\n parse_bytes = self.remainder + b.decode('ascii')\n # Find the first frame delimiter\n i = parse_bytes.find('\\r\\n')\n while i >= 0:\n # Try to parse a single message\n m = self._parse_msg(parse_bytes[:i])\n # Remove parsed bytes and delimter\n parse_bytes = parse_bytes[i+2:]\n # Add parsed message, if any\n if m:\n msgs.append(m)\n self.logger.debug('Parsed ASCII frame: address={}, function={}, len={}'.format(m.address, m.function, len(m.data) if m.data else 0))\n #else - warn?\n i = parse_bytes.find('\\r\\n')\n # Store any remaining bytes for the next pass\n self.remainder = parse_bytes\n return msgs",
"def _processGETErr(self, e, request):\r\n if e.check(InvalidRequest):\r\n msg = e.getErrorMessage()\r\n code = httplib.BAD_REQUEST\r\n elif e.check(UnauthorizedLogin):\r\n msg = e.getErrorMessage()\r\n code = httplib.UNAUTHORIZED\r\n elif e.check(InternalError):\r\n e.printTraceback()\r\n msg = 'Internal Error'\r\n code = httplib.INTERNAL_SERVER_ERROR\r\n else:\r\n e.printTraceback()\r\n msg = 'Fatal Error'\r\n code = httplib.INTERNAL_SERVER_ERROR\r\n\r\n self._render_GET(request, code, 'text/plain; charset=utf-8', msg)",
"def parse_last_exception(message):\n for pattern, response in patterns:\n items_found = re.findall(pattern, repr(message))\n if items_found:\n #print(\"FOUND\", items_found)\n print_exception_message(response, items_found[0])\n break\n else:\n unrecognised_exception(message)",
"def error_wrapper(x):\n errors = list()\n for error_key, error_list in list(x.items()):\n for error in error_list:\n if error_key == 'non_field_errors':\n errors.append(error)\n else:\n errors.append(\"%s: %s\" % (error_key, error))\n return errors",
"def _decode_raw_response(\n self, bound_obj: Any, message: Message, response_encoded: str\n ) -> Response | SysResponse:\n response: Response | SysResponse\n try:\n response_dict = self.protocol.decode_dict(response_encoded)\n response = self.protocol.response_from_dict(response_dict)\n if self._decode_filter_call is not None:\n self._decode_filter_call(\n bound_obj, message, response_dict, response\n )\n except Exception as exc:\n response = ErrorSysResponse(\n error_message='Error decoding raw response.',\n error_type=ErrorSysResponse.ErrorType.LOCAL,\n )\n # Since we'll be looking at this locally, we can include\n # extra info for logging/etc.\n response.set_local_exception(exc)\n return response",
"def __get_response_error(message, response):\n\n rjson = response.json()\n error_description = \"Code %s - %s\" %(str(response.status_code), rjson.get('message'))\n\n return {\n 'app_message': \"%s\" % (message),\n 'error_description': \"[%s] - %s\" % (message, error_description),\n 'code': response.status_code\n }",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def __iter__(self):\n return iter([self.format_message(record) for record in self._messages])",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f'{field} : {error}')\n return errorMessages",
"def get_messages(self, max_messages):\n raw = self.redis_client.lrange(self.message_list, 0, max_messages)\n messages = (m for m in raw if m != b\"null\")\n messages = (m.decode(\"utf-8\") for m in messages)\n yield from map(json.loads, messages)",
"def validation_errors_to_error_messages(validation_errors):\n error_messages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n error_messages.append(f\"{field}: {error}\")\n return error_messages",
"def get_messages_from_doctype(name):\n\tmessages = []\n\tmeta = frappe.get_meta(name)\n\n\tmessages = [meta.name, meta.module]\n\n\tif meta.description:\n\t\tmessages.append(meta.description)\n\n\t# translations of field labels, description and options\n\tfor d in meta.get(\"fields\"):\n\t\tmessages.extend([d.label, d.description])\n\n\t\tif d.fieldtype=='Select' and d.options:\n\t\t\toptions = d.options.split('\\n')\n\t\t\tif not \"icon\" in options[0]:\n\t\t\t\tmessages.extend(options)\n\n\t# translations of roles\n\tfor d in meta.get(\"permissions\"):\n\t\tif d.role:\n\t\t\tmessages.append(d.role)\n\n\tmessages = [message for message in messages if message]\n\tmessages = [('DocType: ' + name, message) for message in messages if is_translatable(message)]\n\n\t# extract from js, py files\n\tif not meta.custom:\n\t\tdoctype_file_path = frappe.get_module_path(meta.module, \"doctype\", meta.name, meta.name)\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \".js\"))\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \"_list.js\"))\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \"_list.html\"))\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \"_calendar.js\"))\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \"_dashboard.html\"))\n\n\t# workflow based on doctype\n\tmessages.extend(get_messages_from_workflow(doctype=name))\n\n\treturn messages",
"def retrieve_error_messages(self):\n return self.errors_seen[:]",
"async def _handle_response(response: ClientResponse) -> Dict:\n content = await response.json(encoding='utf-8', loads=loads)\n if response.status != 200:\n for member in JmRpcErrorType:\n if content['message'] != member.value:\n continue\n raise JmRpcError(response.status, content)\n response.raise_for_status()\n return content",
"def errorResponse(errormessage, format, extraJSON={}): \n \n if format == 'csv':\n return CSVResponse(\n [{'errormessage': errormessage}],\n fields=('errormessage',) )\n \n else:\n json_objects = extraJSON.copy()\n json_objects['error'] = True\n json_objects['errormessage'] = errormessage\n return JSONResponse(json_objects)",
"def get_messages_from_doctype(name):\n\tmessages = []\n\tmeta = frappe.get_meta(name)\n\n\tmessages = [meta.name, meta.module]\n\n\tif meta.description:\n\t\tmessages.append(meta.description)\n\n\t# translations of field labels, description and options\n\tfor d in meta.get(\"fields\"):\n\t\tmessages.extend([d.label, d.description])\n\n\t\tif d.fieldtype == \"Select\" and d.options:\n\t\t\toptions = d.options.split(\"\\n\")\n\t\t\tif not \"icon\" in options[0]:\n\t\t\t\tmessages.extend(options)\n\t\tif d.fieldtype == \"HTML\" and d.options:\n\t\t\tmessages.append(d.options)\n\n\t# translations of roles\n\tmessages.extend(d.role for d in meta.get(\"permissions\") if d.role)\n\tmessages = [message for message in messages if message]\n\tmessages = [(\"DocType: \" + name, message) for message in messages if is_translatable(message)]\n\n\t# extract from js, py files\n\tif not meta.custom:\n\t\tdoctype_file_path = frappe.get_module_path(meta.module, \"doctype\", meta.name, meta.name)\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \".js\"))\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \"_list.js\"))\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \"_list.html\"))\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \"_calendar.js\"))\n\t\tmessages.extend(get_messages_from_file(doctype_file_path + \"_dashboard.html\"))\n\n\t# workflow based on doctype\n\tmessages.extend(get_messages_from_workflow(doctype=name))\n\treturn messages",
"def _get_error_message(response):\n try:\n return response.json()[\"detail\"]\n except (KeyError, _JSONDecodeError):\n return response.text",
"def print_toml_decodeerror(cls, excep_obj):\n print(f\"{cls.ERROR_PREFIX} {cls.TOML_DECODEERROR_MESSAGE}\")\n print(excep_obj)",
"def parse_response(self, response, **kw):\n data = super().parse_response(response, **kw)\n error = data.get('error')\n if error is None:\n return data['result']\n else:\n # assume error object follows json-rpc 2.0 spec formatting\n self.handle_error(code=error['code'], msg=error['message'])",
"def extended(self) -> List:\n return List([String.build(self.maintype), String.build(self.subtype),\n _ParamsList(self.content_type_params),\n String.build(self.content_id),\n String.build(self.content_description),\n String.build(self.content_transfer_encoding,\n fallback=b'7BIT'),\n Number(self.size), Number(self.lines),\n String.build(self.body_md5),\n String.build(self.content_disposition),\n String.build(self.content_language),\n String.build(self.content_location)])",
"def parse_errors(errors):\n\n try:\n return errors['detail']\n\n except KeyError:\n error_string = ''\n\n for key in errors:\n error_string += '{0}\\n'.format(errors[key][0])\n\n return error_string",
"def _get_resends(self):\n if not self.has_error():\n return []\n\n errors = []\n i = 0\n for item in self.my_json['results']:\n if item.has_key('error') and item['error'] == 'Unavailable':\n errors.append((i, item['error']))\n i += 1\n return errors",
"def derive_error_dicts(self, error_obj_list):\n results = []\n for error_obj in error_obj_list:\n if error_obj:\n results.append(self.derive_error_dict(error_obj))\n return results",
"def get(self):\n user_id = get_jwt_identity()\n user = UserModel.get_by_id(user_id)\n return [message.json() for message in MessageModel.get_package_messages(user)], 200",
"def my_custom_process_message(messages: List[str]):\n def add_message_to_list(message):\n \"\"\"\n Simple function that parses dict objects from incoming message.\n \"\"\"\n messages.append(ast.literal_eval(message))\n\n return add_message_to_list",
"def AsJson(self):\n\n return json.dumps(self._errors)",
"def extended(self) -> List:\n return List([String.build(self.maintype), String.build(self.subtype),\n _ParamsList(self.content_type_params),\n String.build(self.content_id),\n String.build(self.content_description),\n String.build(self.content_transfer_encoding,\n fallback=b'7BIT'),\n Number(self.size),\n String.build(self.body_md5),\n String.build(self.content_disposition),\n String.build(self.content_language),\n String.build(self.content_location)])",
"def error_messages(self) -> List[str]:\n spatial_msgs = []\n temporal_msgs = []\n if self.spatial:\n spatial_msgs = [m for v, m in self.spatial_validations if not v(self.spatial)]\n if self.temporal:\n temporal_msgs = [m for v, m in self.temporal_validations if not v(self.temporal)]\n\n return spatial_msgs + temporal_msgs",
"def get_error_messages(self):\n\n if len(self._sensor_results_list) == 0:\n return\n\n error_msgs = []\n\n for reading in self._sensor_results_list:\n if reading.is_error():\n error_msgs.append(reading.get_error_msg())\n\n if len(error_msgs) > 0:\n return error_msgs\n else:\n return \"No Error Readings\"",
"def to_response_data(self) -> typing.Any:\n v = self.value or {}\n error_code = v.get(\"code\", \"GenericLobotomyError\")\n error_message = v.get(\"message\", \"There was an error.\")\n return {\"Error\": {\"Code\": error_code, \"Message\": error_message}}",
"def _parse_store_error(self, response):\n default_msg = \"Failure working with the Store: [{}] {!r}\".format(\n response.status_code, response.content\n )\n try:\n error_data = response.json()\n except ValueError:\n return default_msg\n\n try:\n error_info = [(error[\"message\"], error[\"code\"]) for error in error_data[\"error-list\"]]\n except (KeyError, TypeError):\n return default_msg\n\n if not error_info:\n return default_msg\n\n messages = []\n for msg, code in error_info:\n if code:\n msg += \" [code: {}]\".format(code)\n messages.append(msg)\n return \"Store failure! \" + \"; \".join(messages)",
"def get_wtf_errors(self, wtf_errors):\n\t\tmessages = []\n\t\tmessages.append('<ol class=\"wtf-errors\">')\n\t\tfor field, errors in wtf_errors.iteritems():\n\t\t\tmessages.append(\"<li>\"+field+\": <br />\")\n\t\t\tfor error in errors:\n\t\t\t\tmessages.append(\"— \"+error+ \"<br />\")\n\t\t\tmessages.append(\"</li>\")\n\t\tmessages.append(\"</ol>\")\n\t\treturn \"\".join(messages)",
"def messaging_events(payload):\n data = json.loads(payload)\n messaging_events = data[\"entry\"][0][\"messaging\"]\n for event in messaging_events:\n if \"message\" in event and \"text\" in event[\"message\"]:\n yield event[\"sender\"][\"id\"], event[\"message\"][\"text\"].encode('unicode_escape')\n else:\n yield event[\"sender\"][\"id\"], \"rez can't parse this\"",
"def sub_jsons(self, msg):\n i = 0\n result = []\n split_msg = msg.split('}{')\n for s in range(len(split_msg)):\n if i==0 and len(split_msg)==1:\n result.append(split_msg[s])\n elif i==0 and len(split_msg)>1:\n result.append(split_msg[s]+\"}\")\n elif i==len(split_msg)-1 and len(split_msg)>1:\n result.append(\"{\"+split_msg[s])\n else:\n result.append(\"{\"+split_msg[s]+\"}\")\n i+=1\n return result",
"def _deconstruct_messages(snuba_messages):\n return [\n (json.loads(msg.payload.value.decode(\"utf-8\")), msg.payload.headers)\n for msg in snuba_messages\n ]",
"def _parse_resources(message):\n resource_list = message.get_json()\n\n # Convert message into a list if it isn\"t already\n if not isinstance(resource_list, list):\n resource_list = [resource_list]\n\n logging.info(f\"Found {len(resource_list)} resources to process\")\n\n resource_list = [Resource(resource) for resource in resource_list]\n\n return resource_list",
"def get_messages(email: str) -> list:\n\n params = {\"action\": \"getMessages\", \"login\": email, \"domain\": domain}\n\n response = requests.get(endpoint, data=params)\n email_data = json.load(response)\n\n for email_message in email_data:\n for field, value in email_message.items():\n print(f\"{field}: {value}\")\n print()",
"def process_messages(self, messages):\n\n return messages",
"def errors(self) -> List[Error]:",
"def getErrors(self) -> java.util.Collection:\n ...",
"def deserialize(msg: str) -> List[Any]:\n try:\n pythonObject = json.loads(msg)\n except json.decoder.JSONDecodeError:\n return [\"fail\", msg]\n if type(pythonObject) != list:\n raise DeserializationFailure(\"Received malformed data: {}\".format(msg))\n return pythonObject",
"def test_error_message_header_bundle_failed_codes(self):\n\n error_type = 17\n error_type_value = Error.ErrorType.OFPET_BUNDLE_FAILED\n\n error_code = 0\n\n iter_given_code = Error.ErrorType.get_class(error_type_value).__iter__()\n length = Error.ErrorType.get_class(error_type_value).__len__()\n\n while error_code < self.MAX_BUNDLE_FAILED_CODE_VALUE or length > 0:\n data = UBInt32(random.randint(2, 250)).pack()\n xid = random.randint(2, 250)\n\n test_value = b'\\x05\\x01\\x00\\x10' + UBInt32(xid).pack() + UBInt16(error_type).pack() + \\\n UBInt16(error_code).pack() + data\n\n if error_code < self.MAX_BUNDLE_FAILED_CODE_VALUE:\n error_code += 1\n\n length -= 1\n\n test_object_error_messages = Error.ErrorMsg(xid, error_type_value, iter_given_code.__next__(), data).pack()\n\n self.assertEqual(test_value, test_object_error_messages)",
"def parse_response(self):\n pass",
"def additional_validation(sub_domain, end_point, end_point_pattern, method, json_payload):\n # Reset curie cache\n CURIE_CACHE.clear()\n if sub_domain not in REGISTER_INFO:\n return {\"errors\": ['invalid sub-domain']}\n error_return = []\n for validator in REGISTER_INFO[sub_domain][\"additional-validation\"]:\n result = validator(sub_domain, end_point, end_point_pattern, method, json_payload)\n error_return = error_return + result['errors']\n CURIE_CACHE.clear()\n return {\"errors\": error_return}",
"def _construct_error_response_body(error_type, error_message):\n # OrderedDict is used to make testing in Py2 and Py3 consistent\n return json.dumps(OrderedDict([(\"Type\", error_type), (\"Message\", error_message)]))",
"def filter_validation_errors(errors):\n error_messages = []\n for field, msgs in errors.items():\n if isinstance(msgs, dict):\n for f, m in msgs.items():\n error_messages.append(dict(\n field=f,\n message=m,\n code=error_codes['validation_error'],\n ))\n else:\n error_messages.append(dict(\n field=field,\n message=msgs,\n code=error_codes['validation_error'],\n ))\n return error_messages",
"def __get_msg_list(self):\n msg_list = []\n eld_msg_group = ELD_msg_group()\n with open(self.simulation_source_file, 'r') as sim_file:\n for line in sim_file:\n group_complete = False\n if line.startswith('#'):\n eld_msg_group.description = self.__get_description(line)\n elif line.startswith('speed'):\n # Normal line: parsing signal values for J1939 messages\n eld_msg_group.vehicle_speed = self.__get_int_value_from_line(line, 'speed')\n eld_msg_group.vehicle_distance = self.__get_int_value_from_line(line, 'distance')\n eld_msg_group.engine_speed = self.__get_int_value_from_line(line, 'engine_rpm')\n eld_msg_group.engine_hours = self.__get_engine_hours_from_line(line)\n elif line.startswith('duration'):\n eld_msg_group.duration = self.__get_duration_from_line(line)\n group_complete = True\n else:\n print('Error: Unknown simulation line format: {0}'.format(line))\n\n if group_complete:\n msg_list.append(eld_msg_group)\n eld_msg_group = ELD_msg_group()\n return msg_list",
"def FormatErrorMessage(values):\n return (http.HTTP_APP_JSON, serializer.DumpJson(values))",
"def handle_exception_24011(msgs):\n\n cp_class_name = \"short message and notification transfer on CM\"\n rp_class_name = \"short message and notification transfer on CM-RP messages\"\n types = {\n # 'cp-data' [1, cp_Class_name] is embedding below messages\n \"rp-data\": [0, rp_class_name],\n \"rp-data\": [1, rp_class_name],\n \"rp-ack\": [2, rp_class_name],\n \"rp-ack\": [3, rp_class_name],\n \"rp-error\": [4, rp_class_name],\n \"rp-error\": [5, rp_class_name],\n \"rp-smma\": [6, rp_class_name],\n \"cp-ack\": [4, cp_class_name],\n \"cp-error\": [16, cp_class_name],\n }\n\n return msgs, types",
"def _process_api_response(self, response, commands, raw_text=False):\n\n response_list = json.loads(response)\n if isinstance(response_list, dict):\n response_list = [response_list]\n\n # Add the 'command' that was executed to the response dictionary\n for i, response_dict in enumerate(response_list):\n response_dict[\"command\"] = commands[i]\n\n new_response = []\n for response in response_list:\n\n # Dectect errors\n self._error_check(response)\n\n # Some commands like \"show run\" can have a None result\n cmd_response = response.get(\"result\")\n if cmd_response is None:\n cmd_response = {}\n\n # Normalize the response data structure\n response_dict = {\"command\": response[\"command\"]}\n if response and raw_text:\n response_dict[\"result\"] = cmd_response.get(\"msg\")\n elif response and not raw_text:\n response_dict[\"result\"] = cmd_response.get(\"body\")\n else:\n raise NXAPIError(\"Unexpected value encountered processing response.\")\n new_response.append(response_dict)\n\n return new_response",
"def parseMsg(self):\n # These 4 elements are always present\n # \"ToUserName\"\n # \"FromUserName\"\n # \"CreateTime\"\n # \"MsgType\"\n\n # Following elements depends on MsgType\n # \"MsgId\"\n # \"Content\"\n # \"MediaId\"\n # \"PicUrl\"\n # \"Format\"\n # \"ThumbMediaId\"\n # \"Location_X\"\n # \"Location_Y\"\n # \"Scale\"\n # \"Label\"\n # \"Title\"\n # \"Description\"\n # \"Url\"\n # \"Event\"\n # \"EventKey\"\n # \"Ticket\"\n # \"Latitude\"\n # \"Longitude\"\n # \"Precision\"\n # \"Recognition\"\n\n def getField(req, key):\n if req.find(key) != None:\n return req.find(key).text\n\n\n msg = {}\n req = et.fromstring(self.request.body.decode(\"utf-8\"))\n\n # These 4 elements are always present\n msg[\"ToUserName\"] = getField(req, \"ToUserName\")\n msg[\"FromUserName\"] = getField(req, \"FromUserName\")\n msg[\"CreateTime\"] = getField(req, \"CreateTime\")\n msg[\"MsgType\"] = getField(req, \"MsgType\")\n\n # Following elements depends on MsgType\n msg[\"MsgId\"] = getField(req, \"MsgId\")\n msg[\"Content\"] = getField(req, \"Content\")\n msg[\"MediaId\"] = getField(req, \"MediaId\")\n msg[\"PicUrl\"] = getField(req, \"PicUrl\")\n msg[\"Format\"] = getField(req, \"Format\")\n msg[\"ThumbMediaId\"] = getField(req, \"ThumbMediaId\")\n msg[\"Location_X\"] = getField(req, \"Location_X\")\n msg[\"Location_Y\"] = getField(req, \"Location_Y\")\n msg[\"Scale\"] = getField(req, \"Scale\")\n msg[\"Label\"] = getField(req, \"Label\")\n msg[\"Title\"] = getField(req, \"Title\")\n msg[\"Description\"] = getField(req, \"Description\")\n msg[\"Url\"] = getField(req, \"Url\")\n msg[\"Event\"] = getField(req, \"Event\")\n msg[\"EventKey\"] = getField(req, \"EventKey\")\n msg[\"Ticket\"] = getField(req, \"Ticket\")\n msg[\"Latitude\"] = getField(req, \"Latitude\")\n msg[\"Longitude\"] = getField(req, \"Longitude\")\n msg[\"Precision\"] = getField(req, \"Precision\")\n msg[\"Recognition\"] = getField(req, \"Recognition\")\n return msg",
"def process_error_response(self, resources, resource, api, operation,\n error_response, context):\n pass",
"def get_errors(self, response: response_domain_model.Response, question_code: str) -> Sequence['ValidationError']:\n ...",
"def list_messages(self):",
"def get_internal_errors(self) -> Dict[str, int]:\n self.serial.write(b\"D!\")\n values = self.__read_response(4)\n first_address_byte_errors = self.__extract_int(values[0], b\"!E1\")\n command_byte_errors = self.__extract_int(values[1], b\"!E2\")\n second_address_byte_errors = self.__extract_int(values[2], b\"!E3\")\n PEC_byte_errors = self.__extract_int(values[3], b\"!E4\")\n\n return {\n \"first_address_byte_errors\": first_address_byte_errors,\n \"command_byte_errors\": command_byte_errors,\n \"second_address_byte_errors\": second_address_byte_errors,\n \"PEC_byte_errors\": PEC_byte_errors,\n }",
"def getMessages(self):\n raise NotImplementedError(\"Child class must implement this\")",
"def test_decode_errors(self):\n if self._invalid_encoded:\n self.assert_raises((ValueError, jsonschema.exceptions.ValidationError),\n self.import_cls.decode,\n self._invalid_encoded[0], self.typedef)",
"def _msg(response):\n try:\n return response.json().get('message')\n except simplejson.scanner.JSONDecodeError:\n return response.text\n except Exception: # pylint: disable=W0703\n return 'Unexpected error.'",
"def xen_api_error(error):\n if type(error) == tuple:\n error = list(error)\n if type(error) != list:\n error = [error]\n if len(error) == 0:\n error = ['INTERNAL_ERROR', 'Empty list given to xen_api_error']\n\n return { \"Status\": \"Failure\",\n \"ErrorDescription\": [str(x) for x in error] }",
"def addMessage():\n\ttmp = [[] for x in GLOBALS.SUPER_PEER_LIST]\n\tglobal response\n\tglobal num\n\tif num != 0:\n\t\tresponse.append([])\n\tfor x in GLOBALS.SUPER_PEER_LIST:\n\t\tresponse[num].append(tmp)\n\tnum +=1",
"def _serialize_event_error(event):\n if not event.error_code:\n return None\n\n return {\n \"code\": event.error_code,\n \"message\": MessagingEvent.ERROR_MESSAGES.get(event.error_code, None),\n \"message_detail\": event.additional_error_text\n }",
"def error(self) -> list:\n return self.__err",
"def get_er_exceptions():\n express_route_exceptions_lst = []\n try:\n for i in get_data():\n if i['expressRoute'] is False:\n express_route_exceptions_lst.append(i)\n express_route_exceptions_dic = {'expressRoutesExceptions': express_route_exceptions_lst}\n return get_json(express_route_exceptions_dic)\n except ValueError as e:\n print(e)",
"def extract_messages_from_code(code):\n\tfrom jinja2 import TemplateError\n\n\ttry:\n\t\tcode = frappe.as_unicode(render_include(code))\n\n\t# Exception will occur when it encounters John Resig's microtemplating code\n\texcept (TemplateError, ImportError, InvalidIncludePath, OSError) as e:\n\t\tif isinstance(e, InvalidIncludePath):\n\t\t\tfrappe.clear_last_message()\n\n\tmessages = []\n\n\tfor m in TRANSLATE_PATTERN.finditer(code):\n\t\tmessage = m.group(\"message\")\n\t\tcontext = m.group(\"py_context\") or m.group(\"js_context\")\n\t\tpos = m.start()\n\n\t\tif is_translatable(message):\n\t\t\tmessages.append([pos, message, context])\n\n\treturn add_line_number(messages, code)",
"def _create_response_objects(self) -> list[JsonDict]:\n responses = []\n for feat_type, feat_name, _ in self.features:\n if feat_type.is_array():\n feat_name = cast(str, feat_name) # can only be string since it's an array type\n responses.append(SentinelHubRequest.output_response(feat_name, MimeType.TIFF))\n elif feat_type.is_meta():\n responses.append(SentinelHubRequest.output_response(\"userdata\", MimeType.JSON))\n else:\n # should not happen as features have already been validated\n raise ValueError(f\"{feat_type} not supported!\")\n\n return responses",
"def decode_produce_response(cls, response):\n return [\n kafka.structs.ProduceResponsePayload(topic, partition, error, offset)\n for topic, partitions in response.topics\n for partition, error, offset in partitions\n ]",
"def _parse_message(self, exc):\n return '%s: %s' % (exc.__class__.__name__, str(exc))",
"def _rest_error(self, status_code, error_code, message):\n return {\"status_code\": status_code, \"error_code\": error_code, \"message\": message}",
"def error(self):\n errors = self._info.get('error', {}).get('errors')\n if not errors:\n return None\n return ' '.join(err.get('message', 'unknown') for err in errors)"
] | [
"0.6861036",
"0.57295793",
"0.5393745",
"0.52291447",
"0.51675904",
"0.50977695",
"0.50927067",
"0.50382864",
"0.499844",
"0.49484342",
"0.48670247",
"0.4862164",
"0.48498005",
"0.48152092",
"0.479743",
"0.4794128",
"0.4794128",
"0.4792617",
"0.47888657",
"0.4763331",
"0.4752166",
"0.4748517",
"0.4732601",
"0.4730067",
"0.47297788",
"0.47293502",
"0.47199076",
"0.47054747",
"0.47044468",
"0.4698819",
"0.46827123",
"0.46489456",
"0.46453884",
"0.46339697",
"0.46339697",
"0.46339697",
"0.46339697",
"0.46339697",
"0.4622355",
"0.46218002",
"0.46133357",
"0.46076152",
"0.46063647",
"0.4595829",
"0.45903173",
"0.4581084",
"0.45777446",
"0.4574055",
"0.45713112",
"0.4566418",
"0.4561458",
"0.4556256",
"0.45526767",
"0.45525008",
"0.45393783",
"0.4531495",
"0.45252824",
"0.4524765",
"0.4521154",
"0.4519354",
"0.4508473",
"0.45070025",
"0.45042813",
"0.4502401",
"0.45014098",
"0.4499757",
"0.44951597",
"0.4491617",
"0.44812137",
"0.44713345",
"0.44671583",
"0.44618067",
"0.44605285",
"0.44579586",
"0.44572255",
"0.4438553",
"0.4436095",
"0.4424718",
"0.44246212",
"0.4422631",
"0.44129798",
"0.44067544",
"0.44009933",
"0.43945137",
"0.43924776",
"0.4385871",
"0.43849665",
"0.43782228",
"0.4377744",
"0.43766534",
"0.43725443",
"0.43699938",
"0.43693298",
"0.43691802",
"0.43671852",
"0.43638575",
"0.43609723",
"0.4357939",
"0.43576214",
"0.4350888"
] | 0.8262206 | 0 |
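The record above resolves registry-scoped message IDs by substituting %1..%N placeholders with MessageArgs and appending any Resolution text. A standalone sketch of that substitution, with a hand-rolled registry whose layout is illustrative rather than an actual Redfish registry schema:

# Minimal registry; real registries are loaded from the service.
MESSAGE_REGISTRIES = {
    'iLO': {
        'Messages': {
            'ResetRequired': {
                'Message': 'A reset of %1 is required.',
                'NumberOfArgs': 1,
                'Resolution': 'Reset the system.',
            },
        },
    },
}


def resolve(message_id, args):
    # A MessageID looks like '<Registry>.<major>.<minor>.<Key>'; the
    # registry is the first dotted component, the message key the last.
    parts = message_id.split('.')
    registry, msgkey = parts[0], parts[-1]
    entry = MESSAGE_REGISTRIES.get(registry, {}).get('Messages', {}).get(msgkey)
    if entry is None:
        # No matching registry loaded: fall back to the raw message ID.
        return message_id
    text = message_id + ': ' + entry['Message']
    for n in range(entry['NumberOfArgs']):
        # Placeholders are 1-based: %1, %2, ...
        text = text.replace('%' + str(n + 1), str(args[n]))
    if entry.get('Resolution', 'None') != 'None':
        text += ' ' + entry['Resolution']
    return text


# resolve('iLO.2.1.ResetRequired', ['the management controller'])
# -> 'iLO.2.1.ResetRequired: A reset of the management controller is
#    required. Reset the system.'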
Gets the list of decoded messages from the extended_error. | def _get_extended_error(self, extended_error):
return self._render_extended_error_message_list(extended_error) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _render_extended_error_message_list(self, extended_error):\n messages = []\n if isinstance(extended_error, dict):\n if ('Type' in extended_error and\n extended_error['Type'].startswith('ExtendedError.')):\n for msg in extended_error['Messages']:\n message_id = msg['MessageID']\n x = message_id.split('.')\n registry = x[0]\n msgkey = x[len(x) - 1]\n\n # if the correct message registry is loaded,\n # do string resolution\n if (registry in self.message_registries and msgkey in\n self.message_registries[registry]['Messages']):\n rmsgs = self.message_registries[registry]['Messages']\n msg_dict = rmsgs[msgkey]\n msg_str = message_id + ': ' + msg_dict['Message']\n\n for argn in range(0, msg_dict['NumberOfArgs']):\n subst = '%' + str(argn+1)\n m = str(msg['MessageArgs'][argn])\n msg_str = msg_str.replace(subst, m)\n\n if ('Resolution' in msg_dict and\n msg_dict['Resolution'] != 'None'):\n msg_str += ' ' + msg_dict['Resolution']\n\n messages.append(msg_str)\n else:\n # no message registry, simply return the msg object\n # in string form\n messages.append(str(message_id))\n\n return messages",
"def retrieve_error_messages(self):\n return self.errors_seen[:]",
"def _get_errors(exc):\n if hasattr(exc, 'message'):\n errors = exc.messages\n else:\n errors = [str(exc)]\n return errors",
"def get_error(self) -> List[str]:\n return []",
"def get_error(self) -> List[str]:\n return []",
"def get_messages(self):\r\n return self.messages",
"def get_messages(self):\n\t\tcontents = self.archive.read_file('replay.message.events')\n\t\treturn self.protocol.decode_replay_message_events(contents)",
"def get_errors(response):\n errors = response.get(\"error\")\n if errors:\n return [e.get(\"message\") for e in errors]\n return None",
"def messages(self):\n return list(iter(self))",
"def error(self) -> list:\n return self.__err",
"def get_encoding_errors(self):\n return self._encoding_errors",
"def _get_resp_body_errors(self):\n\n if self._resp_body_errors and len(self._resp_body_errors) > 0:\n return self._resp_body_errors\n\n errors = []\n warnings = []\n resp_codes = []\n\n if self.verb is None:\n return errors\n\n dom = self.response.dom()\n if dom is None:\n return errors\n\n for e in dom.findall('Errors'):\n eSeverity = None\n eClass = None\n eShortMsg = None\n eLongMsg = None\n eCode = None\n\n try:\n eSeverity = e.findall('SeverityCode')[0].text\n except IndexError:\n pass\n\n try:\n eClass = e.findall('ErrorClassification')[0].text\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n except IndexError:\n pass\n\n try:\n eShortMsg = smart_encode(e.findall('ShortMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eLongMsg = smart_encode(e.findall('LongMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n if int(eCode) not in resp_codes:\n resp_codes.append(int(eCode))\n except IndexError:\n pass\n\n msg = str(\"Class: {eClass}, Severity: {severity}, Code: {code}, {shortMsg} {longMsg}\") \\\n .format(eClass=eClass, severity=eSeverity, code=eCode, shortMsg=eShortMsg,\n longMsg=eLongMsg)\n\n # from IPython import embed; embed()\n\n if eSeverity == 'Warning':\n warnings.append(msg)\n else:\n errors.append(msg)\n\n self._resp_body_warnings = warnings\n self._resp_body_errors = errors\n self._resp_codes = resp_codes\n\n if self.config.get('warnings') and len(warnings) > 0:\n log.warn(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(warnings)))\n\n if self.response.reply.Ack == 'Failure':\n if self.config.get('errors'):\n log.error(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(errors)))\n\n return errors\n\n return []",
"def messages(self):\n return self._messages",
"def messages(self):\n return self._messages",
"def messages(self):\n return self._messages",
"def get_response(self):\n return self.messages",
"def get_messages(self):\n data = self.socket.recv(BUF_SIZE).decode()\n return data.split('\\0')",
"def getMessages(self):\n raise NotImplementedError(\"Child class must implement this\")",
"def getErrorsList(self):\n return self.__errors",
"def get_errors(self):\n return [result for result in self.values() if result.outcome == Result.ERROR]",
"def msgs_from_bytes(self, b):\n msgs = []\n # User remainder bytes\n parse_bytes = self.remainder + b.decode('ascii')\n # Find the first frame delimiter\n i = parse_bytes.find('\\r\\n')\n while i >= 0:\n # Try to parse a single message\n m = self._parse_msg(parse_bytes[:i])\n # Remove parsed bytes and delimter\n parse_bytes = parse_bytes[i+2:]\n # Add parsed message, if any\n if m:\n msgs.append(m)\n self.logger.debug('Parsed ASCII frame: address={}, function={}, len={}'.format(m.address, m.function, len(m.data) if m.data else 0))\n #else - warn?\n i = parse_bytes.find('\\r\\n')\n # Store any remaining bytes for the next pass\n self.remainder = parse_bytes\n return msgs",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f\"{field} : {error}\")\n return errorMessages",
"def getErrors(self) -> java.util.Collection:\n ...",
"def __msgtolist(self) -> List[str]:\n return self.msg.splitlines()",
"def get_received_messages(self):\n return self.received_messages",
"def validation_errors_to_error_messages(validation_errors):\n error_messages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n error_messages.append(f\"{field}: {error}\")\n return error_messages",
"def message_lines(self):\n\n return [bapi_message_to_str(bapiret) for bapiret in self._bapirettab]",
"def get_status_messages(self):\n return self.data[\"allMessagesForFrontend\"][\"messages\"]",
"def parsed_error_msg(self):\r\n return self.error_msg",
"def validation_errors_to_error_messages(validation_errors):\n errorMessages = []\n for field in validation_errors:\n for error in validation_errors[field]:\n errorMessages.append(f'{field} : {error}')\n return errorMessages",
"def get_messages(response_dict):\n if 'messages' in response_dict:\n return response_dict.get('messages')\n else:\n return response_dict.get('message')",
"def get_error_messages(self):\n\n if len(self._sensor_results_list) == 0:\n return\n\n error_msgs = []\n\n for reading in self._sensor_results_list:\n if reading.is_error():\n error_msgs.append(reading.get_error_msg())\n\n if len(error_msgs) > 0:\n return error_msgs\n else:\n return \"No Error Readings\"",
"def errors(self) -> List[Error]:",
"def GetAll(self):\n return self._errors.copy()",
"def json(self):\n d = [err.json for err in self.errors]\n return d",
"def filter_validation_errors(errors):\n error_messages = []\n for field, msgs in errors.items():\n if isinstance(msgs, dict):\n for f, m in msgs.items():\n error_messages.append(dict(\n field=f,\n message=m,\n code=error_codes['validation_error'],\n ))\n else:\n error_messages.append(dict(\n field=field,\n message=msgs,\n code=error_codes['validation_error'],\n ))\n return error_messages",
"def getPostedFormUrlEncodedMessages(cls):\n with cls.messageLock:\n postedFormUrlEncodedMessages = cls.postedFormUrlEncodedMessages\n cls.postedFormUrlEncodedMessages = []\n return postedFormUrlEncodedMessages",
"def _get_resends(self):\n if not self.has_error():\n return []\n\n errors = []\n i = 0\n for item in self.my_json['results']:\n if item.has_key('error') and item['error'] == 'Unavailable':\n errors.append((i, item['error']))\n i += 1\n return errors",
"def extract_messages(self,msg_list):\n msgs = []\n for m in msg_list:\n msgs.append(json.loads(str(m)))\n return msgs",
"def list_messages(self):",
"def getErrors(self):\n return self.errors",
"def get_validation_errors(self):\n return [err.to_dict() for err in self._schema.validator.validation_errors]",
"def errors(self) -> Tuple[MqexsErrorInfo, ...]:\n return self.__errors",
"def report_transaction_error_messages(self):\n response = self.__get_transaction_response()\n\n # get response data from response object\n response_data = response.json()\n\n # get error messages\n response_error = response_data['Error']\n response_error_messages = response_error['messages']\n\n # add all error messages to the report\n error_messages_to_report = []\n for response_error_message in response_error_messages:\n error_description = response_error_message['description']\n error_messages_to_report.append(error_description)\n\n return error_messages_to_report",
"def Get(self, key):\n if not key:\n key = ERRORKEY_SYSTEM_DEFAULTKEYS[0]\n messages = self._errors.get(key)\n if messages:\n return list(messages)\n return None",
"def error_messages(self) -> List[str]:\n spatial_msgs = []\n temporal_msgs = []\n if self.spatial:\n spatial_msgs = [m for v, m in self.spatial_validations if not v(self.spatial)]\n if self.temporal:\n temporal_msgs = [m for v, m in self.temporal_validations if not v(self.temporal)]\n\n return spatial_msgs + temporal_msgs",
"def get_messages(self):\n return self.messages_received",
"def get_messages(self):\n return self.messages_received",
"def get_messages(self):\n return self.messages_received",
"def errors(self) -> List[Error]:\n return self._errors_files + list(self._errors.values())",
"def parsed_error_msg(self):\r\n # Translates the category names and messages into something more human readable\r\n message_dict = {\r\n (\"photoIdReasons\", \"Not provided\"): _(\"No photo ID was provided.\"),\r\n (\"photoIdReasons\", \"Text not clear\"): _(\"We couldn't read your name from your photo ID image.\"),\r\n (\"generalReasons\", \"Name mismatch\"): _(\"The name associated with your account and the name on your ID do not match.\"),\r\n (\"userPhotoReasons\", \"Image not clear\"): _(\"The image of your face was not clear.\"),\r\n (\"userPhotoReasons\", \"Face out of view\"): _(\"Your face was not visible in your self-photo\"),\r\n }\r\n\r\n try:\r\n msg_json = json.loads(self.error_msg)\r\n msg_dict = msg_json[0]\r\n\r\n msg = []\r\n for category in msg_dict:\r\n # find the messages associated with this category\r\n category_msgs = msg_dict[category]\r\n for category_msg in category_msgs:\r\n msg.append(message_dict[(category, category_msg)])\r\n return u\", \".join(msg)\r\n except (ValueError, KeyError):\r\n # if we can't parse the message as JSON or the category doesn't\r\n # match one of our known categories, show a generic error\r\n log.error('PhotoVerification: Error parsing this error message: %s', self.error_msg)\r\n return _(\"There was an error verifying your ID photos.\")",
"def get_messages(self, max_messages):\n raw = self.redis_client.lrange(self.message_list, 0, max_messages)\n messages = (m for m in raw if m != b\"null\")\n messages = (m.decode(\"utf-8\") for m in messages)\n yield from map(json.loads, messages)",
"def getParseErrors(self):\n return [x for x in self.xeps if x.parseErrors]",
"def get_messages_body(self):\n msgs_body = []\n if not self.messages:\n u_print(\" Queue.get_messages_body() ERR - There is no messages or malformed messages on queue. \")\n u_print(json.dumps(self.messages, indent=4))\n sys.exit(1)\n\n try:\n for m in self.messages:\n msgs_body.append(m.body)\n except:\n raise\n\n return msgs_body",
"def failed_messages(self, namespace, queue):\n failed = []\n for m in self.messages(namespace, queue):\n if m.error:\n failed.append(m)\n return failed",
"def error(self):\n errors = self._info.get('error', {}).get('errors')\n if not errors:\n return None\n return ' '.join(err.get('message', 'unknown') for err in errors)",
"def Errors(self):\r\n\t\treturn self._get_attribute('errors')",
"def filter_draft_errors(result):\n error_messages = []\n for field, msgs in result.get('messages', {}).items():\n if msgs.get('state', None) == 'error':\n for m in msgs['messages']:\n error_messages.append(dict(\n field=field,\n message=m,\n code=error_codes['validation_error'],\n ))\n return error_messages",
"def errors(self):\n return self._properties.get(\"errors\")",
"def get_messages(self):\n return [MinimalMessage(self, message) for message in self.messages]",
"def errors(self):\n return self._errors",
"def get_messages(self):\n res = self.conn.cursor().execute(\"SELECT * FROM messages\")\n return res.fetchall()",
"def get_errors(self, path: str,\n is_ancillary: bool = False,\n is_system: bool = False,\n is_removed: bool = False) -> List[str]:\n u_file = self.__api.files.get(path, is_ancillary=is_ancillary,\n is_system=is_system,\n is_removed=is_removed)\n return [e.message for e in u_file.errors]",
"def errors(self):\n return self.__errors",
"def extract_items(self, msg):\n # TODO: Create an entity extraction nlp pipeline for this\n items = re.findall('\"[\\w\\s,?!\\-]*\"|\\'[\\w\\s,?!\\-]*\\'', msg)\n return [item[1:-1] for item in items]",
"def decomptcperrmoredata(self) :\n\t\ttry :\n\t\t\treturn self._decomptcperrmoredata\n\t\texcept Exception as e:\n\t\t\traise e",
"def errors (self):\n return self._errors",
"def errors (self):\n return self._errors",
"def get_status_messages(self):\n\n try:\n subContext = conf.EHST_MESSAGES\n connHandler = self._tap._TapPlus__getconnhandler()\n response = connHandler.execute_tapget(subContext, verbose=False)\n if response.status == 200:\n for line in response:\n string_message = line.decode(\"utf-8\")\n print(string_message[string_message.index('=') + 1:])\n except OSError:\n print(\"Status messages could not be retrieved\")",
"def get_append_messages(self):\n\t\treturn self._appendMessages",
"def messages(self) -> dict:\n raise NotImplementedError",
"def get_messages(self):\n return self.addresses",
"def decode_message(self, raw):\n return raw.decode('utf-8')",
"def get_unread_messages(self):\n self.chat.click()\n loaded_messages = self.__get_loaded_messages()\n for message in loaded_messages:\n try:\n if message.get_attribute(\"class\") == \"XFAMv focusable-list-item\":\n unread_index = loaded_messages.index(message)\n return loaded_messages[unread_index + 1:]\n except:\n continue\n return []",
"def error_details(self):\n return self._error_details",
"def fetchLogs(self):\n return [record.msg for record in self.handler.buffer]",
"def all_received_messages(self):\n request = {'token': self.token, 'include_read': True}\n return Session.send_request('messages', request, Session.FULL_RESPONSE_OR_NONE)",
"def detailed_error_messages(self) -> Optional['outputs.EnabledConfigResponse']:\n return pulumi.get(self, \"detailed_error_messages\")",
"def recorded_messages(self):\n messages = []\n for time in sorted(self.reception_records):\n messages.extend(self.reception_records[time])\n return messages",
"def messages(self):\n return MessageNotification.messages",
"def _messages(self):\n q = [json.loads(i)['message'] for i in self.client.kv.get(\n 'rhumba.q.testqueue', [])]\n return q",
"def error_data(self):\n\n if not self.__settings:\n return []\n\n return self.__transaction_errors",
"def get_messages(email: str) -> list:\n\n params = {\"action\": \"getMessages\", \"login\": email, \"domain\": domain}\n\n response = requests.get(endpoint, data=params)\n email_data = json.load(response)\n\n for email_message in email_data:\n for field, value in email_message.items():\n print(f\"{field}: {value}\")\n print()",
"def Errors(self):\n return self._get_attribute('errors')",
"def get_msgs(self):\n msgs = []\n while True:\n try:\n msgs.append(self.get_msg(block=False))\n except Empty:\n break\n return msgs",
"def get_msgs(self):\n msgs = []\n while True:\n try:\n msgs.append(self.get_msg(block=False))\n except Empty:\n break\n return msgs",
"def get_wtf_errors(self, wtf_errors):\n\t\tmessages = []\n\t\tmessages.append('<ol class=\"wtf-errors\">')\n\t\tfor field, errors in wtf_errors.iteritems():\n\t\t\tmessages.append(\"<li>\"+field+\": <br />\")\n\t\t\tfor error in errors:\n\t\t\t\tmessages.append(\"— \"+error+ \"<br />\")\n\t\t\tmessages.append(\"</li>\")\n\t\tmessages.append(\"</ol>\")\n\t\treturn \"\".join(messages)",
"def doDecode(self):\n raise CipherError(\"override this funct and return the decoded msg\")",
"def get_error_message(self):\n return self.error_message.get_error_message()",
"def get_messages(\n event: Dict[str, Any]\n ) -> List[Dict[str, Any]]:\n reply_message = event.get(\"reply_message\", {})\n return [reply_message] if reply_message else event.get(\"fwd_messages\", [])",
"def list(self, query_params=None, **kwargs):\n # type: (WebhookListQueryParams, dict) -> Webhook\n\n return self.api_client.get(\n '/notifications/webhooks/encoding/encodings/error',\n query_params=query_params,\n pagination_response=True,\n type=Webhook,\n **kwargs\n )",
"def _serialize_event_messages(event):\n if event.content_type == MessagingEvent.CONTENT_EMAIL:\n return _get_messages_for_email(event)\n\n if event.content_type in (MessagingEvent.CONTENT_SMS, MessagingEvent.CONTENT_SMS_CALLBACK):\n return _get_messages_for_sms(event)\n\n if event.content_type in (MessagingEvent.CONTENT_SMS_SURVEY, MessagingEvent.CONTENT_IVR_SURVEY):\n return _get_messages_for_survey(event)\n return []",
"def decode_message(self, key):\n\n decoded_message = ''\n for char in self.message:\n if char.isalpha():\n decoded_char = self.convert_char(char, key)\n decoded_message = decoded_message + decoded_char\n else:\n decoded_message = decoded_message + char\n return decoded_message",
"def analysis_errors(self) -> str:\n errors = []\n\n # Get any callback errors\n for cid, callback in self._analysis_callbacks.items():\n if callback.status == AnalysisStatus.ERROR:\n errors.append(f\"\\n[Analysis Callback ID: {cid}]: {callback.error_msg}\")\n\n return \"\".join(errors)",
"def decomptcperrdata(self) :\n\t\ttry :\n\t\t\treturn self._decomptcperrdata\n\t\texcept Exception as e:\n\t\t\traise e",
"def Errcheck(self) -> list:\n\n myError = []\n\n ErrorList = self.myFieldFox.query(\"SYST:ERR?\").split(',')\n\n Error = ErrorList[0]\n\n if int(Error) == 0:\n\n print (\"+0, No Error!\")\n\n else:\n\n while int(Error)!=0:\n\n print (\"Error #: \" + ErrorList[0])\n\n print (\"Error Description: \" + ErrorList[1])\n\n myError.append(ErrorList[0])\n\n myError.append(ErrorList[1])\n\n ErrorList = self.myFieldFox.query(\"SYST:ERR?\").split(',')\n\n Error = ErrorList[0]\n\n myError = list(myError)\n\n return myError"
] | [
"0.780336",
"0.66833067",
"0.6297517",
"0.62860835",
"0.62860835",
"0.60984194",
"0.6095849",
"0.60497504",
"0.60270995",
"0.59741163",
"0.59682614",
"0.59034014",
"0.58919424",
"0.58919424",
"0.58919424",
"0.58700424",
"0.5852855",
"0.57940054",
"0.5786559",
"0.57755375",
"0.5728941",
"0.5728451",
"0.5728451",
"0.5728451",
"0.5728451",
"0.5728451",
"0.5719009",
"0.57163906",
"0.57159895",
"0.5709386",
"0.56845653",
"0.5683327",
"0.56801224",
"0.56668764",
"0.56502336",
"0.5646901",
"0.564018",
"0.56388986",
"0.56226563",
"0.5620739",
"0.5585641",
"0.55702883",
"0.5570069",
"0.5541688",
"0.55383986",
"0.5530914",
"0.553024",
"0.5524061",
"0.5517465",
"0.5512295",
"0.55046266",
"0.55046266",
"0.55046266",
"0.5501282",
"0.54830265",
"0.5467727",
"0.5462352",
"0.54605794",
"0.54543257",
"0.5430574",
"0.5427706",
"0.5424368",
"0.5418874",
"0.5412252",
"0.537991",
"0.5368106",
"0.5363751",
"0.53577125",
"0.53571653",
"0.5349797",
"0.5349395",
"0.5349395",
"0.5340125",
"0.5330756",
"0.5326219",
"0.53240037",
"0.53096825",
"0.529735",
"0.52937436",
"0.52934897",
"0.52810955",
"0.5279236",
"0.52530926",
"0.52505124",
"0.52449626",
"0.52434456",
"0.52422965",
"0.5241523",
"0.5241142",
"0.5241142",
"0.52405095",
"0.5239984",
"0.52275753",
"0.52209604",
"0.5217707",
"0.5216032",
"0.52051204",
"0.5188696",
"0.51715976",
"0.51693666"
] | 0.7126763 | 1 |
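
The top-scored negative in the row above (0.780336) shows the pattern this row ranks: translating (category, reason) pairs from a JSON error payload into user-facing text. A minimal, self-contained sketch of that lookup follows; the category names are taken from the snippet, while the `MESSAGES` table name, the plain-string fallback (no gettext `_()` wrapper), and the logger setup are illustrative assumptions.

import json
import logging

log = logging.getLogger(__name__)

# Illustrative subset of the (category, reason) -> message table from the
# snippet above; a real table would cover every category the service emits.
MESSAGES = {
    ("photoIdReasons", "Not provided"): "No photo ID was provided.",
    ("photoIdReasons", "Text not clear"): "We couldn't read your name from your photo ID image.",
    ("userPhotoReasons", "Image not clear"): "The image of your face was not clear.",
}

def parse_error_msg(error_msg):
    # The payload is a JSON list whose first element maps each category to
    # a list of reason strings, e.g. '[{"photoIdReasons": ["Not provided"]}]'.
    try:
        categories = json.loads(error_msg)[0]
        return ", ".join(
            MESSAGES[(category, reason)]
            for category, reasons in categories.items()
            for reason in reasons
        )
    except (ValueError, KeyError, IndexError):
        log.error("Error parsing this error message: %s", error_msg)
        return "There was an error verifying your ID photos."
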
Get the system details. | def _get_host_details(self):
        # Assuming only one system is present in the collection,
        # since we are dealing with iLOs here.
status, headers, system = self._rest_get('/rest/v1/Systems/1')
if status < 300:
stype = self._get_type(system)
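            # _get_type() is assumed to normalize the resource 'Type' field to
            # its first two dotted parts (e.g. 'ComputerSystem.1.0.0' ->
            # 'ComputerSystem.1') for the membership test below.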
if stype not in ['ComputerSystem.0', 'ComputerSystem.1']:
msg = "%s is not a valid system type " % stype
raise exception.IloError(msg)
else:
msg = self._get_extended_error(system)
raise exception.IloError(msg)
return system | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def get_system_info(self) -> Dict[str, Any]:\n assert self._client is not None\n return await self._client.invoke_method(\"system.info\")",
"def get_system_info(self):\r\n method = self.public_endpoints['system_info']['method']\r\n url = self.base_url + self.public_endpoints['system_info']['url']\r\n req = requests.request(method, url)\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res",
"def system_info(self, system_id):\n\n\t\tpath = f'{self.BIKE_ENDPOINT}system/{system_id}/{self.secret_key}'\n\t\tresponse = requests.get(path).json()\n\t\tself.check_api_key(response)\n\n\t\treturn response",
"def get_system_info():\n query = {\"type\": \"op\", \"cmd\": \"<show><system><info></info></system></show>\"}\n\n return __proxy__[\"panos.call\"](query)",
"def get_system_information(self):\n\t\tsys = platform.uname()\n\t\treturn {\n\t\t\t'hostname': sys.node,\n\t\t\t'operating_system': sys.system,\n\t\t\t'version': sys.version,\n\t\t\t'release': sys.release,\n\t\t\t'processor' : sys.processor,\n\t\t\t'processor_type': sys.machine,\n\t\t}",
"def system(self):\n return self['system']",
"def getSysinfo(self, request):\r\n return self._ref.callRemote('getSysinfo')",
"def test_get_system(self):\n pass",
"def get_system_info() -> SystemInfo:\n\n assert is_windows(), 'This function is only available on Windows systems'\n\n from win32api import GetSystemInfo\n return SystemInfo(*GetSystemInfo())",
"def get(self, section=None):\n logging.info(\"GET Request for System information, section=\\\"%s\\\"\", section)\n\n system_info = get_system_info(section)\n\n return jsonify(system_info)",
"def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]",
"def systemRead():\n return",
"async def sysinfo(self, ctx: Context):\n\t\tstart = time.perf_counter()\n\t\tend = time.perf_counter()\n\t\tduration = (end - start) * 1000\n\t\tcpuavg = psutil.cpu_percent(interval=None)\n\t\tmem = psutil.virtual_memory()[2]\n\t\tdurround = round(duration, 3)\n\t\tosun = os.uname()\n\t\tawait self.send(f\"System Info | CPU: {cpuavg}% | RAM: {mem}% | Latency: {durround * 1000}ms | OS: {sys.platform}\", whisper=[ctx.author.id])",
"async def get_system(self) -> dict[str, Any]:\n cmd = await self.send_command(\"SYSTEM\", timeout=1)\n if not cmd.succeeded():\n raise ArchonError(f\"Command finished with status {cmd.status.name!r}\")\n\n keywords = str(cmd.replies[0].reply).split()\n system = {}\n for (key, value) in map(lambda k: k.split(\"=\"), keywords):\n system[key.lower()] = value\n if match := re.match(r\"^MOD([0-9]{1,2})_TYPE\", key, re.IGNORECASE):\n name_key = f\"mod{match.groups()[0]}_name\"\n system[name_key] = ModType(int(value)).name\n\n return system",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.SystemDataResponse':\n return pulumi.get(self, \"system_data\")",
"def remote_getSysinfo(self, request):\r\n # TODO : replace these calls with call to rce.util.sysinfo\r\n response_table = {\r\n 'size':self._size,\r\n 'cpu':self._cpu,\r\n 'memory': self._memeory,\r\n 'bandwidth': self._bandwidth,\r\n # 'keyword': some value or function to provide the data\r\n }\r\n\r\n return response_table[request]",
"def system_info() -> str:\n return \"\\n\".join(\n [\n f\"Python version: {platform.python_version()}\",\n f\"Python implementation: {platform.python_implementation()}\",\n f\"Python compiler: {platform.python_compiler()}\",\n f\"PyTorch version: {torch.__version__}\",\n f\"System: {platform.system() or 'Unable to determine'}\",\n f\"System version: {platform.release() or 'Unable to determine'}\",\n f\"Processor: {platform.processor() or 'Unable to determine'}\",\n f\"Number of CPUs: {multiprocessing.cpu_count()}\",\n ]\n )",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def get_system_name(self):\n\n\t\treturn self.__system_name",
"def system_info():\n requirements = get_requirements(\"sunpy\")\n groups = get_keys_list(requirements)\n extra_groups = get_extra_groups(groups, ['all', 'dev'])\n base_reqs = get_keys_list(requirements['required'])\n extra_reqs = get_keys_list(requirements['all'])\n missing_packages, installed_packages = find_dependencies(package=\"sunpy\", extras=extra_groups)\n extra_prop = {\"System\": platform.system(),\n \"Arch\": f\"{platform.architecture()[0]}, ({platform.processor()})\",\n \"Python\": platform.python_version(),\n \"sunpy\": version(\"sunpy\")}\n sys_prop = {**installed_packages, **missing_packages, **extra_prop}\n print(\"==============================\")\n print(\"sunpy Installation Information\")\n print(\"==============================\")\n print()\n print(\"General\")\n print(\"#######\")\n if sys_prop['System'] == \"Linux\":\n print(f\"OS: {distro.name()} ({distro.version()}, Linux {platform.release()})\")\n elif sys_prop['System'] == \"Darwin\":\n print(f\"OS: Mac OS {platform.mac_ver()[0]}\")\n elif sys_prop['System'] == \"Windows\":\n print(f\"OS: Windows {platform.release()} {platform.version()}\")\n else:\n print(\"Unknown OS\")\n for sys_info in ['Arch', 'sunpy']:\n print(f'{sys_info}: {sys_prop[sys_info]}')\n print(f'Installation path: {distribution(\"sunpy\")._path}')\n print()\n print(\"Required Dependencies\")\n print(\"#####################\")\n for req in base_reqs:\n print(f'{req}: {sys_prop[req]}')\n print()\n print(\"Optional Dependencies\")\n print(\"#####################\")\n for extra_req in extra_reqs:\n print(f'{extra_req}: {sys_prop[extra_req]}')",
"def _get_system_status(self):\n sysinfo_strings = self._command(self.commands[\"SYSTEM_STATUS\"])\n sysinfo_dict = {\"name\": sysinfo_strings[0]}\n for line in sysinfo_strings:\n if \":\" in line:\n key, value = line.split(\":\", 1)\n sysinfo_dict[key.lower()] = value.strip()\n\n return sysinfo_dict",
"async def get_system_info(hass, include_components):\n\n gate_id = hass.states.get('sensor.ais_secure_android_id_dom').state\n info_object = {\n 'arch': platform.machine(),\n 'dev': 'dev' in current_version,\n 'docker': False,\n 'os_name': platform.system(),\n 'python_version': platform.python_version(),\n 'timezone': dt_util.DEFAULT_TIME_ZONE.zone,\n 'version': current_version,\n 'virtualenv': os.environ.get('VIRTUAL_ENV') is not None,\n 'hassio': hass.components.hassio.is_hassio(),\n 'gate_id': gate_id,\n }\n\n if include_components:\n info_object['components'] = list(hass.config.components)\n\n if platform.system() == 'Windows':\n info_object['os_version'] = platform.win32_ver()[0]\n elif platform.system() == 'Darwin':\n info_object['os_version'] = platform.mac_ver()[0]\n elif platform.system() == 'FreeBSD':\n info_object['os_version'] = platform.release()\n elif platform.system() == 'Linux':\n import distro\n linux_dist = await hass.async_add_job(\n distro.linux_distribution, False)\n info_object['distribution'] = linux_dist[0]\n info_object['os_version'] = linux_dist[1]\n info_object['docker'] = os.path.isfile('/.dockerenv')\n\n return info_object",
"def get_systemname(self) -> str:\n\n return self.send(self.cmd.GET_SYSTEMNAME)",
"def system_data(self) -> pulumi.Output['outputs.ProxyResourceResponseSystemData']:\n return pulumi.get(self, \"system_data\")",
"def system_data(self) -> 'outputs.DataCollectionEndpointResourceResponseSystemData':\n return pulumi.get(self, \"system_data\")",
"def sys_info(self):\n\n for i in self._nodes.items():\n print(\"\\n==============================\")\n name = i[0]\n node = i[1]\n\n print(\"NODE: {}\\n\".format(name))\n\n # CPU\n print(\"CPU:\")\n self.cpu_info(node)\n\n # Grub\n print(\"\\nGrub Command Line:\")\n if \"grub\" in node:\n print(\" Current: {}\".format(node[\"grub\"][\"current_cmdline\"]))\n print(\" Configured: {}\".format(node[\"grub\"][\"default_cmdline\"]))\n\n # Huge Pages\n print(\"\\nHuge Pages:\")\n self.hugepage_info(node)\n\n # Devices\n print(\"\\nDevices:\")\n self.device_info(node)\n\n # Status\n print(\"\\nVPP Service Status:\")\n state, errors = VPPUtil.status(node)\n print(\" {}\".format(state))\n for e in errors:\n print(\" {}\".format(e))\n\n # Minimum system resources\n self.min_system_resources(node)\n\n print(\"\\n==============================\")",
"def get_version_info(self):\n sys_info_service = self.robot.all_services.get(\"sys_info\")\n if sys_info_service is not None:\n log.info(\"System version info: %s\" % sys_info_service.system_version)\n else:\n log.warning(\"Service get_version_info is not enabled!\")",
"def ex_get_hypervisor_sysinfo(self):\n xml = self.connection.getSysinfo()\n etree = ET.XML(xml)\n\n attributes = [\"bios\", \"system\", \"processor\", \"memory_device\"]\n\n sysinfo = {}\n for attribute in attributes:\n element = etree.find(attribute)\n entries = self._get_entries(element=element)\n sysinfo[attribute] = entries\n\n return sysinfo",
"def _get_system_info(target: Optional[str],\n serial_num: Optional[str]) -> Tuple[str, str]:\n\n # TODO(b/242191374): Remove when devices in swarming are no longer booted\n # into zedboot.\n if running_unattended():\n try:\n boot_device(target, BootMode.REGULAR, serial_num)\n except (subprocess.CalledProcessError, StateTransitionError):\n logging.warning('Could not boot device. Assuming in ZEDBOOT')\n return ('', '')\n wait_cmd = common.run_ffx_command(cmd=('target', 'wait', '-t', '180'),\n target_id=target,\n check=False)\n if wait_cmd.returncode != 0:\n return ('', '')\n\n return get_system_info(target)",
"def get_system_info(baseurl, cookie_header):\n url = baseurl + 'stacking/vsf/members/system_info'\n headers = {'cookie': cookie_header}\n response = requests.get(url, verify=False, headers=headers)\n if response.status_code == 200:\n return response.json()\n else:\n return response.status_code",
"def get_software_info():\n query = {\n \"type\": \"op\",\n \"cmd\": \"<request><system><software><info></info></software></system></request>\",\n }\n\n return __proxy__[\"panos.call\"](query)",
"def get(self):\n\t\treturn {\n\t\t\t'system': self.get_system_information(),\n\t\t\t'cpu': self.get_cpu_stats(),\n\t\t\t'gpu': self.get_gpu_stats(),\n\t\t\t'ram': self.get_ram_stats(),\n\t\t\t'storage': self.get_storage_stats(),\n\t\t\t'battery': self.get_battery_stats(),\n\t\t\t'temps': self.get_temperatures()\n\t\t}",
"def get_sys_info(self):\n server_provider = self.server_managers[0].get_config_value(\"provider\")\n sys_info = []\n for entry in get_network_information(self.hostlist_servers, SUPPORTED_PROVIDERS):\n if server_provider in entry.provider:\n entry.device = None\n sys_info.append(entry)\n return sys_info",
"def get_os_info(hass: HomeAssistant) -> dict[str, Any] | None:\n return hass.data.get(DATA_OS_INFO)",
"def get_info():\n global PERF_APP\n archs = None\n best_arch = None\n cipher_algos = None\n hash_algos = None\n aead_algos = None\n\n cmd = PERF_APP + ' --print-info'\n\n try:\n res = subprocess.run(cmd, stdout=subprocess.PIPE, \\\n stderr=subprocess.STDOUT, \\\n env=ENVS, shell=True, check=True)\n output = res.stdout.decode('utf-8')\n except subprocess.CalledProcessError as e:\n print(\"Error (\" + str(e.returncode) + \")\")\n print(e.output.decode('utf-8'))\n sys.exit(1)\n\n lines = output.rstrip().split('\\n')\n try:\n for line in lines:\n info = line.split(':')\n if info[0] == 'Supported architectures':\n archs = info[1].split()\n if info[0] == 'Best architecture':\n best_arch = info[1].split()\n if info[0] == 'Supported cipher algorithms':\n cipher_algos = info[1].split()\n if info[0] == 'Supported hash algorithms':\n hash_algos = info[1].split()\n if info[0] == 'Supported aead algorithms':\n aead_algos = info[1].split()\n except:\n print(\"Error parsing --print-info output:\\n\" \\\n \"{}\".format(output), file=sys.stderr)\n\n if archs is None or best_arch is None or cipher_algos is None \\\n or hash_algos is None or aead_algos is None:\n print(\"Error parsing system and app information\", file=sys.stderr)\n sys.exit(1)\n\n return archs, best_arch, cipher_algos, hash_algos, aead_algos",
"def get_system_time(self):\r\n method = self.public_endpoints['system_time']['method']\r\n url = self.base_url + self.public_endpoints['system_time']['url']\r\n req = requests.request(method, url)\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res",
"def system(self):\n try:\n return self._system\n except AttributeError:\n raise AttributeError('You must initialize the system with '\n 'createSystem before accessing the cached '\n 'object.')",
"def get_sys_name(self):\n\t\treturn call_sdk_function('PrlVmDev_GetSysName', self.handle)",
"def local_info():\n local('uname -a')",
"def getSystemByName(self,systemName):\n\n logger.debug(\"Call to getSystemByName - systemName: {}\".format(systemName))\n try:\n\n response = self.httpHandler.sendHttpRequest(\n CIC_SYSTEM_ENDPOINT+\"?\"+\n urllib.urlencode({ \"name\": systemName }))\n\n except urllib2.HTTPError as e:\n\n logger.debug(traceback.format_exc())\n\n if e.code == 404:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n flag = _checkSystemNotFound(body)\n if flag == True:\n raise KeyError(\n \"System with name '{}' was not found in TMS because it does not exist, {}\".format(systemName, body),\n \"CIC_SYSTEM_NOT_FOUND_ERR\")\n else:\n raise IOError(\n \"System with name '{}' was not found in TMS because of network/communication error, {}\".format(systemName, body),\n \"CIC_SYSTEM_COMMUNICATION_NETWORK_ERR\")\n\n elif e.code == 403:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n raise RuntimeError(\n \"User {} has no permission to look up the specified system {} in {} {}\".format(self.cicUser,systemName, self.cicUrl, body),\n \"CIC_NO_ACCESS\"\n )\n\n else:\n raise\n else:\n responseString = response.read()\n return json.loads(responseString)",
"def system(self, language=None):\n return self._get('/systems.{language}.json', language)",
"def getInfo():",
"def computer_info():\n return {\n 'system': platform.system(),\n 'architecture': platform.architecture(),\n 'name': platform.node(),\n 'release': platform.release(),\n 'version': platform.version(),\n 'machine': platform.machine(),\n 'processor': platform.processor(),\n 'virtual CPUs': mproc.cpu_count(),\n 'total RAM': _get_ram(),\n }",
"def system_properties(self):\r\n return dict(self._get_system_properties(self.java))",
"def system_data(self) -> Optional['outputs.NotebookResourceSystemDataResponse']:\n return pulumi.get(self, \"system_data\")",
"def describe_operating_systems():\n pass",
"def get_system_state(self, path, params):\n system_summary = self._get_system_summary(path)\n overview = {\n 'peer_controller_url': self._get_peer_controller_url(),\n 'summary_sources': system_summary,\n 'site_name': self._config.get('site', {}).get('name', 'unknown'),\n 'controller_name': self._get_controller_name(),\n }\n overview.update(self._distill_summary(system_summary))\n return overview",
"def platform_info(self):\n return platform.uname()._asdict()",
"def subcmd_getsystem_main(args, parameter_info):\n \n from get_system_inventory import get_system_inventory\n result = get_system_inventory(parameter_info['ip'], parameter_info['user'], parameter_info['passwd'], parameter_info['sysid'])\n \n if result['ret'] is True:\n del result['ret']\n sys.stdout.write(json.dumps(result['entries'], sort_keys=True, indent=2))\n else:\n sys.stderr.write(result['msg'])",
"def test_get_systems(self):\n pass",
"def get_system_id(self):\n return system.SystemManagement(self.client).get_system_id()",
"def system_status(system_ip):\n\n click.secho(\"\\nRetrieving the System Status\")\n\n url = base_url + \"/device/system/status?deviceId={0}\".format(system_ip)\n\n response = requests.get(url=url, headers=header,verify=False)\n if response.status_code == 200:\n items = response.json()['data']\n else:\n print(\"Failed to get system status \" + str(response.text))\n exit()\n\n print(\"\\nSystem status for Device:\",system_ip)\n\n headers = [\"Host name\", \"Up time\", \"Version\", \"Memory Used\", \"CPU system\"]\n table = list()\n\n for item in items:\n tr = [item['vdevice-host-name'], item['uptime'], item['version'], item['mem_used'], item['cpu_system']]\n table.append(tr)\n\n try:\n click.echo(tabulate.tabulate(table, headers, tablefmt=\"fancy_grid\"))\n except UnicodeEncodeError:\n click.echo(tabulate.tabulate(table, headers, tablefmt=\"grid\"))",
"def get_status(self):\n\n return self._system",
"def getSystemByUid(self,uid):\n\n logger.debug(\"Call to getSystemByUid - uid: {}\".format(uid))\n try:\n response = self.httpHandler.sendHttpRequest(CIC_SYSTEM_ENDPOINT+\"?uuid=\"+uid)\n\n except urllib2.HTTPError as e:\n\n logger.debug(traceback.format_exc())\n\n if e.code == 404:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n raise KeyError(\n \"System with uid {} not found in TMS, {}\".format(uid, body),\n \"CIC_SYSTEM_UUID_NOT_FOUND_ERR\")\n\n elif e.code == 403:\n\n body = e.read()\n logger.debug(\"Response code: {}, response body: {}\".format(e.code, body))\n raise RuntimeError(\n \"User {} has no permission to look up 'systems' in {} {}\".format(self.cicUser, self.cicUrl, body),\n \"CIC_NO_ACCESS\"\n )\n\n else:\n raise\n else:\n responseString = response.read()\n return json.loads(responseString)",
"def get_info(self) -> str:\n return self.info",
"def getInfo(self, formatted=False):\n\n\t\tinfo = {}\n\t\tinfo['Python'] = \"%d.%d.%d\" %(sys.version_info[0], sys.version_info[1], sys.version_info[2])\n\t\tinfo[__binding__] = __binding_version__\n\t\tinfo['Qt'] = QtCore.qVersion()\n\t\tinfo['OS'] = platform.system()\n\t\tinfo['Environment'] = HOST\n\n\t\tif formatted:\n\t\t\tinfo_ls = []\n\t\t\tfor key, value in info.items():\n\t\t\t\tinfo_ls.append(\"{} {}\".format(key, value))\n\t\t\tinfo_str = \" | \".join(info_ls)\n\t\t\treturn info_str\n\n\t\telse:\n\t\t\treturn info",
"def info(self):\n return self.client.call('GET', self.name + 'info')",
"def remote_info():\n run('uname -a')",
"def platform_info(self):\n return self.msg.platform_info",
"def get_system_defined(self):\n\n\t\treturn self.__system_defined",
"def get_info(self):\n pass",
"def get_info(self):\n pass",
"def test_get_info(self):\n self.addCleanup(self.sdkapi.guest_delete, self.userid)\n\n self.sdkapi.guest_create(self.userid, 1, 1024, disk_list=self.disks)\n self.sdkapi.guest_deploy(self.userid, self.image_name)\n\n # get info in shutdown state\n info_off = self.sdkapi.guest_get_info(self.userid)\n self.assertEquals(info_off['power_state'], 'off')\n self.assertEquals(info_off['mem_kb'], 0)\n self.assertEquals(info_off['cpu_time_us'], 0)\n\n # get info in active state\n self.sdkapi.guest_start(self.userid)\n self.assertTrue(self.sdkutils.wait_until_guest_in_power_state(\n self.userid, 'on'))\n time.sleep(1)\n info_on = self.sdkapi.guest_get_info(self.userid)\n self.assertEquals(info_on['power_state'], 'on')\n self.assertNotEqual(info_on['cpu_time_us'], 0)\n self.assertNotEqual(info_on['mem_kb'], 0)\n\n # get info in paused state\n self.sdkapi.guest_pause(self.userid)\n info_on = self.sdkapi.guest_get_info(self.userid)\n self.assertEquals(info_on['power_state'], 'on')\n self.assertNotEqual(info_on['cpu_time_us'], 0)\n self.assertNotEqual(info_on['mem_kb'], 0)",
"def system(self):\r\n return self.runtime",
"def get_info(self) -> str:\n raise NotImplementedError()",
"def get_sys_name(self):\n\t\treturn call_sdk_function('PrlVmDevHdPart_GetSysName', self.handle)"
] | [
"0.8487843",
"0.81485045",
"0.7842302",
"0.7782143",
"0.7730701",
"0.7618187",
"0.7365681",
"0.7167398",
"0.71424323",
"0.7105484",
"0.7102612",
"0.70674133",
"0.70565844",
"0.7018115",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.7008186",
"0.6910726",
"0.6885459",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.68740845",
"0.6868526",
"0.6855317",
"0.68448824",
"0.6843005",
"0.68334633",
"0.6808339",
"0.6715575",
"0.66561955",
"0.6624723",
"0.6610222",
"0.6603702",
"0.6547785",
"0.65433776",
"0.6454325",
"0.64260226",
"0.64138365",
"0.640924",
"0.63955796",
"0.6385284",
"0.6385128",
"0.63813883",
"0.6371882",
"0.63599956",
"0.6356647",
"0.6341425",
"0.6327854",
"0.63105905",
"0.63105834",
"0.6310052",
"0.6307327",
"0.6253572",
"0.6231224",
"0.6230016",
"0.62270635",
"0.6224757",
"0.6177614",
"0.6176911",
"0.6154047",
"0.6143645",
"0.6133926",
"0.6117186",
"0.6113652",
"0.61065954",
"0.61065954",
"0.61026424",
"0.60988575",
"0.60938174",
"0.60825425"
] | 0.7909569 | 2 |
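
The `_get_host_details` document above leans on a `_rest_get` helper that is not shown. A minimal stand-in, assuming iLO's HTTPS JSON interface with basic auth; the `requests` usage, host, credentials, and `verify=False` (self-signed iLO certificates are common) are illustrative assumptions, not the library's confirmed API.

import requests

def rest_get(host, uri, auth):
    # Stand-in for the _rest_get helper assumed above: returns the
    # (status, headers, parsed-JSON body) triple the document unpacks.
    resp = requests.get("https://%s%s" % (host, uri), auth=auth, verify=False)
    return resp.status_code, resp.headers, resp.json()

status, headers, system = rest_get("ilo.example.com", "/rest/v1/Systems/1",
                                   ("admin", "password"))
if status < 300:
    print(system["Type"])  # e.g. "ComputerSystem.1.0.1"
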
Check if the bios resource exists. | def _check_bios_resource(self, properties=[]):
system = self._get_host_details()
if ('links' in system['Oem']['Hp'] and
'BIOS' in system['Oem']['Hp']['links']):
# Get the BIOS URI and Settings
bios_uri = system['Oem']['Hp']['links']['BIOS']['href']
status, headers, bios_settings = self._rest_get(bios_uri)
if status >= 300:
msg = self._get_extended_error(bios_settings)
raise exception.IloError(msg)
            # Check that each requested BIOS property is supported
            # on this platform before returning the settings.
for property in properties:
if property not in bios_settings:
# not supported on this platform
msg = ('BIOS Property "' + property + '" is not'
' supported on this system.')
raise exception.IloCommandNotSupportedError(msg)
return headers, bios_uri, bios_settings
else:
msg = ('"links/BIOS" section in ComputerSystem/Oem/Hp'
' does not exist')
raise exception.IloCommandNotSupportedError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def does_resource_exist(resource):\n try:\n resource.load()\n return True\n except botocore.exceptions.ClientError as e:\n if e.response['Error']['Code'] == 'ValidationError':\n return False\n else:\n raise e",
"def ResourceExists(self, name):\n pass",
"def ResourceExists(resource_name, search_user_paths=True):\n try:\n ResourcePath(resource_name, search_user_paths)\n return True\n except ResourceNotFound:\n return False",
"def has(resname):\n # check HOME directory first\n if os.path.exists(os.path.join(os.getenv(\"HOME\"), \".aphla\", resname)):\n return True\n else:\n return resource_exists(__name__, resname)",
"def test_check_resource(self):\n s1 = System()\n b1 = Books(\"1984\", \"George Orwell\", \"Harvill Secker\", \"1949\", \"0123456789123\")\n self.assertEqual(s1.check_resource(b1), False)\n s1.add_resource(b1)\n self.assertEqual(s1.check_resource(b1), True)",
"def resource_exists(self, resource):\n products = Product.select(self.env, where={'name' : resource.id})\n return bool(products)",
"def check_exist(self):\n helper.RbdImageOperator._check_rbd_image(self.real_path)",
"def exists(self):\r\n return os.path.exists(self.full_path)",
"def exists(self):\n return _os.path.exists(self.__str__())",
"def object_exists(self, fname):\n return True",
"def has_efi():\n return os.path.exists(\"/sys/firmware/efi\")",
"def is_file_exists(self):\n pass",
"def object_exists(self, fname):\n return False",
"def check_if_sound_card_exists():\n\n try:\n snd_cards = run_shell_cmd('cat /proc/asound/cards')\n except Exception:\n return False\n return not 'no soundcards' in '\\n'.join(snd_cards)",
"def test_resource_exists(self):\r\n\t\tself.assertTrue(self._configuration_.resources().has_key(\"AddWordTaskRepeat\") and self._configuration_.resources().has_key(\"RemoveWordTaskRepeat\"))",
"def exists(self):\n return True",
"def exists(self):\n return True",
"def check_file_exist(self):\n return False",
"def object_exists(self, fname):\n return self.object_exists",
"def object_exists(self, name: str):\n file_path = self.__get_file_path(name)\n return os.path.exists(file_path)",
"def has_resources(self):\r\n return hasattr(self, 'resources') and self.resources",
"def exists(self):\n # TODO: What about broken sym-links?\n return os.path.exists(self.path)",
"def exists(self):\n return self.path.exists()",
"def file_exists(self):\r\n if os.path.exists(self.file_path):\r\n return True\r\n else:\r\n return False",
"def check_resource_exists(test_cma_creds, test_rma_url, path):\n creds = test_cma_creds.split(\":\")\n response = requests.get(\n f\"{test_rma_url}/{path}\",\n headers={\"Accept\": \"application/json\"},\n auth=HTTPDigestAuth(creds[0], creds[1]),\n )\n return response.status_code == 200",
"def exists(self) -> bool:\n return self._file_exists()",
"def exist(self):\n return self.file_path.exists()",
"def exists(self):\n\n return os.path.exists(self[\"~filename\"])",
"def resource_exists(uri: Optional[str]) -> bool:\n\n if uri is None:\n return True\n\n # TODO Replace after finding way to pass custom fs through FireO validator\n if uri.startswith(\"gs://\"):\n return True\n\n else:\n # Get file system\n fs, uri = url_to_fs(uri)\n\n # Check exists\n if fs.exists(uri):\n return True\n\n return False",
"def do_status() -> None:\n asset_dir = pycozmo.util.get_cozmo_asset_dir()\n if os.path.exists(asset_dir / \"resources.txt\"):\n print(f\"Resources found in {asset_dir}\")\n else:\n print(f\"Resources NOT found in {asset_dir}\")\n sys.exit(1)",
"def exists(self):\r\n return bool(self.bucket.lookup(self.name))",
"def exists(self):\n f = os.path.join(pth, '..', 'static/data', self.filename)\n return os.path.isfile(f)",
"def exists_adv(path):\n # TODO: use selenium\n r = requests.head(path)\n # print(r.status_code)\n return r.status_code == requests.codes.ok",
"def hazard_exists(self):\n # Perform other checks to make sure hazard file is complete\n return os.path.exists(self.hazard_path)",
"def file_exists(self):\n if os.path.isfile(self.file_name):\n return True\n else:\n return False",
"def sox_check_is_available(self):\n result = self._process_command('sox -h', PIPE, supress_dry_run=True)\n return result[0] == 0",
"def exists():\n\treturn os.path.exists('data/strmr.db')",
"def Exists(self, path: str) -> bool:\n ...",
"def checkExist(self,fname,status):\n\n if (self.status == \"r\"):\n # Checks to see if it exists for reading\n # Which means it must be present\n\n if (not (os.path.exists(self.fname))):\n print(f\"Couldn't open input file: {self.fname}\")\n return False\n else:\n # Check to see if exists for reading\n # (i.e. must not exist)\n if (os.path.exists(self.fname)):\n print(f\"File {self.fname} already exists.\")\n return False\n\n return True",
"def checkExist(self,fname,status):\n\n if (self.status == \"r\"):\n # Checks to see if it exists for reading\n # Which means it must be present\n\n if (not (os.path.exists(self.fname))):\n print(f\"Couldn't open input file: {self.fname}\")\n return False\n else:\n # Check to see if exists for reading\n # (i.e. must not exist)\n if (os.path.exists(self.fname)):\n print(f\"File {self.fname} already exists.\")\n return False\n\n return True",
"def exists(self):\n\n return os.path.exists(self.path)",
"def file_exist() -> bool:\n pass",
"def is_resource(self, path):\n # type: (Text) -> bool\n raise FileNotFoundError",
"def exists(path):\n return get_instance(path).exists(path)",
"def file_exists(self, path):\n ref = self._parse_path(path)\n self.log.warn(\"looking up whether a narrative exists\")\n try:\n self.log.warn(\"trying to get narrative {}\".format(ref))\n return self.narrative_exists(ref)\n except WorkspaceError as err:\n self.log.warn(\n \"Error while testing narrative existence: {}\".format(str(err))\n )\n if err.http_code == 403:\n raise HTTPError(\n 403,\n \"You do not have permission to view the narrative with id {}\".format(\n path\n ),\n )\n raise HTTPError(\n err.http_code,\n \"An error occurred while trying to find the Narrative with id {}\".format(\n path\n ),\n )",
"def exists():\n check50.include(\"data\")\n check50.exists(\"adventure.py\")\n check50.exists(\"room.py\")",
"def __check_exists(name, path, fatal=True):\n if not os.path.exists(path):\n if fatal:\n raise SystemExit(\"%s '%s' does not exist\" % (name, path))\n return False\n return True",
"def has_resources(self) -> Optional[bool]:\n return pulumi.get(self, \"has_resources\")",
"def exists(self):\n return os.path.exists(self.sensorpath)",
"def exist(self):",
"def test_exists_mol_res_spin_data(self):\n\n # This should be True.\n self.failUnless(mol_res_spin.exists_mol_res_spin_data())",
"def exists(self):\n log.warning('Could not determine whether %s exists due to unhandled scheme.', self.file_name)",
"def exists(self) -> bool:\n try:\n result = self.get()\n except KeyError:\n return False\n return True",
"def _bucket_exists(self):\n try:\n self.resource.meta.client.head_bucket(Bucket=self.bucketname)\n except botocore.exceptions.ClientError as error:\n # If a client error is thrown, then check that it was a 404 error.\n # If it was a 404 error, then the bucket does not exist.\n error_code = int(error.response['Error']['Code'])\n if error_code == 404:\n raise LookupError(\"Bucket '%s' does not exist\", self.bucketname)\n else:\n # maybe a permissions issue\n raise error\n return True",
"def exists(path):\r\n path = encode(path)\r\n if path.lower().startswith(\"smb://\"):\r\n try:\r\n return samba.file_exists(os.path.basename(path), os.path.dirname(path)) or \\\r\n samba.folder_exists(os.path.basename(path), os.path.dirname(path))\r\n except gaierror:\r\n logger.info(\"deportesalacarta.core.filetools exists: No es posible conectar con la ruta\")\r\n platformtools.dialog_notification(\"No es posible conectar con la ruta\", path)\r\n return True\r\n else:\r\n return os.path.exists(path)",
"def exists(self, obj):\n return False",
"def exists(path):\n r = requests.head(path)\n # print(r.status_code)\n return r.status_code == requests.codes.ok",
"def exists(self, path: str) -> bool:\n pass",
"def exists(self, path):",
"def check_exists(name):\n if arcpy.Exists(name):\n arcpy.Delete_management(name)\n return",
"def exists (self, uuid):\n return self.read (uuid) is not None",
"def path_exists(path):\r\n return os.path.exists(path)",
"def exists(self):\n return os.path.exists(self.key_file)",
"def is_book_exist(self, book_info):\n for type, link in book_info.links.items():\n try:\n bookfile = BookFile.objects.get( link_hash = md5(link).hexdigest() )\n books = bookfile.book_set.all()\n if books:\n return True, books[0]\n except BookFile.DoesNotExist:\n continue\n try:\n book = Book.objects.get(author__name=book_info.authors, title=book_info.title)\n return True, book\n except Book.DoesNotExist:\n continue\n return False, None",
"def is_present(self):\n try:\n self.read_binary(0, 2)\n return True\n except:\n return False",
"def is_exists(self):\n\n return os.path.isfile(os.path.join(self.scripts_dir, self.python_name))",
"def file_exists(self):\n return os.path.exists(self._fileName)",
"def file_exists(filename: str):\n if osp.exists(filename) is True:\n return True\n else:\n return False",
"def file_exists(path):\n return os.path.exists(path)",
"def test_exists(self):\n self.assertTrue(os.path.exists(__file__) == self._system.exists(__file__))",
"def exists(self):\n return self.pod.file_exists(self.source_pod_path)",
"def _check(self):\n\t\tif not self._raven:\n\t\t\traise NoDeviceFoundException",
"def isExist(data):\n return True/False",
"def does_file_exist(self, fn):\n if True:\n print(f\"-=- {fn} found.\")\n return True\n else:\n print(f\"-!- {fn} not found. Try again\")\n return False",
"def exists(self):\r\n try:\r\n self.refresh()\r\n except:\r\n return False\r\n return True",
"def exists(path):\n return os.path.exists(path)",
"def resourceExists(self, uri):\r\n return uri in self.cache",
"def check_available():\n\n rm = current_app.config['rm_object']\n\n return rm.check_availability()",
"def exists(identifier, network):\n foo = next(load(identifier, network), None)\n return foo is not None",
"def check_binary(self):\n if shutil.which(self.binary):\n return True\n else:\n logging.warning(R+'The supplied binary or path does not exist... Exiting'+W)\n exit(1)",
"def is_loaded(self):\n return os.path.exists(IPMIService.IPMI_DEV)",
"def exists(path):\n fs.exists(path)",
"def path_exists(path):\n return os.path.exists(path)",
"def _do_check(self):\n try:\n #breakpoint()\n ApplicationsItem.objects.exists()\n #print (\"Checking\")\n return True\n\n except Exception:\n client.captureException()\n return False",
"def file_exists(filename):\n return os.path.exists(filename)",
"def FileExists(file):\n return os.path.exists(file)",
"def exists(self):\n return self.obj is not None",
"def exists(self):\n\n if self:\n pass",
"def test_exists_true(self):\n self.assertTrue(PrepTemplate.exists(1))",
"def exists(profile, name):\n result = fetch_by_name(profile, name)\n return len(result) > 0",
"def exists(profile, name):\n result = fetch_by_name(profile, name)\n return len(result) > 0",
"def exists(self):\n\t\tif self.hasUdim:\n\t\t\treturn len( self.udimPaths ) != 0\n\t\treturn super( textureFile, self ).exists",
"def exists(self):\n return self.properties.get(\"Exists\", None)",
"def entry_exists(title):\n try:\n f = default_storage.open(f\"entries/{title}.md\")\n return True\n\n except FileNotFoundError:\n return False",
"def test_exists_false(self):\n self.assertFalse(PrepSample.exists('Not_a_Sample', self.prep_template))",
"def check_if_exists(self, bookID):\n query = f\"\"\"SELECT * from {TABLE} WHERE bookID = '{bookID}';\"\"\"\n res = self.cursor.execute(query)\n\n if self.cursor.fetchall():\n return True\n else:\n return False",
"def has_item(self, usage_key):\r\n try:\r\n self._find_one(usage_key)\r\n return True\r\n except ItemNotFoundError:\r\n return False",
"def check_availability(self):\n pass",
"def exist(name: str) -> bool:\n return bool(os.path.exists(name))",
"def resource(self, pe, r, parents):\n if hasattr(r, \"data\"):\n # Resource\n offset = r.data.struct.OffsetToData\n size = r.data.struct.Size\n data = pe.get_memory_mapped_image()[offset:offset+size]\n if data.startswith(b'\\x4d\\x5a\\x90\\x00\\x03\\x00'):\n if r.name:\n name = '/'.join(parents) + '/' + str(r.name)\n else:\n name = '/'.join(parents) + '/' + str(r.id)\n print('[+] PE header in resource {}'.format(name))\n return True\n else:\n return False\n else:\n # directory\n parents = copy.copy(parents)\n suspicious = False\n if r.id is not None:\n parents.append(str(r.id))\n else:\n name = r.name.string.decode('utf-8')\n parents.append(name)\n if name in self.resource_names:\n print(\"[+] Suspicious resource name: {} -> {}\".format(\n name,\n self.resource_names[name])\n )\n suspicious = True\n for r2 in r.directory.entries:\n suspicious |= self.resource(pe, r2, parents)\n return suspicious"
] | [
"0.6995172",
"0.6845966",
"0.67246604",
"0.66229993",
"0.6481578",
"0.64715683",
"0.63962346",
"0.63410455",
"0.6323303",
"0.62668014",
"0.6262816",
"0.624901",
"0.62375194",
"0.623514",
"0.6232769",
"0.6223159",
"0.6223159",
"0.61314154",
"0.6087281",
"0.6079083",
"0.6048402",
"0.60336506",
"0.599664",
"0.59880334",
"0.59838265",
"0.59736353",
"0.5968472",
"0.5963379",
"0.5958146",
"0.59163034",
"0.59127325",
"0.59075165",
"0.5903226",
"0.58843684",
"0.58842564",
"0.587747",
"0.58753633",
"0.5874729",
"0.58619106",
"0.58619106",
"0.58605576",
"0.58596665",
"0.5857004",
"0.5854332",
"0.585163",
"0.58464545",
"0.58397615",
"0.58350253",
"0.58327883",
"0.5827763",
"0.5824287",
"0.5820292",
"0.5810773",
"0.5806442",
"0.5802444",
"0.5799859",
"0.5795692",
"0.5793153",
"0.5790091",
"0.57854015",
"0.5766028",
"0.5760882",
"0.57584053",
"0.5757267",
"0.5752605",
"0.57451236",
"0.5741399",
"0.5737686",
"0.57319427",
"0.57315147",
"0.57075083",
"0.5707135",
"0.56934744",
"0.56802356",
"0.567011",
"0.5665238",
"0.5662465",
"0.56582105",
"0.56574154",
"0.56484413",
"0.5646615",
"0.5643344",
"0.56422734",
"0.5638402",
"0.5637534",
"0.56330365",
"0.561722",
"0.5610964",
"0.5605262",
"0.5605147",
"0.5605147",
"0.5605058",
"0.5604358",
"0.5603668",
"0.5600551",
"0.5600299",
"0.55972546",
"0.5595435",
"0.55828476",
"0.55791295"
] | 0.67197114 | 3 |
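
A typical consumer of `_check_bios_resource` above reads or patches a setting through the URI it returns. A hedged sketch of that usage: `ilo` stands for the client object, and both `_rest_patch` and the `BootMode` key are assumptions for illustration, not confirmed API.

# Ask for a specific property so unsupported platforms fail early.
headers, bios_uri, bios_settings = ilo._check_bios_resource(['BootMode'])

print(bios_settings['BootMode'])                          # read the current value
ilo._rest_patch(bios_uri, headers, {'BootMode': 'Uefi'})  # write a new one
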
Gets the PCI devices. | def _get_pci_devices(self):
system = self._get_host_details()
if ('links' in system['Oem']['Hp'] and
'PCIDevices' in system['Oem']['Hp']['links']):
# Get the PCI URI and Settings
pci_uri = system['Oem']['Hp']['links']['PCIDevices']['href']
status, headers, pci_device_list = self._rest_get(pci_uri)
if status >= 300:
msg = self._get_extended_error(pci_device_list)
raise exception.IloError(msg)
return pci_device_list
else:
msg = ('links/PCIDevices section in ComputerSystem/Oem/Hp'
' does not exist')
raise exception.IloCommandNotSupportedError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_pci_device_list(self):\n pass",
"def _get_gpu_pci_devices(self):\n pci_device_list = self._get_pci_devices()\n\n gpu_list = []\n items = pci_device_list['Items']\n for item in items:\n if item['ClassCode'] in CLASSCODE_FOR_GPU_DEVICES:\n if item['SubclassCode'] in SUBCLASSCODE_FOR_GPU_DEVICES:\n gpu_list.append(item)\n return gpu_list",
"def get_devices(self):\n\n \"\"\"\n # Note: This code is no longer required with the latest spt updates.\n # But that said, leaving for now so I don't risk breaking folks!\n if not self._use_lsscsi:\n message = \"Find Number of IOM's\"\n command = \"lsscsi | fgrep enclo | egrep 'HGST|WDC' | wc -l\"\n pdata = self._run_command(command=command, message=message, logger=self._logger, shell=True)\n ioms = (int)(pdata['stdout'].strip())\n if ioms > 1:\n self._use_lsscsi = True\n if not self._use_lsscsi and os.path.exists('/etc/multipath.conf'):\n self._use_lsscsi = True\n \"\"\"\n # Allow above logic or options to override lsscsi vs. spt usage.\n if not self._use_lsscsi or self._force_spt:\n self.get_devices_spt()\n else:\n self.get_devices_lsscsi()\n return",
"def get_devices():\n devices = []\n for device_id in range(pm.lib.Pm_CountDevices()):\n devices.append(DeviceInfo(device_id))\n\n return devices",
"def get_devices(self):\n return self.api_request('GET', self.url + '/device', {})",
"def devices(self):\n return self.enumerate_devices()",
"def get_all_devices(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_GetAllDevices', self.handle))",
"def get_devices(self):\n devices = []\n for i in self.devices:\n devices.append(self.devices[i])\n\n return devices",
"def get_devices(self):\n devices = self.get(\"event/device\")",
"def GetAllDevices(self):\n\n return list(self.YieldAllDevices())",
"def get_devices(self):\n return get_devices(self.api_key)",
"def list_devices(cls):\n # get all matching devices\n return usb.core.find(\n find_all=True,\n custom_match=lambda dev: (\n dev.idVendor == cls.vendor_id and dev.idProduct in cls.product_ids\n ),\n )",
"def devices(self):\n\t\t\tdevices = []\n\t\t\tnum = cuda.Device.count()\n\t\t\tfor id in range(num):\n\t\t\t\tname = cuda.Device(id).name()\n\t\t\t\tmemory = cuda.Device(id).total_memory()\n\t\t\t\tdevices.append((memory, name, id))\n\t\t\treturn devices",
"def get_devices(self):\n e = ctypes.POINTER(rs_error)()\n n_devices = lrs.rs_get_device_count(self.ctx, ctypes.byref(e))\n _check_error(e)\n\n lrs.rs_get_device.restype = ctypes.POINTER(rs_device)\n for idx in range(n_devices):\n dev = lrs.rs_get_device(self.ctx, idx, ctypes.byref(e))\n _check_error(e)\n\n name = pp(lrs.rs_get_device_name, dev, ctypes.byref(e))\n _check_error(e)\n\n serial = pp(lrs.rs_get_device_serial, dev, ctypes.byref(e))\n _check_error(e)\n\n version = pp(lrs.rs_get_device_firmware_version, dev, ctypes.byref(e))\n _check_error(e)\n\n is_streaming = lrs.rs_is_device_streaming(dev, ctypes.byref(e))\n _check_error(e)\n\n yield {'id': idx, 'name': name, 'serial': serial,\n 'firmware': version, 'is_streaming': is_streaming}",
"def devices(self, **kwargs):\n return self._get(API.DEVICES.value, check_202=True, **kwargs)",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def get_devices(self): \n devices = []\n \n # get all the keys from the dictionary\n keys = self.SCPI_Data.keys()\n \n # extract the device specifier\n dev_keys = [key.split(':')[0] for key in keys]\n \n # iterate through the devices\n for key in dev_keys:\n if (key not in devices) and (key != 'SUP'):\n # this is a unique device, add it to the list\n devices = devices + [key]\n # end if\n # end for\n \n devices = devices + ['SIM']\n \n # replace the GPS if present with its longer name\n devices = ['GPSRM' if device == 'GPS' else device \n for device in devices]\n return devices",
"def list_devices(self):\n return [x for x in self.devices.keys()]",
"def devices(self):\n\n return self.__devices",
"def getDevices():\n devices = create_string_buffer(BUF_SIZE)\n daqmx(\n dll.DAQmxGetSysDevNames,\n (\n devices,\n BUF_SIZE\n )\n )\n return parseStringList(devices.value)",
"def get_available_devices(self):\n available_devices = []\n try:\n out = self.get_output(\"devices\")\n except Exception as e:\n logger.error(e)\n else:\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n return available_devices",
"def devices(self):\n return {k:v for k, v in self._data.items() \n if v[\"type\"] == \"DEVICE\"}",
"def findDevices(self):\n devs = []\n for name, (serServer, port) in self.serialLinks.items():\n if serServer not in self.client.servers:\n continue\n server = self.client[serServer]\n ports = yield server.list_serial_ports()\n if port not in ports:\n continue\n devName = '%s - %s' % (serServer, port)\n devs += [(devName, (server, port))]\n returnValue(devs)",
"def list_devices():\r\n DeviceManagerCLI.BuildDeviceList()\r\n return DeviceManagerCLI.GetDeviceList()",
"def findDevices(self):\n devs = []\n for name in self.serialLinks:\n port = self.serialLinks[name]\n if name not in self.client.servers:\n continue\n server = self.client[name]\n ports = yield server.list_serial_ports()\n print ports\n if port not in ports:\n continue\n devName = '%s - %s' % (name, port)\n devs += [(devName, (server, port))]\n returnValue(devs)",
"def get_devices():\n devices = []\n for path in hookenv.action_get('osd-devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n raise Error('{}: Not absolute path.'.format(path))\n devices.append(path)\n return devices",
"def devices(self):\n return list(self._device_types)",
"def retrieve_pci_addresses(self):\n debug('Retrieve PCI addresses...')\n try:\n lshw_json = self.run_ssh('lshw -json').stdout\n except SSHError:\n fatal('Cannot connect to node:', self.ip_address)\n lshw = json.loads(lshw_json)\n pci_addresses = []\n for component in lshw[\"children\"][0][\"children\"]:\n if component[\"class\"] == \"bridge\":\n for subsystem in component[\"children\"]:\n if subsystem[\"class\"] == \"network\":\n index = int(subsystem[\"id\"].split(':')[1])\n pci_addresses.append((index, subsystem[\"businfo\"]))\n pci_addresses = [v.strip('pci@') for k, v in sorted(pci_addresses)]\n # iterate over interfaces and set pci address\n i = 0\n for interface in self.interfaces:\n self.interfaces[interface]['pci_address'] = pci_addresses[i]\n i += 1\n if i >= len(pci_addresses):\n break",
"def gpu_devices(self):\n return self._gpu_devices",
"async def find_devices() -> List[DeviceInfo]:\n return await Discovery.search_devices()",
"async def async_get_devices(self) -> list[dict[str, Any]]:\n return await self.aiolivisi.async_get_devices()",
"def _get_device_list(self):\n if self.app.config.cloud_type == 'ec2':\n # c5/m5 on AWS mounts EBS volumes as NVMe:\n # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/nvme-ebs-volumes.html\n # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html\n for itype in ['c5', 'm5']:\n if itype in self.app.cloud_interface.get_type():\n return frozenset(glob('/dev/nvme[0-26]n1'))\n return frozenset(glob('/dev/*d[a-z]'))",
"def get_devices():\n global managed_objects\n global devices_by_adr\n \n devices_by_adr = {}\n \n r = re.compile(\"\\/org\\/bluez\\/hci\\d*\\/dev\\_(.*)\")\n # e.g., match a string like this:\n # /org/bluez/hci0/dev_58_C9_35_2F_A1_EF\n \n for key, value in managed_objects.items():\n # print(\"key=\", key)\n m = r.match(key)\n if m is not None:\n dev_str = m.group(1) # we have a device string!\n # print(\"dev_str=\", dev_str)\n # let's flatten that dict a bit\n devices_by_adr[dev_str] = value[\"org.bluez.Device1\"]",
"def devices(self) -> api.Devices:\n return self._get_model(model=api.Devices)",
"def list_devices():\n return _lib.SeaTeaseAPI().list_devices()",
"def get_devices():\n devices, errors = [], []\n\n for path in hookenv.action_get('devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n errors.append('{}: Not absolute path.'.format(path))\n elif not os.path.exists(path):\n errors.append('{}: Device does not exist.'.format(path))\n else:\n devices.append(path)\n\n if errors:\n raise ZapDiskError(\", \".join(errors))\n\n return devices",
"def get_devices(self):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"get_account_units\",\n \"account_token\": self.api_token\n }\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_pin\".format(self.BASE_URL),\n data=json.dumps(data),\n headers=headers)\n response_json = response.json()\n if not response_json.get(\"success\"):\n raise ValueError(response_json.get(\"error_message\"))\n\n units_json = response_json.get(\"units\")\n devices = []\n for unit in units_json:\n device = Charger(unit, self)\n device.update_state()\n devices.append(device)\n\n return devices",
"def devices(self):\n return self._sdk_dependencies.device_client",
"def _get_usb_devices(self):\n\n # Get every device on the bus\n device_re = re.compile(\"Bus\\s+(?P<bus>\\d+)\\s+Device\\s+(?P<device>\\d+).+ID\\s(?P<id>\\w+:\\w+)\\s(?P<tag>.+)$\", re.I)\n df = subprocess.check_output(\"lsusb\")\n devices = []\n\n for i in df.decode().split('\\n'):\n if i:\n info = device_re.match(i)\n if info:\n dinfo = info.groupdict()\n dinfo['device'] = '/dev/bus/usb/%s/%s' % (dinfo.pop('bus'), dinfo.pop('device'))\n devices.append(dinfo)\n\n # Filter only for the STLink devices\n st_link_devices = []\n for device in devices:\n if self.STLINK_VENDOR_ID in device['id']:\n st_link_devices.append(device)\n\n self.usb_devices = st_link_devices",
"def get_discoverable_devices(self):\n available = self.get_available_devices()\n paired = self.get_paired_devices()\n return [d for d in available if d not in paired]",
"def get_available_devices(self):\n try:\n out = self.get_output(\"devices\")\n except BluetoothctlError, e:\n print(e)\n return None\n else:\n available_devices = []\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n\n return available_devices",
"def readPCIList(self):\n\n self.vendors = {}\n self.devices = {}\n\n vendorId = None\n vendorName = None\n for line in PCIList.split('\\n'):\n stripped = line.lstrip()\n if not stripped or stripped[0] == ';':\n continue\n if line[0] != '\\t':\n # A vendor line.\n vendorId, vendorName = line.split('\\t', 1)\n vendorId = int(vendorId, 16)\n self.vendors[vendorId] = vendorName.strip()\n else:\n # A device line, continuing the previous vendor.\n deviceId, deviceName = line[1:].split('\\t', 1)\n deviceId = deviceId.split(' ', 1)[0]\n try:\n deviceId = int(deviceId, 16)\n except:\n deviceId = None\n self.devices[(vendorId, deviceId)] = deviceName.strip()\n\n self.addExtraDevices()",
"def list_devices(self):\n xml = str(self._server.listDevices())\n return self._parse_cabling_xml(xml)",
"def get_devices_lsscsi(self):\n\n try:\n message = \"Find SCSI Devices\"\n if self._include_enclosures:\n command = \"lsscsi --generic --transport | egrep 'disk|0x14|enclo'\"\n else:\n command = \"lsscsi --generic --transport | fgrep 'disk|0x14'\"\n pdata = self._run_command(command=command, message=message, logger=self._logger, shell=True)\n #\n # Format:\n # $ lsscsi --generic --transport\n # [0] [1] [2] [3] [4]\n # [0:0:0:0] disk sas:0x5000cca25103b471 /dev/sda /dev/sg0 \n # [0:0:1:0] disk sas:0x5000cca251029301 /dev/sdb /dev/sg1 \n # ...\n # [0:0:14:0] enclosu sas:0x5001636001caa0bd - /dev/sg14\n # [7:0:0:0] cd/dvd usb: 1-1.3:1.2 /dev/sr0 /dev/sg15\n #\n # Special Case:\n # Handle lines without a transport (spaces only). (screen scrapping danger)\n # [0:0:10:0] enclosu sas:0x50030480091d71fd - /dev/sg10\n # [1:0:0:0] disk <spaces> /dev/sdk /dev/sg11 <- INTEL disk!\n #\n # Another SNAFU! (and why I hate screen scrapping!!!)\n # [15:0:53597:0]disk sas:0x5000cca23b359649 /dev/sdg /dev/sg6 \n # [15:0:53598:0]disk sas:0x5000cca23b0c0a99 /dev/sdh /dev/sg7 \n # [15:0:53599:0]disk sas:0x5000cca23b0b7531 /dev/sdi /dev/sg8 \n # ...\n # [15:0:53686:0]enclosu sas:0x5000ccab040001bc - /dev/sg165\n # [15:0:53766:0]enclosu sas:0x5000ccab040001fc - /dev/sg144\n #\n # Evidently, the author of lsscsi did not think of consistent output! ;(\n #\n for line in pdata['stdout'].splitlines():\n dinfo = line.split()\n device = dict()\n if len(dinfo) < 5:\n m = re.search('(?P<device>disk|\\(0x14\\)|enclosu)', dinfo[0])\n if m:\n device['Device Type'] = m.group('device')\n sas_index = 1\n dev_index = 2\n sg_index = 3\n else:\n continue\n else:\n device['Device Type'] = dinfo[1]\n sas_index = 2\n dev_index = 3\n sg_index = 4\n\n # lsscsi does not understand 'Host Managed' device type.\n if '0x14' in device['Device Type']:\n device['Device Type'] = 'disk'\n\n # Parse remaining information.\n if 'sas:' in dinfo[sas_index]:\n device['SAS Address'] = dinfo[sas_index][4:]\n self._sas_addresses += 1\n else:\n device['SAS Address'] = \"\"\n\n # Note: Enclosure has no driver, so reports '-' for name.\n if '/dev/' in dinfo[dev_index]:\n if self._drives and not dinfo[dev_index] in self._drives:\n continue\n if self._exclude and dinfo[dev_index] in self._exclude:\n continue\n device['Linux Device Name'] = dinfo[dev_index]\n else:\n device['Linux Device Name'] = \"\"\n if '/dev/sg' in dinfo[sg_index]:\n device['SCSI Device Name'] = dinfo[sg_index]\n else:\n device['SCSI Device Name'] = \"\"\n\n self._devices.append(device)\n\n except RuntimeError as exc:\n self._logger.error(\"Failed to acquire SCSI devices: {0}\".format(exc))\n raise exc",
"def get_connected_devices(self):\n all_devices = []\n if self.vid_list:\n for vid in self.vid_list:\n all_devices += UsbDriver.usb_list_devices(vid)\n\n if self.pid_ignore_list:\n return [device for device in all_devices\n if not (device.product_id in self.pid_ignore_list)]\n else:\n return all_devices",
"def devices(self) -> list[Device]:\n return list(self._devices.values())",
"def get_discoverable_devices(self):\n available = self.get_available_devices()\n paired = self.get_paired_devices()\n\n return [d for d in available if d not in paired]",
"def do_list(self, _):\n devices = []\n for source in self._target.devices:\n devices.append({\n 'name': source.device['name'],\n 'path': source.device['path'],\n })\n return devices",
"def getDevices(i):\n devices = Account['KTFLR'].devices('monpressprod')\n device = devices[i]\n return device",
"def get_devices(self):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.network.networkdevice.1.0+xml'})\n\n\t\tresp = self.ise.get('{0}/config/networkdevice'.format(self.url_base))\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tjson_res = ERS._to_json(resp.text)['ns3:searchResult']\n\n\t\tif resp.status_code == 200 and int(json_res['@total']) > 1:\n\t\t\tresult['success'] = True\n\t\t\tresult['response'] = [(i['@name'], i['@id'])\n\t\t\t\t\t\t\t\t for i in json_res['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\n\t\telif resp.status_code == 200 and int(json_res['@total']) == 1:\n\t\t\tresult['success'] = True\n\t\t\tresult['response'] = [(json_res['ns3:resources']['ns5:resource']['@name'],\n\t\t\t\t\t\t\t\t json_res['ns3:resources']['ns5:resource']['@id'])]\n\t\t\treturn result\n\n\t\telif resp.status_code == 200 and int(json_res['@total']) == 0:\n\t\t\tresult['success'] = True\n\t\t\tresult['response'] = []\n\t\t\treturn result\n\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result",
"def _find_devices(cls, vendor: int, product: int,\n nocache: bool = False) -> Set[UsbDevice]:\n backend = cls._load_backend()\n vidpid = (vendor, product)\n if nocache or (vidpid not in cls.UsbDevices):\n # not freed until Python runtime completion\n # enumerate_devices returns a generator, so back up the\n # generated device into a list. To save memory, we only\n # back up the supported devices\n devs = set()\n vpdict = {} # Dict[int, List[int]]\n vpdict.setdefault(vendor, [])\n vpdict[vendor].append(product)\n for dev in backend.enumerate_devices():\n device = UsbDevice(dev, backend)\n if device.idVendor in vpdict:\n products = vpdict[device.idVendor]\n if products and (device.idProduct not in products):\n continue\n devs.add(device)\n if sys.platform == 'win32':\n # ugly kludge for a boring OS:\n # on Windows, the USB stack may enumerate the very same\n # devices several times: a real device with N interface\n # appears also as N device with as single interface.\n # We only keep the \"device\" that declares the most\n # interface count and discard the \"virtual\" ones.\n filtered_devs = dict()\n for dev in devs:\n vid = dev.idVendor\n pid = dev.idProduct\n ifc = max([cfg.bNumInterfaces for cfg in dev])\n k = (vid, pid, dev.bus, dev.address)\n if k not in filtered_devs:\n filtered_devs[k] = dev\n else:\n fdev = filtered_devs[k]\n fifc = max([cfg.bNumInterfaces for cfg in fdev])\n if fifc < ifc:\n filtered_devs[k] = dev\n devs = set(filtered_devs.values())\n cls.UsbDevices[vidpid] = devs\n return cls.UsbDevices[vidpid]",
"def get_generic_pci_devices_count(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetGenericPciDevicesCount', self.handle)",
"def devices(self):\n return DeviceCollection(client=self)",
"async def get_devices(self, params: Optional = None) -> dict:\r\n return await self.get_items(API_DEVICES, params=params)",
"def get_available_devices():\n executable_path = os.path.join(os.path.dirname(__file__), 'build')\n try:\n num_devices = int(subprocess.check_output(\n [\"{}/query_devices\".format(executable_path)]))\n except subprocess.CalledProcessError as e:\n return [0]\n\n FNULL = open(os.devnull, 'w')\n\n available_devices = []\n for i in range(num_devices):\n try:\n if b\"NVIDIA\" in subprocess.check_output(\n [\"{}/test_device\".format(executable_path),\n str(i)], stderr=FNULL):\n available_devices.append(i)\n logging.info('Device {} is available for rendering'.format(i))\n except subprocess.CalledProcessError as e:\n logging.info(e)\n logging.info('Device {} is not available for rendering'.format(i))\n FNULL.close()\n\n return available_devices",
"def devices() -> typing.List[str]:\n devices = sounddevice.query_devices()\n return [device['name'] for device in devices if device['max_output_channels'] > 0]",
"def scan():\n debug(\"CBA4.scan()\")\n num = MpOrLibUsb.get_device_count()\n devices = []\n i = 0\n while i < num:\n cba = CBA4(interface=MpOrLibUsb(i))\n i += 1\n sn = cba.get_serial_number()\n if sn:\n devices.append(sn)\n cba.close()\n #end loop\n return devices\n #end scan()",
"def _get_device(node):\n\n vpp = VppPCIUtil(node)\n vpp.get_all_devices()\n\n # Save the device information\n node[\"devices\"] = {}\n node[\"devices\"][\"dpdk_devices\"] = vpp.get_dpdk_devices()\n node[\"devices\"][\"kernel_devices\"] = vpp.get_kernel_devices()\n node[\"devices\"][\"other_devices\"] = vpp.get_other_devices()\n node[\"devices\"][\"linkup_devices\"] = vpp.get_link_up_devices()",
"def enumerate_devices(vendor_id: int = 0x2C97) -> List[bytes]:\n devices: List[bytes] = []\n\n for hid_device in hid.enumerate(vendor_id, 0):\n if (hid_device.get(\"interface_number\") == 0 or\n # MacOS specific\n hid_device.get(\"usage_page\") == 0xffa0):\n devices.append(hid_device[\"path\"])\n\n assert len(devices) != 0, (\n f\"Can't find Ledger device with vendor_id {hex(vendor_id)}\")\n\n return devices",
"def get_devices(mac=None):\n wemo_devices = discover_wemo()\n\n if mac:\n dev = get_device(mac, wemo_devices)\n if not dev:\n return []\n return [dev]\n\n return wemo_devices",
"def list_devices(self):\n response = self.oauth.get(url=f'{self.base_url}/json/devices/list')\n\n result = response.json()['device']\n for device in result:\n print(device)",
"def list_devices(context, connstrings):\n return _nfc.list_devices(context, connstrings)",
"def GetDeviceSerials(self):\n return self._device_serial_index.keys()",
"def device_info(node):\n\n if \"cpu\" in node and \"total_mbufs\" in node[\"cpu\"]:\n total_mbufs = node[\"cpu\"][\"total_mbufs\"]\n if total_mbufs != 0:\n print(\"Total Number of Buffers: {}\".format(total_mbufs))\n\n vpp = VppPCIUtil(node)\n vpp.get_all_devices()\n linkup_devs = vpp.get_link_up_devices()\n if len(linkup_devs):\n print(\"\\nDevices with link up (can not be used with VPP):\")\n vpp.show_vpp_devices(linkup_devs, show_header=False)\n # for dev in linkup_devs:\n # print (\" \" + dev)\n kernel_devs = vpp.get_kernel_devices()\n if len(kernel_devs):\n print(\"\\nDevices bound to kernel drivers:\")\n vpp.show_vpp_devices(kernel_devs, show_header=False)\n else:\n print(\"\\nNo devices bound to kernel drivers\")\n\n dpdk_devs = vpp.get_dpdk_devices()\n if len(dpdk_devs):\n print(\"\\nDevices bound to DPDK drivers:\")\n vpp.show_vpp_devices(dpdk_devs, show_interfaces=True, show_header=False)\n else:\n print(\"\\nNo devices bound to DPDK drivers\")\n\n other_devs = vpp.get_other_devices()\n if len(other_devs):\n print(\"\\nDevices not bound to Kernel or DPDK drivers:\")\n vpp.show_vpp_devices(other_devs, show_interfaces=True, show_header=False)\n else:\n print(\"\\nNo devices not bound to Kernel or DPDK drivers\")\n\n vpputl = VPPUtil()\n interfaces = vpputl.get_hardware(node)\n if interfaces == {}:\n return\n\n print(\"\\nDevices in use by VPP:\")\n\n if len(interfaces.items()) < 2:\n print(\"None\")\n return\n\n print(\n \"{:30} {:4} {:4} {:7} {:4} {:7}\".format(\n \"Name\", \"Numa\", \"RXQs\", \"RXDescs\", \"TXQs\", \"TXDescs\"\n )\n )\n for intf in sorted(interfaces.items()):\n name = intf[0]\n value = intf[1]\n if name == \"local0\":\n continue\n numa = rx_qs = rx_ds = tx_qs = tx_ds = \"\"\n if \"numa\" in value:\n numa = int(value[\"numa\"])\n if \"rx queues\" in value:\n rx_qs = int(value[\"rx queues\"])\n if \"rx descs\" in value:\n rx_ds = int(value[\"rx descs\"])\n if \"tx queues\" in value:\n tx_qs = int(value[\"tx queues\"])\n if \"tx descs\" in value:\n tx_ds = int(value[\"tx descs\"])\n\n print(\n \"{:30} {:>4} {:>4} {:>7} {:>4} {:>7}\".format(\n name, numa, rx_qs, rx_ds, tx_qs, tx_ds\n )\n )",
"def _get_available_usb_devices(self, regexp=None, include_links=True) -> list:\n logger.debug(\n f\"_get_available_usb_devices(regexp={regexp}, include_links={include_links})\"\n )\n if not regexp:\n ports = serial_list_ports.comports(include_links=include_links)\n else:\n # cast as list because it's a generator and I want an easy return type\n # How many USB devices could a user possibly have?\n ports = list(\n serial_list_ports.grep(regexp=regexp, include_links=include_links)\n )\n return ports",
"def devices(self):\n return self._recordings.keys()",
"def get_cl_devices():\n\n _devices = {'CPU':[], 'GPU':[]}\n\n platforms = cl.get_platforms()\n for platform in platforms:\n devices = platform.get_devices()\n for device in devices:\n if device.type == cl.device_type.CPU:\n _devices['CPU'].append(device)\n elif device.type == cl.device_type.GPU:\n _devices['GPU'].append(device)\n \n \n return _devices",
"def scan_chip_ble_devices(devCtrl):\n devices = []\n bleMgr = BleManager(devCtrl)\n bleMgr.scan(\"-t 10\")\n\n for device in bleMgr.peripheral_list:\n devIdInfo = bleMgr.get_peripheral_devIdInfo(device)\n if devIdInfo:\n devInfo = devIdInfo.__dict__\n devInfo[\"name\"] = device.Name\n devices.append(devInfo)\n\n return devices",
"def list_devices(cls, filters={}):\n return cls.dbdriver.list_devices(filters)",
"def ret_device_list():\n token = get_auth_token() # Get Token\n url = \"https://sandboxdnac.cisco.com/api/v1/network-device\"\n hdr = {'x-auth-token': token, 'content-type' : 'application/json'}\n resp = requests.get(url, headers=hdr) # Make the Get Request\n device_list = resp.json()\n return device_list",
"def get_devices(self) -> List[LandscapeDevice]:\n device_list = None\n\n self.landscape_lock.acquire()\n try:\n device_list = [dev for dev in self._all_devices.values()]\n finally:\n self.landscape_lock.release()\n\n return device_list",
"def get_mbed_devices(self):\n upper_ven = [ven.upper() for ven in self.usb_vendor_list]\n mounts_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\MountedDevices')\n for point, label, _ in self.iter_vals(mounts_key):\n printable_label = label.decode('utf-16le', 'ignore')\n if ('DosDevices' in point and\n any(v in printable_label.upper() for v in upper_ven)):\n logger.debug(\"Found Mount point %s with usb ID %s\",point,\n printable_label)\n yield (point, printable_label)\n else:\n logger.debug(\"Skipping Mount point %r label %r\", point, label)",
"def get_list_devices(self, verbose=False):\n # TODO: refresh region_names if more regions get devices available\n self.backends = {}\n region_names = ['us-west-1', 'us-east-1']\n for region in region_names:\n client = boto3.client(\n 'braket',\n region_name=region,\n aws_access_key_id=self._credentials['AWS_ACCESS_KEY_ID'],\n aws_secret_access_key=self._credentials['AWS_SECRET_KEY'],\n )\n filters = []\n devicelist = client.search_devices(filters=filters)\n for result in devicelist['devices']:\n if result['deviceType'] not in ['QPU', 'SIMULATOR']:\n continue\n if result['deviceType'] == 'QPU':\n device_capabilities = json.loads(\n client.get_device(deviceArn=result['deviceArn'])['deviceCapabilities']\n )\n self.backends[result['deviceName']] = {\n 'nq': device_capabilities['paradigm']['qubitCount'],\n 'coupling_map': device_capabilities['paradigm']['connectivity']['connectivityGraph'],\n 'version': device_capabilities['braketSchemaHeader']['version'],\n 'location': region, # deviceCapabilities['service']['deviceLocation'],\n 'deviceArn': result['deviceArn'],\n 'deviceParameters': device_capabilities['deviceParameters']['properties']['braketSchemaHeader'][\n 'const'\n ],\n 'deviceModelParameters': device_capabilities['deviceParameters']['definitions'][\n 'GateModelParameters'\n ]['properties']['braketSchemaHeader']['const'],\n }\n # Unfortunately the Capabilities schemas are not homogeneus for real devices and simulators\n elif result['deviceType'] == 'SIMULATOR':\n device_capabilities = json.loads(\n client.get_device(deviceArn=result['deviceArn'])['deviceCapabilities']\n )\n self.backends[result['deviceName']] = {\n 'nq': device_capabilities['paradigm']['qubitCount'],\n 'coupling_map': {},\n 'version': device_capabilities['braketSchemaHeader']['version'],\n 'location': 'us-east-1',\n 'deviceArn': result['deviceArn'],\n 'deviceParameters': device_capabilities['deviceParameters']['properties']['braketSchemaHeader'][\n 'const'\n ],\n 'deviceModelParameters': device_capabilities['deviceParameters']['definitions'][\n 'GateModelParameters'\n ]['properties']['braketSchemaHeader']['const'],\n }\n\n if verbose:\n print('- List of AWSBraket devices available:')\n print(list(self.backends))\n\n return self.backends",
"def getDeviceList(self):\n return defer.succeed(self.discovered)",
"def get_devices(adb=DEFAULT_ADB):\n # Check that adb is running\n Device.__start_adb(adb)\n # Split by newline and remove first line (\"List of devices attached\")\n # TODO: surround with try/except?\n devices = subprocess.check_output(\n [adb, \"devices\", \"-l\"]).decode().split('\\n')[1:]\n tmp = {}\n for dev in devices:\n if dev:\n tmp[dev.split()[0]] = dev\n return tmp",
"def scan_devices(self):\n self._update_info()\n\n return [client['mac'] for client in self.last_results]",
"def enumerate_devices():\n devices = list(\n map(XInputJoystick, list(range(XInputJoystick.max_devices))))\n return [device for device in devices if device.is_connected()]",
"def get_device_list_by_path(self):\n by_path_dir = \"/dev/disk/by-path/\"\n disk_list = os.listdir(by_path_dir)\n usb_set = set()\n for device in disk_list:\n if device.find(\"usb\") != -1:\n path = os.readlink(by_path_dir + device)\n abs_path = os.path.abspath(by_path_dir + path)\n usb_set.add(abs_path)\n return usb_set",
"def get_devices(needs: int = None):\n\n num_gpus = torch.cuda.device_count()\n\n if num_gpus == 0:\n devices = [torch.device(\"cpu\")]\n if needs is None:\n return devices\n return devices * needs\n\n devices = [torch.device(f\"cuda:{index:d}\") for index in range(num_gpus)]\n if needs is None:\n return devices\n return [device for _, device in zip(range(needs), itertools.cycle(devices))]",
"def scan_devices(self):\n self._update_info()\n return [client[\"mac\"] for client in self.last_results]",
"def test_get_pci_coprocessor_card_list(self):\n pass",
"def get_generic_pci_devs_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetGenericPciDevsCount', self.handle)",
"def com_select():\n if 'win' in sys.platform:\n ports = [x.device\n for x in comports()]\n else:\n ports = ['/dev/' + x.device\n for x in comports() if 'AMA' not in x.name and x.name]\n return ports",
"def YieldAllDevices(self):\n\n for deviceManagedObject in self.managedObject.config.hardware.device:\n yield Device(vm=self, managedObject=deviceManagedObject)",
"def FindAllAvailableDevices(options):\n use_ssh = options.cros_remote and cros_interface.HasSSH()\n if not use_ssh and not IsRunningOnCrOS():\n logging.debug('No --remote specified, and not running on ChromeOs.')\n return []\n\n return [CrOSDevice(options.cros_remote, options.cros_remote_ssh_port,\n options.cros_ssh_identity, not use_ssh)]",
"def devices():\n\n ret = {}\n\n p = subprocess.Popen([\"lsusb\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n\n out = p.stdout.read()\n err = p.stderr.read()\n\n if err:\n raise salt.exceptions.CommandExecutionError(\"Failed to run lsusb: {}\".format(err))\n\n # Ensure pattern is compiled\n global pattern\n if not pattern:\n log.info(\"Compiling regex pattern {}\".format(LSUSB_OUTPUT_REGEX))\n pattern = re.compile(LSUSB_OUTPUT_REGEX)\n\n # Parse output\n devices = []\n for dev_line in out.split(\"\\n\"):\n if dev_line == \"\":\n # empty line, skip\n continue\n\n match = pattern.match(dev_line)\n if not match:\n log.warning(\"Couldn't match line {}\".format(dev_line))\n continue\n\n devices.append({\n \"bus\": match.group(\"bus\"),\n \"device\": match.group(\"device\"),\n \"vendor\": match.group(\"vendor\"),\n \"product\": match.group(\"product\"),\n \"name\": match.group(\"name\"),\n })\n\n ret[\"values\"] = devices\n return ret",
"def get_devices_per_node(self):\n\n for i in self._nodes.items():\n node = i[1]\n # Update the interface data\n\n self._get_device(node)\n\n self.updateconfig()",
"async def async_get_devices(self):\n if self.token is None:\n await self.async_initialize_token()\n\n self.devices.clear()\n raw = await self._async_ws_get_function(CMD_DEVICES)\n\n try:\n xml_root = element_tree.fromstring(raw)\n mac_adresses: List[str] = [mac.text for mac in xml_root.iter(\"MACAddr\")]\n hostnames: List[str] = [mac.text for mac in xml_root.iter(\"hostname\")]\n ip_addresses: List[str] = [mac.text for mac in xml_root.iter(\"IPv4Addr\")]\n interfaces: List[str] = [mac.text for mac in xml_root.iter(\"interface\")]\n speeds: List[str] = [mac.text for mac in xml_root.iter(\"speed\")]\n interface_ids: List[str] = [\n mac.text for mac in xml_root.iter(\"interfaceid\")\n ]\n methods: List[str] = [mac.text for mac in xml_root.iter(\"method\")]\n lease_times: List[str] = [mac.text for mac in xml_root.iter(\"leaseTime\")]\n\n for (\n mac_address,\n hostname,\n ip_address,\n interface,\n speed,\n interface_id,\n method,\n lease_time,\n ) in zip(\n mac_adresses,\n hostnames,\n ip_addresses,\n interfaces,\n speeds,\n interface_ids,\n methods,\n lease_times,\n ):\n self.devices.append(\n Device(\n mac_address,\n hostname,\n ip_address.partition(\"/\")[0],\n interface,\n speed,\n interface_id,\n method,\n lease_time,\n )\n )\n except (element_tree.ParseError, TypeError):\n _LOGGER.warning(\"Can't read device from %s\", self.host)\n self.token = None\n raise exceptions.ConnectBoxNoDataAvailable() from None",
"def getDevices(self):\n\n devices = None\n\n for i in range(3):\n devices = subprocess.check_output(\"adb devices -l\", creationflags=self.createNoWindow)\n\n devices = devices.decode()\n deviceModel = re.findall(\"model:(.*) device\", devices)\n deviceID = re.findall(r\"(\\S+) {2}\", devices, flags=re.IGNORECASE)\n\n return deviceModel, deviceID",
"def get_devices():\n try:\n with open(DEVICES, 'r') as f:\n data = json.load(f)['devices']\n except (IOError, ValueError) as err:\n raise SwiftlmCheckFailure('Failure opening %s: %s' % (DEVICES, err))\n\n devices = []\n for d in data:\n l = d.get('label', LABEL_CHECK_DISABLED)\n devices.append(Device(\n device=d['name'],\n mount=MOUNT_PATH+d['swift_drive_name'],\n label=l\n ))\n\n return devices",
"def usb_devices():\r\n ret_out = utils.run('lsusb').stdout.strip('\\n').replace(',', ' ')\r\n return ret_out",
"def getDevices():\n \n scannedDevices = list()\n \n proc = subprocess.Popen('bluetoothctl scan on', shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=8192, universal_newlines=True)\n \n time.sleep(10)\n \n proc.stdin.write('scan off')\n \n try:\n stdout, stderr = proc.communicate()\n except subprocess.TimeoutExpired:\n proc.kill()\n stdout, stderr = proc.communicate()\n\n ansiEscapePattern = re.compile(r'\\x1B[@-_][0-?]*[ -/]*[@-~]')\n stdout = ansiEscapePattern.sub('', stdout)\n \n #deviceNamePattern = re.compile('^\\[NEW\\] Device [A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2} ')\n \n for line in stdout.split('\\n'):\n if '[NEW] Device' in line:\n device = list()\n device.append(line[13:31])\n device.append(line[31:])\n scannedDevices.append(device)\n \n return scannedDevices",
"def getRegisteredDevices(self):\n raise NotImplementedError(\"All inherited classes of DeviceRegisterer must implement getRegisteredDevices.\")",
"def getDevicesList(self, serialNum, internal=False):\r\n\r\n self._logger.debug(\"in API getDevicesList()...\")\r\n\r\n # check the auth tokens and TTL unless this is a get state call (a non-polling call)\r\n if not internal:\r\n self._checkTokens()\r\n\r\n # format url parameters\r\n params = {\r\n \"actionID\": \"command\",\r\n \"command\": _SESSION_COMMAND_GET_DEVICES,\r\n \"serial\": serialNum,\r\n \"sessionID\": self._sessionID,\r\n } \r\n\r\n # call the session API with the parameters\r\n response = self._call_api(_API_SESSION, params=params)\r\n \r\n # if data returned, format devices state and return\r\n if response and response.status_code == 200:\r\n\r\n respData = response.json() \r\n return self._buildDevicesState(respData)\r\n\r\n # otherwise return empty dictionary (evaluates to false)\r\n else:\r\n return {}",
"def getGpus():\n nvmlInit()\n gpu_list = []\n for i in range(0, nvmlDeviceGetCount()):\n handle = nvmlDeviceGetHandleByIndex(i)\n gpu_list.append(NvidiaGPU(handle))\n return gpu_list",
"def trusted_devices(self):\n request = self.session.get(\n f\"{self.SETUP_ENDPOINT}/listDevices\", params=self.params\n )\n return request.json().get(\"devices\")",
"def find_all(cls, vps: Sequence[Tuple[int, int]],\n nocache: bool = False) -> \\\n List[Tuple[UsbDeviceDescriptor, int]]:\n cls.Lock.acquire()\n try:\n devs = set()\n for vid, pid in vps:\n # TODO optimize useless loops\n devs.update(UsbTools._find_devices(vid, pid, nocache))\n devices = set()\n for dev in devs:\n ifcount = max([cfg.bNumInterfaces for cfg in dev])\n # TODO: handle / is serial number strings\n sernum = UsbTools.get_string(dev, dev.iSerialNumber)\n description = UsbTools.get_string(dev, dev.iProduct)\n descriptor = UsbDeviceDescriptor(dev.idVendor, dev.idProduct,\n dev.bus, dev.address,\n sernum, None, description)\n devices.add((descriptor, ifcount))\n return list(devices)\n finally:\n cls.Lock.release()"
] | [
"0.74077874",
"0.7218372",
"0.72180504",
"0.72177297",
"0.7194209",
"0.7167658",
"0.70619637",
"0.69890875",
"0.698407",
"0.69460297",
"0.6933677",
"0.6915167",
"0.688954",
"0.68429095",
"0.6825503",
"0.68069667",
"0.68069667",
"0.68069667",
"0.68069667",
"0.67960405",
"0.6794884",
"0.6740752",
"0.6725867",
"0.6676614",
"0.66740334",
"0.6669641",
"0.65504587",
"0.65294904",
"0.652689",
"0.65047586",
"0.6483012",
"0.6480133",
"0.64636755",
"0.6460072",
"0.64591336",
"0.64491916",
"0.6446392",
"0.6424255",
"0.64083874",
"0.63767874",
"0.63637185",
"0.63579273",
"0.6353942",
"0.6345698",
"0.63386524",
"0.63132715",
"0.6311758",
"0.63026774",
"0.62914455",
"0.6286081",
"0.6282109",
"0.62639296",
"0.62578416",
"0.6242904",
"0.6220873",
"0.6215855",
"0.6155891",
"0.61509913",
"0.61467934",
"0.6117489",
"0.61166835",
"0.61084175",
"0.6106356",
"0.6101432",
"0.60662127",
"0.60542727",
"0.6036936",
"0.6023035",
"0.60193163",
"0.60165966",
"0.6010647",
"0.60031146",
"0.5986539",
"0.5974483",
"0.5970972",
"0.59696317",
"0.596953",
"0.59687597",
"0.5956305",
"0.5955439",
"0.5951275",
"0.5946631",
"0.59398305",
"0.5938364",
"0.592917",
"0.5919017",
"0.5915702",
"0.5911931",
"0.59092885",
"0.5905951",
"0.5892601",
"0.58900744",
"0.58887875",
"0.58747315",
"0.58710325",
"0.5852706",
"0.58456373",
"0.5795013",
"0.57801837",
"0.57738453"
] | 0.8361106 | 0 |
Returns the list of gpu devices. | def _get_gpu_pci_devices(self):
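"""Return the PCI device entries whose class and subclass codes identify a GPU."""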
pci_device_list = self._get_pci_devices()
gpu_list = []
items = pci_device_list['Items']
for item in items:
if item['ClassCode'] in CLASSCODE_FOR_GPU_DEVICES:
if item['SubclassCode'] in SUBCLASSCODE_FOR_GPU_DEVICES:
gpu_list.append(item)
return gpu_list | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def gpu_devices(self):\n return self._gpu_devices",
"def devices(self):\n\t\t\tdevices = []\n\t\t\tnum = cuda.Device.count()\n\t\t\tfor id in range(num):\n\t\t\t\tname = cuda.Device(id).name()\n\t\t\t\tmemory = cuda.Device(id).total_memory()\n\t\t\t\tdevices.append((memory, name, id))\n\t\t\treturn devices",
"def list_devices():\r\n DeviceManagerCLI.BuildDeviceList()\r\n return DeviceManagerCLI.GetDeviceList()",
"def getGpus():\n nvmlInit()\n gpu_list = []\n for i in range(0, nvmlDeviceGetCount()):\n handle = nvmlDeviceGetHandleByIndex(i)\n gpu_list.append(NvidiaGPU(handle))\n return gpu_list",
"def get_devices():\n devices = []\n for device_id in range(pm.lib.Pm_CountDevices()):\n devices.append(DeviceInfo(device_id))\n\n return devices",
"def list_devices(self):\n return [x for x in self.devices.keys()]",
"def _get_available_gpus():\n global _LOCAL_DEVICES\n if _LOCAL_DEVICES is None:\n if _is_tf_1():\n devices = get_session().list_devices()\n _LOCAL_DEVICES = [x.name for x in devices]\n else:\n _LOCAL_DEVICES = tf.config.experimental_list_devices()\n return [x for x in _LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def _get_gpu_names() -> Sequence[str]:\n result = []\n for device in device_lib.list_local_devices():\n if device.device_type != \"GPU\":\n continue\n desc = device.physical_device_desc\n\n fields = desc.split(\",\")\n for field in fields:\n name, value = field.split(\":\", maxsplit=1)\n name = name.strip()\n value = value.strip()\n if name == \"name\":\n result.append(value)\n return result",
"def get_devices(self):\n devices = []\n for i in self.devices:\n devices.append(self.devices[i])\n\n return devices",
"def _get_available_gpus():\r\n #global _LOCAL_DEVICES\r\n if tfback._LOCAL_DEVICES is None:\r\n devices = tf.config.list_logical_devices()\r\n tfback._LOCAL_DEVICES = [x.name for x in devices]\r\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def _get_available_gpus():\n #global _LOCAL_DEVICES\n if tfback._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tfback._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def _get_available_gpus():\n #global _LOCAL_DEVICES\n if tfback._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tfback._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def _get_available_gpus():\n #global _LOCAL_DEVICES\n if tfback._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tfback._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def _display_cuda_devices():\n\n cuda_query_output = subprocess.run(\"nvidia-smi --query-gpu=gpu_uuid,gpu_name,compute_mode --format=csv\", shell=True, capture_output=True, text=True)\n # Check if command worked\n if cuda_query_output.returncode == 0:\n # Split by line jump and comma\n cuda_devices_list = [entry for entry in cuda_query_output.stdout.splitlines()]\n logger.debug(f\"CUDA devices available: {*cuda_devices_list,}\")\n # We only support \"Default\" and not \"Exclusive_Process\" for the compute mode\n if \"Default\" not in cuda_query_output.stdout:\n logger.warning(f\"GPU in 'Exclusive_Process' mode (or Prohibited), one context is allowed per device. This may prevent some openmmtools features from working. GPU must be in 'Default' compute mode\")\n # Handel the case where the command had some error\n else:\n logger.debug(f\"nvidia-smi command failed: {cuda_query_output.stderr}, this is expected if there is no GPU available\")",
"def get_devices(self):\n return self.api_request('GET', self.url + '/device', {})",
"def _get_available_gpus():\n # global _LOCAL_DEVICES\n if tf_back._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tf_back._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tf_back._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def _get_available_gpus():\n # global _LOCAL_DEVICES\n if tfback._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tfback._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def devices(self):\n return self.enumerate_devices()",
"def _get_device_list(self):\n if self.app.config.cloud_type == 'ec2':\n # c5/m5 on AWS mounts EBS volumes as NVMe:\n # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/nvme-ebs-volumes.html\n # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html\n for itype in ['c5', 'm5']:\n if itype in self.app.cloud_interface.get_type():\n return frozenset(glob('/dev/nvme[0-26]n1'))\n return frozenset(glob('/dev/*d[a-z]'))",
"def GetAllDevices(self):\n\n return list(self.YieldAllDevices())",
"def get_available_gpus():\n local_device_protos = device_lib.list_local_devices()\n return [x.name for x in local_device_protos if x.device_type == \"GPU\"]",
"def get_cl_devices():\n\n _devices = {'CPU':[], 'GPU':[]}\n\n platforms = cl.get_platforms()\n for platform in platforms:\n devices = platform.get_devices()\n for device in devices:\n if device.type == cl.device_type.CPU:\n _devices['CPU'].append(device)\n elif device.type == cl.device_type.GPU:\n _devices['GPU'].append(device)\n \n \n return _devices",
"def list_devices():\n return _lib.SeaTeaseAPI().list_devices()",
"def try_all_gpus(): #@save\n num_gpus = len(tf.config.experimental.list_physical_devices('GPU'))\n devices = [tf.device(f'/GPU:{i}') for i in range(num_gpus)]\n return devices if devices else [tf.device('/CPU:0')]",
"def get_all_devices(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_GetAllDevices', self.handle))",
"def get_devices():\n devices = []\n for path in hookenv.action_get('osd-devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n raise Error('{}: Not absolute path.'.format(path))\n devices.append(path)\n return devices",
"def get_available_devices():\n executable_path = os.path.join(os.path.dirname(__file__), 'build')\n try:\n num_devices = int(subprocess.check_output(\n [\"{}/query_devices\".format(executable_path)]))\n except subprocess.CalledProcessError as e:\n return [0]\n\n FNULL = open(os.devnull, 'w')\n\n available_devices = []\n for i in range(num_devices):\n try:\n if b\"NVIDIA\" in subprocess.check_output(\n [\"{}/test_device\".format(executable_path),\n str(i)], stderr=FNULL):\n available_devices.append(i)\n logging.info('Device {} is available for rendering'.format(i))\n except subprocess.CalledProcessError as e:\n logging.info(e)\n logging.info('Device {} is not available for rendering'.format(i))\n FNULL.close()\n\n return available_devices",
"def devices(self):\n return list(self._device_types)",
"def list_devices(self):\n response = self.oauth.get(url=f'{self.base_url}/json/devices/list')\n\n result = response.json()['device']\n for device in result:\n print(device)",
"def get_test_devices():\n\n # Assumption: CPU is always available\n devices = ['cpu']\n\n if torch.cuda.is_available():\n devices.append('cuda')\n\n return devices",
"def get_available_devices(self):\n available_devices = []\n try:\n out = self.get_output(\"devices\")\n except Exception as e:\n logger.error(e)\n else:\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n return available_devices",
"def getDevices():\n devices = create_string_buffer(BUF_SIZE)\n daqmx(\n dll.DAQmxGetSysDevNames,\n (\n devices,\n BUF_SIZE\n )\n )\n return parseStringList(devices.value)",
"def list_devices(self):\n xml = str(self._server.listDevices())\n return self._parse_cabling_xml(xml)",
"def devices() -> typing.List[str]:\n devices = sounddevice.query_devices()\n return [device['name'] for device in devices if device['max_output_channels'] > 0]",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def list_devices(cls):\n # get all matching devices\n return usb.core.find(\n find_all=True,\n custom_match=lambda dev: (\n dev.idVendor == cls.vendor_id and dev.idProduct in cls.product_ids\n ),\n )",
"def get_test_devices():\n devices = [\"cpu\"]\n if torch.cuda.is_available():\n devices.append(\"cuda\")\n return devices",
"def get_devices(self):\n return get_devices(self.api_key)",
"def devices(self):\n\n return self.__devices",
"def get_devices(self):\n devices = self.get(\"event/device\")",
"async def find_devices() -> List[DeviceInfo]:\n return await Discovery.search_devices()",
"def get_gpus():\n try:\n re = subprocess.check_output([\"nvidia-smi\", \"-L\"], universal_newlines=True)\n except OSError:\n return []\n return range(len([i for i in re.split('\\n') if 'GPU' in i]))",
"def get_tf_visible_gpus(verbose = False):\n local_device_protos = device_lib.list_local_devices()\n if verbose:\n [print(x.name) for x in local_device_protos if x.device_type == 'GPU']\n return [x.name for x in local_device_protos if x.device_type == 'GPU']",
"def list_devices(cls, filters={}):\n return cls.dbdriver.list_devices(filters)",
"def devices(self, **kwargs):\n return self._get(API.DEVICES.value, check_202=True, **kwargs)",
"def get_computation_devices(\n preferred_gpu_list: Optional[List[int]],\n multi_gpu_flag: bool,\n) -> List[Device]:\n\n # use CPU when GPUs are not preferred or not available\n if (preferred_gpu_list is None) \\\n or (len(preferred_gpu_list) == 0) \\\n or (not torch.cuda.is_available()):\n return [Device('cpu'), ]\n\n # else GPUs are preferred and available\n # get all available GPU indexes\n _available_gpu_list: List[int]\n if getAvailable:\n # by default, use GPU utility package with load and memory usage\n # specification so that the 'available' GPUs are actually ready\n # for deep learning runs (https://github.com/anderskm/gputil)\n _available_gpu_list = getAvailable(\n limit=_MAX_NUM_GPUS,\n maxLoad=_MAX_GPU_LOAD,\n maxMemory=_MAX_GPU_MEM_USED,\n )\n else:\n # assume all GPUs are good to use without GPUtil package\n _available_gpu_list = list(range(torch.cuda.device_count()))\n _warning_msg = \\\n f'GPUtil (https://github.com/anderskm/gputil) not installed.' \\\n f'Assuming all GPUs ({_available_gpu_list}) are available ' \\\n f'and ready for training ... '\n _LOGGER.warning(_warning_msg)\n\n # get the overlap between the preferred and the available GPUs\n _gpus = \\\n [_g for _g in _available_gpu_list if _g in preferred_gpu_list]\n\n # use CPU if there is no preferred GPUs that are available\n if len(_gpus) == 0:\n return [Device('cpu'), ]\n\n # otherwise return one or all GPUs depending on the multi-GPU flag\n return [Device(f'cuda:{_g}') for _g in _gpus] \\\n if multi_gpu_flag else [Device(f'cuda:{_gpus[0]}'), ]",
"def devices(self) -> list[Device]:\n return list(self._devices.values())",
"def detect_gpus():\n def worker(q):\n # `device_lib` will not release the memory it took,\n # so we run it in a sub-process.\n try:\n from tensorflow.python.client import device_lib\n\n if is_tensorflow_version_higher_or_equal('1.8.0'):\n config = tf.ConfigProto()\n config.gpu_options.allow_growth = True\n devices = list(device_lib.list_local_devices(config))\n else:\n devices = list(device_lib.list_local_devices())\n gpus = [\n (device.name, device)\n for device in devices\n if device.device_type == 'GPU'\n ]\n union_set = {i: i for i in range(len(gpus))}\n\n for i, (name, device) in enumerate(gpus):\n assert (device.name == '/device:GPU:{}'.format(i))\n for link in device.locality.links.link:\n if link.device_id != i:\n union_set[i] = union_set[link.device_id]\n\n for i in six.iterkeys(union_set):\n while union_set[i] != union_set[union_set[i]]:\n union_set[i] = union_set[union_set[i]]\n\n root_devices = sorted(set(union_set.values()))\n gpu_groups = [[] for _ in range(len(root_devices))]\n dev_to_group = {j: i for i, j in enumerate(root_devices)}\n for i, (name, device) in enumerate(gpus):\n gpu_groups[dev_to_group[union_set[i]]].append(name)\n\n q.put((1, gpu_groups))\n except Exception:\n q.put((0, traceback.format_exc()))\n\n q = mp.Queue()\n p = mp.Process(target=worker, args=(q,))\n\n try:\n p.start()\n result = q.get()\n if result[0] == 1:\n return result[1]\n else:\n raise RuntimeError(\n 'Failed to retrieve GPU information, the traceback of '\n 'sub-process is:\\n {}'.\n format('\\n '.join(result[1].split('\\n')))\n )\n finally:\n p.terminate()\n p.join()",
"def get_devices(mac=None):\n wemo_devices = discover_wemo()\n\n if mac:\n dev = get_device(mac, wemo_devices)\n if not dev:\n return []\n return [dev]\n\n return wemo_devices",
"def get_devices():\n devices, errors = [], []\n\n for path in hookenv.action_get('devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n errors.append('{}: Not absolute path.'.format(path))\n elif not os.path.exists(path):\n errors.append('{}: Device does not exist.'.format(path))\n else:\n devices.append(path)\n\n if errors:\n raise ZapDiskError(\", \".join(errors))\n\n return devices",
"def countGPUs(self):\n return libnao_gpu.CountDevices()",
"def get_discoverable_devices(self):\n available = self.get_available_devices()\n paired = self.get_paired_devices()\n return [d for d in available if d not in paired]",
"def get_available_gpus() -> List[int]:\n orig_visible_devices = os.environ[f\"{CUDA_ENVVAR}\"]\n available_gpus = [int(g.strip()) for g in orig_visible_devices.split(\",\") if g and not g.isspace()]\n return available_gpus",
"def get_devices(needs: int = None):\n\n num_gpus = torch.cuda.device_count()\n\n if num_gpus == 0:\n devices = [torch.device(\"cpu\")]\n if needs is None:\n return devices\n return devices * needs\n\n devices = [torch.device(f\"cuda:{index:d}\") for index in range(num_gpus)]\n if needs is None:\n return devices\n return [device for _, device in zip(range(needs), itertools.cycle(devices))]",
"async def async_get_devices(self) -> list[dict[str, Any]]:\n return await self.aiolivisi.async_get_devices()",
"def devices(self) -> api.Devices:\n return self._get_model(model=api.Devices)",
"def list_available_devices(self) -> List[LandscapeDevice]:\n self._ensure_activation()\n\n device_list = None\n\n self.landscape_lock.acquire()\n try:\n device_list = [dev for dev in self._device_pool.values()]\n finally:\n self.landscape_lock.release()\n\n return device_list",
"def FindAllAvailableDevices(options):\n use_ssh = options.cros_remote and cros_interface.HasSSH()\n if not use_ssh and not IsRunningOnCrOS():\n logging.debug('No --remote specified, and not running on ChromeOs.')\n return []\n\n return [CrOSDevice(options.cros_remote, options.cros_remote_ssh_port,\n options.cros_ssh_identity, not use_ssh)]",
"def usb_devices():\r\n ret_out = utils.run('lsusb').stdout.strip('\\n').replace(',', ' ')\r\n return ret_out",
"def get_connected_devices(self):\n all_devices = []\n if self.vid_list:\n for vid in self.vid_list:\n all_devices += UsbDriver.usb_list_devices(vid)\n\n if self.pid_ignore_list:\n return [device for device in all_devices\n if not (device.product_id in self.pid_ignore_list)]\n else:\n return all_devices",
"def devices(self):\n return self._sdk_dependencies.device_client",
"def get_discoverable_devices(self):\n available = self.get_available_devices()\n paired = self.get_paired_devices()\n\n return [d for d in available if d not in paired]",
"def do_list(self, _):\n devices = []\n for source in self._target.devices:\n devices.append({\n 'name': source.device['name'],\n 'path': source.device['path'],\n })\n return devices",
"def list_devices(arn=None, nextToken=None):\n pass",
"def CUDA_VISIBLE_DEVICES(self):\n return self._CUDA_VISIBLE_DEVICES",
"def list_local_devices():\n def _convert(pb_str):\n m = device_attributes_pb2.DeviceAttributes()\n m.ParseFromString(pb_str)\n return m\n return [_convert(s) for s in pywrap_tensorflow.DeviceFactory_AddDevices()]",
"def getDevices(self):\n\n devices = None\n\n for i in range(3):\n devices = subprocess.check_output(\"adb devices -l\", creationflags=self.createNoWindow)\n\n devices = devices.decode()\n deviceModel = re.findall(\"model:(.*) device\", devices)\n deviceID = re.findall(r\"(\\S+) {2}\", devices, flags=re.IGNORECASE)\n\n return deviceModel, deviceID",
"def get_available_devices(self):\n try:\n out = self.get_output(\"devices\")\n except BluetoothctlError, e:\n print(e)\n return None\n else:\n available_devices = []\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n\n return available_devices",
"def get_devices(jwt: str) -> List:\n LOGGER.debug(\"Retrieving devices...\")\n\n args = {\n \"url\": \"{0}/device\".format(CONFIG['dojot']['url']),\n \"headers\": {\n \"Content-Type\": \"application/json\",\n \"Authorization\": \"Bearer {0}\".format(jwt),\n },\n }\n\n res = DojotAPI.call_api(requests.get, args)\n\n devices_ids = [device['id'] for device in res['devices']]\n\n LOGGER.debug(\"... retrieved the devices\")\n\n return devices_ids",
"def lv_devices(self):\n devs = set()\n return devs",
"def get_devices(self) -> List[LandscapeDevice]:\n device_list = None\n\n self.landscape_lock.acquire()\n try:\n device_list = [dev for dev in self._all_devices.values()]\n finally:\n self.landscape_lock.release()\n\n return device_list",
"def listDevices(self):\n count = 0\n for device in self:\n count += 1\n printLog(\"Device \" + str(count) + \": '%s %s (%s, %s, %s)'\" % (\n device.make, device.model, device.deviceId, device.androidVersion, device.operator))\n if device.idle:\n printLog(\"[Idle]\")\n else:\n printLog(\"[Busy]\")",
"def list_available_cameras():\n graph = FilterGraph()\n device_names = graph.get_input_devices()\n return device_names",
"def get_devices(adb=DEFAULT_ADB):\n # Check that adb is running\n Device.__start_adb(adb)\n # Split by newline and remove first line (\"List of devices attached\")\n # TODO: surround with try/except?\n devices = subprocess.check_output(\n [adb, \"devices\", \"-l\"]).decode().split('\\n')[1:]\n tmp = {}\n for dev in devices:\n if dev:\n tmp[dev.split()[0]] = dev\n return tmp",
"def _get_pci_devices(self):\n\n system = self._get_host_details()\n if ('links' in system['Oem']['Hp'] and\n 'PCIDevices' in system['Oem']['Hp']['links']):\n # Get the PCI URI and Settings\n pci_uri = system['Oem']['Hp']['links']['PCIDevices']['href']\n status, headers, pci_device_list = self._rest_get(pci_uri)\n\n if status >= 300:\n msg = self._get_extended_error(pci_device_list)\n raise exception.IloError(msg)\n\n return pci_device_list\n\n else:\n msg = ('links/PCIDevices section in ComputerSystem/Oem/Hp'\n ' does not exist')\n raise exception.IloCommandNotSupportedError(msg)",
"def get_devices():\n try:\n with open(DEVICES, 'r') as f:\n data = json.load(f)['devices']\n except (IOError, ValueError) as err:\n raise SwiftlmCheckFailure('Failure opening %s: %s' % (DEVICES, err))\n\n devices = []\n for d in data:\n l = d.get('label', LABEL_CHECK_DISABLED)\n devices.append(Device(\n device=d['name'],\n mount=MOUNT_PATH+d['swift_drive_name'],\n label=l\n ))\n\n return devices",
"def GetGPU():\n return option['device_id']",
"async def get_devices(self, params: Optional = None) -> dict:\r\n return await self.get_items(API_DEVICES, params=params)",
"def get_devices(self):\n\n \"\"\"\n # Note: This code is no longer required with the latest spt updates.\n # But that said, leaving for now so I don't risk breaking folks!\n if not self._use_lsscsi:\n message = \"Find Number of IOM's\"\n command = \"lsscsi | fgrep enclo | egrep 'HGST|WDC' | wc -l\"\n pdata = self._run_command(command=command, message=message, logger=self._logger, shell=True)\n ioms = (int)(pdata['stdout'].strip())\n if ioms > 1:\n self._use_lsscsi = True\n if not self._use_lsscsi and os.path.exists('/etc/multipath.conf'):\n self._use_lsscsi = True\n \"\"\"\n # Allow above logic or options to override lsscsi vs. spt usage.\n if not self._use_lsscsi or self._force_spt:\n self.get_devices_spt()\n else:\n self.get_devices_lsscsi()\n return",
"def ret_device_list():\n token = get_auth_token() # Get Token\n url = \"https://sandboxdnac.cisco.com/api/v1/network-device\"\n hdr = {'x-auth-token': token, 'content-type' : 'application/json'}\n resp = requests.get(url, headers=hdr) # Make the Get Request\n device_list = resp.json()\n return device_list",
"def get_devices(self):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"get_account_units\",\n \"account_token\": self.api_token\n }\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_pin\".format(self.BASE_URL),\n data=json.dumps(data),\n headers=headers)\n response_json = response.json()\n if not response_json.get(\"success\"):\n raise ValueError(response_json.get(\"error_message\"))\n\n units_json = response_json.get(\"units\")\n devices = []\n for unit in units_json:\n device = Charger(unit, self)\n device.update_state()\n devices.append(device)\n\n return devices",
"def try_all_gpus():\n ctx_list = []\n try:\n for i in range(16):\n ctx = mx.gpu(i)\n _ = nd.array([0], ctx=ctx)\n ctx_list.append(ctx)\n except:\n pass\n if not ctx_list:\n ctx_list = [mx.cpu()]\n return ctx_list",
"def get_devices(self): \n devices = []\n \n # get all the keys from the dictionary\n keys = self.SCPI_Data.keys()\n \n # extract the device specifier\n dev_keys = [key.split(':')[0] for key in keys]\n \n # iterate through the devices\n for key in dev_keys:\n if (key not in devices) and (key != 'SUP'):\n # this is a unique device, add it to the list\n devices = devices + [key]\n # end if\n # end for\n \n devices = devices + ['SIM']\n \n # replace the GPS if present with its longer name\n devices = ['GPSRM' if device == 'GPS' else device \n for device in devices]\n return devices",
"def get_list_of_devices(self, give_json=False):\n\n url = Constants.BASE_URL + 'users/devices'\n response = requests.get(url=url, params={'key': self.user_access_token})\n\n if give_json:\n return response.json()\n else:\n return response.text",
"def gpu_metrics(self) -> List[ClaraGpuUtilization]:\r\n return self._gpu_metrics",
"def get_available_device():\n if torch.cuda.is_available():\n free_mem, device_idx = 0.0, 0\n for d in range(torch.cuda.device_count()):\n mem = torch.cuda.get_device_properties(d).total_memory - torch.cuda.memory_allocated(d)\n if mem > free_mem:\n device_idx = d\n free_mem = mem\n return torch.device(f'cuda:{device_idx}')\n else:\n return torch.device('cpu')",
"def enumerate_devices():\n devices = list(\n map(XInputJoystick, list(range(XInputJoystick.max_devices))))\n return [device for device in devices if device.is_connected()]",
"def list_devices(context, connstrings):\n return _nfc.list_devices(context, connstrings)",
"def get_devices():\n names = devices.list()\n if request.args.get('full') is not None:\n data = {d: devices.show(d) for d in names}\n else:\n data = names\n return jsonify({'devices': data})",
"def _get_device(self, n_gpu_use):\n n_gpu = torch.cuda.device_count()\n if n_gpu_use > 0 and n_gpu == 0:\n self.logger.warning(\"Warning: There\\'s no GPU available on this machine,\"\n \"training will be performed on CPU.\")\n n_gpu_use = 0\n if n_gpu_use > n_gpu:\n self.logger.warning(f\"Warning: The number of GPU\\'s configured to use is {n_gpu_use}, \"\n f\"but only {n_gpu} are available on this machine.\")\n n_gpu_use = n_gpu\n device = torch.device('cuda:0' if n_gpu_use > 0 else 'cpu')\n list_ids = list(range(n_gpu_use))\n self.logger.info(f'Using device: {device}, {list_ids}')\n return device, list_ids",
"def devices(self):\n return {k:v for k, v in self._data.items() \n if v[\"type\"] == \"DEVICE\"}",
"def get_physical_output_channels(self):\r\n bufsize = 1024\r\n buf = ctypes.create_string_buffer(bufsize)\r\n NIDAQ_dll.DAQmxGetDevAOPhysicalChans(self.dev_id.encode('ascii'),\r\n ctypes.byref(buf), bufsize)\r\n channel_list = buf_to_list(buf)\r\n channel_list = [channel.lstrip(self.dev_id+'/') for channel in channel_list]\r\n return channel_list",
"def get_devices():\n global managed_objects\n global devices_by_adr\n \n devices_by_adr = {}\n \n r = re.compile(\"\\/org\\/bluez\\/hci\\d*\\/dev\\_(.*)\")\n # e.g., match a string like this:\n # /org/bluez/hci0/dev_58_C9_35_2F_A1_EF\n \n for key, value in managed_objects.items():\n # print(\"key=\", key)\n m = r.match(key)\n if m is not None:\n dev_str = m.group(1) # we have a device string!\n # print(\"dev_str=\", dev_str)\n # let's flatten that dict a bit\n devices_by_adr[dev_str] = value[\"org.bluez.Device1\"]",
"def load_devices(self):\n response = self.oauth.get(url=f'{self.base_url}/json/devices/list')\n\n result = response.json()['device']\n return [(device['id'], device['name'], device['state']) for device in result]",
"def get_mbed_devices(self):\n upper_ven = [ven.upper() for ven in self.usb_vendor_list]\n mounts_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\MountedDevices')\n for point, label, _ in self.iter_vals(mounts_key):\n printable_label = label.decode('utf-16le', 'ignore')\n if ('DosDevices' in point and\n any(v in printable_label.upper() for v in upper_ven)):\n logger.debug(\"Found Mount point %s with usb ID %s\",point,\n printable_label)\n yield (point, printable_label)\n else:\n logger.debug(\"Skipping Mount point %r label %r\", point, label)",
"def get_device_ids(self) -> list[bluetooth.BluetoothUuid]:\n return [bluetooth.BluetoothUuid(i) for i in self.deviceIds()]",
"def get_device_list_by_path(self):\n by_path_dir = \"/dev/disk/by-path/\"\n disk_list = os.listdir(by_path_dir)\n usb_set = set()\n for device in disk_list:\n if device.find(\"usb\") != -1:\n path = os.readlink(by_path_dir + device)\n abs_path = os.path.abspath(by_path_dir + path)\n usb_set.add(abs_path)\n return usb_set"
] | [
"0.85350674",
"0.7933002",
"0.7830052",
"0.7785717",
"0.77419907",
"0.75910795",
"0.746411",
"0.74347156",
"0.74125063",
"0.74052244",
"0.73901397",
"0.7388185",
"0.7388185",
"0.7371611",
"0.73683524",
"0.7368252",
"0.73586226",
"0.7343914",
"0.73268294",
"0.7308077",
"0.729592",
"0.7274378",
"0.723062",
"0.72238904",
"0.72062576",
"0.72009957",
"0.71951383",
"0.71627414",
"0.71454716",
"0.70925",
"0.7071279",
"0.7054656",
"0.70527685",
"0.7047328",
"0.7039081",
"0.7039081",
"0.7039081",
"0.7039081",
"0.7034091",
"0.7023177",
"0.6966901",
"0.69652057",
"0.6959243",
"0.695624",
"0.6932646",
"0.6897789",
"0.6895697",
"0.68687475",
"0.6832492",
"0.6830991",
"0.6816106",
"0.6796099",
"0.6763211",
"0.67443925",
"0.67424333",
"0.6725712",
"0.6710883",
"0.67034847",
"0.6698673",
"0.66748327",
"0.6659197",
"0.6650141",
"0.66412586",
"0.6637959",
"0.66108686",
"0.6606735",
"0.6604727",
"0.65989375",
"0.65981805",
"0.6591853",
"0.6581246",
"0.65803903",
"0.6572981",
"0.6565795",
"0.65547186",
"0.6543571",
"0.65428585",
"0.65314496",
"0.6516764",
"0.650598",
"0.65024835",
"0.6502176",
"0.64968795",
"0.6492942",
"0.64874756",
"0.6479034",
"0.6472984",
"0.6461105",
"0.64554423",
"0.64445394",
"0.64383924",
"0.643535",
"0.64304554",
"0.6412386",
"0.6399227",
"0.63778543",
"0.63769376",
"0.63725644",
"0.6355859",
"0.6353395"
] | 0.7771476 | 4 |
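For context on the record above: _get_gpu_pci_devices reduces a full PCI inventory to GPUs by matching each item's class and subclass codes. Below is a minimal, self-contained Python sketch of that filter; the concrete code values (0x03 for display controllers, 0x00 for VGA-compatible) and the sample item layout are illustrative assumptions, not taken from the source.

# Hypothetical stand-ins for the CLASSCODE_FOR_GPU_DEVICES /
# SUBCLASSCODE_FOR_GPU_DEVICES constants referenced in the document above.
CLASSCODE_FOR_GPU_DEVICES = [3]      # assumed: PCI class 0x03, display controller
SUBCLASSCODE_FOR_GPU_DEVICES = [0]   # assumed: subclass 0x00, VGA-compatible

def filter_gpu_devices(pci_device_list):
    # Keep only the items whose class/subclass codes mark a GPU.
    return [item for item in pci_device_list['Items']
            if item['ClassCode'] in CLASSCODE_FOR_GPU_DEVICES
            and item['SubclassCode'] in SUBCLASSCODE_FOR_GPU_DEVICES]

# Usage with a hand-built inventory:
sample = {'Items': [{'ClassCode': 3, 'SubclassCode': 0, 'Name': 'Embedded Video'},
                    {'ClassCode': 2, 'SubclassCode': 0, 'Name': 'Ethernet NIC'}]}
print(filter_gpu_devices(sample))  # -> only the 'Embedded Video' item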
Get the BIOS settings resource. | def _get_bios_settings_resource(self, data):
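"""Fetch the BIOS Settings resource via its 'links' href and return (headers, uri, settings)."""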
try:
bios_settings_uri = data['links']['Settings']['href']
except KeyError:
msg = ('BIOS Settings resource not found.')
raise exception.IloError(msg)
status, headers, bios_settings = self._rest_get(bios_settings_uri)
if status != 200:
msg = self._get_extended_error(bios_settings)
raise exception.IloError(msg)
return headers, bios_settings_uri, bios_settings | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_bios_setting(self, bios_property):\n headers, bios_uri, bios_settings = self._check_bios_resource([\n bios_property])\n return bios_settings[bios_property]",
"def get_current_bios_settings(self, only_allowed_settings=True):\n\n sushy_system = self._get_sushy_system()\n try:\n current_settings = sushy_system.bios.json\n except sushy.exceptions.SushyError as e:\n msg = (self._('The current BIOS Settings were not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n\n attributes = current_settings.get(\"Attributes\")\n return attributes",
"def get_settings(self):\n return self.request({\n \"path\": \"/\" + UUID + \"/setting\"\n })",
"def _get_bios_boot_resource(self, data):\n try:\n boot_uri = data['links']['Boot']['href']\n except KeyError:\n msg = ('Boot resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, boot_settings = self._rest_get(boot_uri)\n\n if status != 200:\n msg = self._get_extended_error(boot_settings)\n raise exception.IloError(msg)\n\n return boot_settings",
"def get_pending_bios_settings(self, only_allowed_settings=True):\n\n sushy_system = self._get_sushy_system()\n try:\n settings = sushy_system.bios.pending_attributes\n except sushy.exceptions.SushyError as e:\n msg = (self._('The pending BIOS Settings were not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n\n return settings",
"def settings(self):\r\n return SettingResource(self)",
"def settings():\n return _get_settings()[1]",
"def get_settings(self):\n url = \"https://api.imgur.com/3/account/{0}/settings\".format(self.name)\n return self._imgur._send_request(url)",
"def get_bios_settings(bmc):\n bios_settings = bmc.list_bios_settings()\n # Convert the settings to something that is JSON-serialisable.\n settings = {}\n for param, value in bios_settings.items():\n setting = {}\n # Not all attributes exist on all settings, so allow them to be absent.\n attrs = {\n 'current_value',\n 'pending_value',\n 'possible_values',\n }\n for attr in attrs:\n if hasattr(value, attr):\n setting[attr] = getattr(value, attr)\n settings[param] = setting\n return settings",
"def get(isamAppliance, check_mode=False, force=False, ignore_error=False):\n return isamAppliance.invoke_get(\"Retrieving a list of firmware settings\",\n \"/firmware_settings\", ignore_error=ignore_error, requires_model=requires_model)",
"def settings(self):\r\n url = '{0}/userSettings'.format(self.get_url())\r\n return http.Request('GET', url), parsers.parse_json",
"def advanced_settings(self):\n settings = ADVANCEDSETTINGS()\n ckresult(_dll.FMOD_System_GetAdvancedSettings(self._ptr, byref(settings)))\n return settings",
"def get_settings(self):\n return self.settings",
"def _load_settings(self):\n self._dll.LS_LoadSettings(self._serial_number)\n return None",
"def get_settings():\n with open('config/config.json') as data_file:\n settings = json.load(data_file)\n return settings",
"def get_settings_resource(res_type, abbr, res_name):\n\t\n\tif zen_settings.has_key(res_type):\n\t\tresource = zen_settings[res_type];\n\t\tif (has_deep_key(resource, [res_name, abbr])):\n\t\t\treturn resource[res_name][abbr]\n\t\telif 'extends' in resource:\n\t#\t\tfind abbreviation in ancestors\n\t\t\tfor v in resource['extends']:\n\t\t\t\tif has_deep_key(zen_settings, [v, res_name, abbr]):\n\t\t\t\t\treturn zen_settings[v][res_name][abbr]\n\treturn None;",
"def get_settings(self):\n settings = self.client._perform_json(\n \"GET\", \"/projects/%s/apiservices/%s/settings\" % (self.project_key, self.service_id))\n\n return DSSAPIServiceSettings(self.client, self.project_key, self.service_id, settings)",
"def get_settings():\n return db.get_data()",
"def get_settings():\n df = Struct(\n template=DYNAMICFORMS_BOOTSTRAP,\n )\n df = df.clone(**getattr(s, \"DYNAMICFORMS\", {}))\n template = df.template\n if template == DYNAMICFORMS_BOOTSTRAP:\n return SettingsBootstrap(**df.__to_dict__())\n return SettingsJqueryUI(**df.__to_dict__())",
"def device_setting(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"device_setting\"), kwargs)",
"def getSerialPortSettings(cls):\n return cls.serial_settings",
"def get_settings():\n settings = {}\n for setting in cfg.displayable_setting:\n settings[setting] = getattr(cfg, setting)\n return settings",
"def get_settings():\n settings = {}\n for setting in cfg.displayable_setting:\n settings[setting] = getattr(cfg, setting)\n return settings",
"def get_settings(self):\n\n\t\t# TODO: Consider YAML. Human writable, machine readable.\n\t\twith open(self.filename) as fp:\n\t\t\ttry:\n\t\t\t\treturn json.load(fp)\n\t\t\texcept Exception, e:\n\t\t\t\tif self.DEBUG:\n\t\t\t\t\tprint >>sys.stderr, 'get_settings exception:', e\n\t\t\t\treturn {}",
"def get_settings():\n return SettingCollection.build()",
"def get(cls, client, name=\"\", option_=\"\") :\n\t\ttry :\n\t\t\tif not name :\n\t\t\t\tobj = appfwlearningsettings()\n\t\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\telse :\n\t\t\t\tif type(name) is not list :\n\t\t\t\t\tif type(name) == cls :\n\t\t\t\t\t\traise Exception('Invalid parameter name:{0}'.format(type(name)))\n\t\t\t\t\tobj = appfwlearningsettings()\n\t\t\t\t\tobj.profilename = name\n\t\t\t\t\tresponse = obj.get_resource(client, option_)\n\t\t\t\telse :\n\t\t\t\t\tif name and len(name) > 0 :\n\t\t\t\t\t\tif type(name[0]) == cls :\n\t\t\t\t\t\t\traise Exception('Invalid parameter name:{0}'.format(type(name[0])))\n\t\t\t\t\t\tresponse = [appfwlearningsettings() for _ in range(len(name))]\n\t\t\t\t\t\tobj = [appfwlearningsettings() for _ in range(len(name))]\n\t\t\t\t\t\tfor i in range(len(name)) :\n\t\t\t\t\t\t\tobj[i] = appfwlearningsettings()\n\t\t\t\t\t\t\tobj[i].profilename = name[i]\n\t\t\t\t\t\t\tresponse[i] = obj[i].get_resource(client, option_)\n\t\t\treturn response\n\t\texcept Exception as e :\n\t\t\traise e",
"def find_settings():\n return Setting()",
"def GetFileCleanerSettings():\n obj = ndb.Key(FileCleanerSettings, FILE_CLEANER_SETTINGS_ID).get()\n return obj or DEFAULT_FILE_CLEANER_SETTINGS",
"def _get_bios_mappings_resource(self, data):\n try:\n map_uri = data['links']['Mappings']['href']\n except KeyError:\n msg = ('Mappings resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, map_settings = self._rest_get(map_uri)\n if status != 200:\n msg = self._get_extended_error(map_settings)\n raise exception.IloError(msg)\n\n return map_settings",
"def getCurrentSetting(self):\n return {}",
"def get_settings():\n settings_path = os.path.join(get_config_home(), 'tcharmap', 'settings.yaml')\n try:\n return yaml.safe_load(open(settings_path))\n except FileNotFoundError:\n return {'auto_copy': False}",
"def get_setting(self, id):\n return __settings__.getSetting(id)",
"def current_settings(self):\n return {\n 'power_state': self.power_state,\n 'brightness': self.brightness,\n }",
"def get_raw(self):\n return self.settings",
"def get_raw(self):\n return self.settings",
"def get_raw(self):\n return self.settings",
"def get_raw(self):\n return self.settings",
"def __get_base_info_api(self):\r\n try:\r\n return Call_shelly_api(url=self.__api_address + \"/settings\")\r\n except ShellyException as err:\r\n _LOGGER.warning(err)",
"def settings(self):\r\n return settings.Settings(self)",
"def client_settings():\n return CLIENT_SETTINGS",
"def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]",
"def getSettings(self):\n context = findContext(self.request)\n switcher = component.getMultiAdapter((context, self.request),\n name=u\"themeswitcher\")\n\n return switcher.getSettings(self._old_getSettings)",
"def get_skill_settings(self):\n return self.request({\n \"method\": \"GET\",\n \"path\": \"/\" + UUID + \"/skill/settings\",\n })",
"def load_settings(self):\n\n self.std = settings.settings",
"def settings_load(self):\n self.ui.spinBox_ATSP.setValue(self.default['ATSP'])\n\n if self.default['serialLabel'] == 'bt':\n self.ui.btRadio.setChecked(True)\n try:\n os.system(\"blueman-manager\")\n except:\n print \"Please install 'blueman' package\"\n elif self.default['serialLabel'] == 'usb':\n self.ui.usbRadio.setChecked(True)\n else:\n self.ui.devRadio.setChecked(True)\n\n if self.default['units'] == 'metric':\n self.ui.units_metric_radio.setChecked(True)\n else:\n self.ui.units_US_radio.setChecked(True)\n\n return",
"def settings(self) -> Optional[pulumi.Input['ConfigurationServiceSettingsArgs']]:\n return pulumi.get(self, \"settings\")",
"def fusion_api_get_global_settings(self, uri=None, api=None, headers=None, param=''):\n return self.settings.get(uri, api, headers, param)",
"def site_settings(self):\r\n return users.SiteSettings(self)",
"def getSettings(self):\n return self.cfg",
"def _get_iscsi_settings_resource(self, data):\n try:\n iscsi_settings_uri = data['links']['Settings']['href']\n except KeyError:\n msg = ('iscsi settings resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, iscsi_settings = self._rest_get(iscsi_settings_uri)\n\n if status != 200:\n msg = self._get_extended_error(iscsi_settings)\n raise exception.IloError(msg)\n\n return headers, iscsi_settings_uri, iscsi_settings",
"def settings(self):\n if self._settings is not None:\n return self._settings\n\n settings = self.binaries['KeeAgent.settings'].content\n self._settings = objectify.fromstring(settings)\n return self._settings",
"def get_settings(f_obj):\n\n return {setting : getattr(f_obj, setting) for setting in get_obj_desc()['settings']}",
"def settings_gui(self) -> api.SettingsGui:\n return self._get_model(model=api.SettingsGui)",
"def get(cls):\n try:\n return cls.objects.get(id=1)\n except cls.DoesNotExist:\n default_settings = SiteSettings(id=1)\n default_settings.save()\n return default_settings",
"def get_resource_config(target=False, force=None):\n return get_stored_property(ctx, 'resource_config', target, force)",
"def __getSettingsFromStorage():\n return AccountSettings.getSettings(NEW_SETTINGS_COUNTER)",
"def settings(self):\n return self._settings",
"def settings(self):\n return self._settings",
"def settings_global(self) -> api.SettingsGlobal:\n return self._get_model(model=api.SettingsGlobal)",
"def get_system_value(name: str):\n return Config.objects.first().__dict__[name]",
"def fusion_api_get_lsg_default_settings(self, api=None, headers=None):\n return self.lsg.get(api=api, param='/defaultSettings', headers=headers)",
"def getResource(self):\n return self.__resource;",
"def getResource(self):\n\n return self.__resource;",
"def getPref(self):\n return col.BusDAO.FindByIndex(self.Scanbus)",
"def get_secure_boot_mode(self):\n system = self._get_host_details()\n\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = ('\"SecureBoot\" resource or feature is not supported'\n ' on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # get the Secure Boot object\n status, headers, secure_boot_settings = self._rest_get(secure_boot_uri)\n\n if status >= 300:\n msg = self._get_extended_error(secure_boot_settings)\n raise exception.IloError(msg)\n\n return secure_boot_settings['SecureBootCurrentState']",
"def get_account_settings():\n pass",
"def GetAWSSettings(self):\n # [GET] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/cloud-connect-aws/GetAWSSettings\n FULL_URL = self.base_url+'/cloud-connect-aws/combined/settings/v1'\n HEADERS = self.headers\n result = self.Result()\n try:\n response = requests.request(\"GET\", FULL_URL, headers=HEADERS, verify=False)\n returned = result(response.status_code, response.headers, response.json())\n except Exception as e:\n returned = result(500, {}, str(e))\n \n return returned",
"def setting(self, setting):\r\n return SettingResource(self, setting)",
"def _get_conf(self):\n self.press_conf = self.sysconf['PressureRegulators']\n return self.press_conf['PressureRegulator%d' % self.id_]",
"def settings(self):\n from hubspot3.settings import SettingsClient\n\n return SettingsClient(**self.auth, **self.options)",
"def software_config(self) -> pulumi.Output['outputs.RuntimeSoftwareConfigResponse']:\n return pulumi.get(self, \"software_config\")",
"def GetSettingInformation(self):\n if self.cur_uid is None:\n return\n self._get_device_hours()",
"def reset_bios_to_default(self):\n # Check if the BIOS resource if exists.\n headers_bios, bios_uri, bios_settings = self._check_bios_resource()\n # Get the BaseConfig resource.\n try:\n base_config_uri = bios_settings['links']['BaseConfigs']['href']\n except KeyError:\n msg = (\"BaseConfigs resource not found. Couldn't apply the BIOS \"\n \"Settings.\")\n raise exception.IloCommandNotSupportedError(msg)\n\n # Check if BIOS resource supports patch, else get the settings\n if not self._operation_allowed(headers_bios, 'PATCH'):\n headers, bios_uri, _ = self._get_bios_settings_resource(\n bios_settings)\n self._validate_if_patch_supported(headers, bios_uri)\n\n status, headers, config = self._rest_get(base_config_uri)\n if status != 200:\n msg = self._get_extended_error(config)\n raise exception.IloError(msg)\n\n new_bios_settings = {}\n for cfg in config['BaseConfigs']:\n default_settings = cfg.get('default', None)\n if default_settings is not None:\n new_bios_settings = default_settings\n break\n else:\n msg = (\"Default Settings not found in 'BaseConfigs' resource.\")\n raise exception.IloCommandNotSupportedError(msg)\n request_headers = self._get_bios_hash_password(self.bios_password)\n status, headers, response = self._rest_patch(bios_uri, request_headers,\n new_bios_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def settings() -> Settings:\n return Settings()",
"def settings(environment=None):\n if not environment:\n environment = get_environment()\n loader = Loader()\n return loader.settings(environment)",
"def settings(self) -> BaseSettings:\n return self._context.settings",
"def settings(self) -> BaseSettings:\n return self._context.settings",
"def getResource(self):\n return self.serviceClass.app.resource()",
"def os_settings():\n for setting_name, env_name in (\n (\"debug\", \"BACPYPES_DEBUG\"),\n (\"color\", \"BACPYPES_COLOR\"),\n (\"debug_file\", \"BACPYPES_DEBUG_FILE\"),\n (\"max_bytes\", \"BACPYPES_MAX_BYTES\"),\n (\"backup_count\", \"BACPYPES_BACKUP_COUNT\"),\n (\"route_aware\", \"BACPYPES_ROUTE_AWARE\"),\n ):\n env_value = os.getenv(env_name, None)\n if env_value is not None:\n cur_value = settings[setting_name]\n\n if isinstance(cur_value, bool):\n env_value = env_value.lower()\n if env_value in (\"set\", \"true\"):\n env_value = True\n elif env_value in (\"reset\", \"false\"):\n env_value = False\n else:\n raise ValueError(\"setting: \" + setting_name)\n elif isinstance(cur_value, int):\n try:\n env_value = int(env_value)\n except:\n raise ValueError(\"setting: \" + setting_name)\n elif isinstance(cur_value, str):\n pass\n elif isinstance(cur_value, list):\n env_value = env_value.split()\n elif isinstance(cur_value, set):\n env_value = set(env_value.split())\n else:\n raise TypeError(\"setting type: \" + setting_name)\n settings[setting_name] = env_value",
"def user_settings(self):\n return self._user_settings",
"def get_settings(self):\n return AlgoliaUtils_request(self.client.headers, self.read_hosts, \"GET\", \"/1/indexes/%s/settings\" % self.url_index_name, self.client.timeout)",
"def get(self, resource, default=0):\n return getattr(self._resources, resource, default)",
"def read():\n return mac_slideshow.preferences.read(KEY)",
"def getDefaultSettings(self) -> ghidra.docking.settings.Settings:\n ...",
"def get_setting(self, setting):\n return self.do_rpc(\"get_setting\", key=key)",
"def user_account_settings(self) -> pulumi.Output[Optional['outputs.UserAccountSettingsResponse']]:\n return pulumi.get(self, \"user_account_settings\")",
"def read_settings():\n \n settings = OrdDic()\n settings.update(json.load(open(\"resources/files/settings.txt\", \"r\")))\n\n ## OLD WAY BELOW\n\n #r = open(\"resources/files/settings.txt\", \"r\", newline=\"\\n\")\n # for option in r.read().split('\\n'):\n # try:\n # #option = option.split('\\\\')\n # #settings.update({option[0]: option[1]})\n # # settings.update(json.loads(option))\n # except IndexError:\n # pass\n return settings",
"def _get_settings():\n # store_last_good=True tells config component to update the config file\n # in a cron job. Here we just read from the datastore.\n rev, cfg = config.get_self_config(\n SETTINGS_CFG_FILENAME, config_pb2.SettingsCfg, store_last_good=True)\n cfg = cfg or config_pb2.SettingsCfg()\n return rev, cfg",
"def findSettingsFile():\n settingsName = 'oct-fire-settings.json'\n userPath = os.path.expanduser('~')\n if os.path.exists(settingsName):\n return settingsName\n elif os.path.exists(os.path.join(userPath, settingsName)):\n return os.path.join(userPath, settingsName)\n elif os.path.exists(os.path.join(userPath, 'Desktop', settingsName)):\n return os.path.join(userPath, 'Desktop', settingsName)\n elif os.path.exists(os.path.join(userPath, 'Documents', settingsName)):\n return os.path.join(userPath, 'Documents', settingsName)\n elif os.path.exists(os.path.join(userPath, 'Downloads', settingsName)):\n return os.path.join(userPath, 'Downloads', settingsName)\n raise Exception('Could not locate settings file')",
"def loadPBSettings(self):\n # TODO // NEEDS to be IMPROVED and compatible with all softwares (Nuke and Houdini)\n logger.debug(\"Func: loadPBSettings\")\n\n # old Name getPBsettings\n\n # pbSettingsFile = os.path.normpath(os.path.join(self.project_Path, \"Playblasts\", \"PBsettings.json\"))\n\n if not os.path.isfile(self._pathsDict[\"pbSettingsFile\"]):\n # defaultSettings = {\"Resolution\": (1280, 720), ## done\n # \"Format\": 'avi', ## done\n # \"Codec\": 'IYUV codec', ## done\n # \"Percent\": 100, ## done\n # \"Quality\": 100, ## done\n # \"ShowFrameNumber\": True,\n # \"ShowSceneName\": False,\n # \"ShowCategory\": False,\n # \"ShowFrameRange\": True,\n # \"ShowFPS\": True,\n # \"PolygonOnly\": True, ## done\n # \"ShowGrid\": False, ## done\n # \"ClearSelection\": True, ## done\n # \"DisplayTextures\": True, ## done\n # \"WireOnShaded\": False,\n # \"UseDefaultMaterial\": False,\n # }\n defaultSettings = self._sceneManagerDefaults[\"defaultPreviewSettings\"]\n self._dumpJson(defaultSettings, self._pathsDict[\"pbSettingsFile\"])\n return defaultSettings\n else:\n pbSettings = self._loadJson(self._pathsDict[\"pbSettingsFile\"])\n if pbSettings == -2:\n return -2\n return pbSettings",
"def get_pending_boot_mode(self):\n headers, uri, bios_settings = self._check_bios_resource(['BootMode'])\n _, _, settings = self._get_bios_settings_resource(bios_settings)\n boot_mode = settings.get('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n return boot_mode.upper()",
"def __getattribute__(self, name: str) -> Any:\n if name.isupper():\n try:\n return self._settings[name]\n except KeyError as exc:\n msg = f\"Requested setting {exc.args[0]} does not exist.\"\n raise SettingsError(msg) from exc\n return super().__getattribute__(name)",
"def get_settings(self):\n return DSSWorkspaceSettings(self, self.client._perform_json(\"GET\", \"/workspaces/%s\" % self.workspace_key))",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def settings(self) -> Any:\n self.ensure_initialized()\n return SettingsItem(self._data, self, FragmentPath())",
"def get_resources():\r\n global __res\r\n if __res == None:\r\n __init_resources()\r\n return __res",
"def get_resources():\r\n global __res\r\n if __res == None:\r\n __init_resources()\r\n return __res",
"def get_resources():\r\n global __res\r\n if __res == None:\r\n __init_resources()\r\n return __res",
"def _getSettings(checks):\r\n parser = _RCESettingsParser()\r\n\r\n if PATH not in parser.read(PATH):\r\n raise NoValidSettings('Config file is missing.')\r\n\r\n try:\r\n return _Settings.load(parser, checks)\r\n except (Error, ValueError) as e:\r\n raise NoValidSettings(str(e))",
"def setting(setting_name):\n\n return getattr(settings, setting_name)"
] | [
"0.6687052",
"0.66053444",
"0.6305347",
"0.62781394",
"0.6151722",
"0.6145869",
"0.5832804",
"0.56546223",
"0.5652844",
"0.5579022",
"0.5577148",
"0.55607617",
"0.5531982",
"0.5527874",
"0.54689676",
"0.5428091",
"0.5396962",
"0.53963387",
"0.53295165",
"0.53211266",
"0.52970964",
"0.52945626",
"0.52945626",
"0.527815",
"0.5273763",
"0.5271563",
"0.5270882",
"0.5264808",
"0.52625394",
"0.5255814",
"0.52429557",
"0.52402353",
"0.5239961",
"0.52373445",
"0.52373445",
"0.52373445",
"0.52373445",
"0.5228038",
"0.5215738",
"0.51982856",
"0.5191413",
"0.518209",
"0.5179684",
"0.51777744",
"0.5165988",
"0.5156614",
"0.5152258",
"0.51474684",
"0.5147022",
"0.5142034",
"0.5122645",
"0.51194984",
"0.5114692",
"0.5114065",
"0.5109466",
"0.5103201",
"0.50955975",
"0.50955975",
"0.5081156",
"0.5070536",
"0.5066462",
"0.503609",
"0.49979568",
"0.4990167",
"0.49832258",
"0.49827504",
"0.4980367",
"0.49676615",
"0.49599364",
"0.4956985",
"0.49532676",
"0.49488193",
"0.49410668",
"0.4939998",
"0.49248475",
"0.4923734",
"0.4923734",
"0.4910178",
"0.490382",
"0.4894325",
"0.48894054",
"0.48793855",
"0.48775047",
"0.48761666",
"0.48657787",
"0.48492584",
"0.48481292",
"0.4846967",
"0.4838989",
"0.4837497",
"0.48363656",
"0.4835259",
"0.48326793",
"0.48268136",
"0.48149973",
"0.4814172",
"0.4814172",
"0.4814172",
"0.4804629",
"0.47889808"
] | 0.7173748 | 0 |
Check if the PATCH Operation is allowed on the resource. | def _validate_if_patch_supported(self, headers, uri):
if not self._operation_allowed(headers, 'PATCH'):
msg = ('PATCH Operation not supported on the resource '
'"%s"' % uri)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_PATCH(self):\n if not self.url:\n return\n response = self.client.patch(self.url, {}, format='json')\n self.assertIn(response.status_code, [status.HTTP_405_METHOD_NOT_ALLOWED,\n status.HTTP_401_UNAUTHORIZED])",
"def test_partial_update_should_not_be_allowed(self):\n response = self.client.patch(self.get_url(), {})\n self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)",
"def test_client_can_do_patch_request(self):\n response = self.httpbin_4.test_requests_patch_method()\n self.assertEqual(response.request.method, 'PATCH')\n self.assertEqual(response.status_code, 200)",
"def test_update_should_not_be_allowed(self):\n response = self.client.put(self.get_url(), {})\n self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)",
"def _check_iscsi_rest_patch_allowed(self):\n\n headers, bios_uri, bios_settings = self._check_bios_resource()\n # Check if the bios resource exists.\n\n if('links' in bios_settings and 'iScsi' in bios_settings['links']):\n iscsi_uri = bios_settings['links']['iScsi']['href']\n status, headers, settings = self._rest_get(iscsi_uri)\n\n if status != 200:\n msg = self._get_extended_error(settings)\n raise exception.IloError(msg)\n\n if not self._operation_allowed(headers, 'PATCH'):\n headers, iscsi_uri, settings = (\n self._get_iscsi_settings_resource(settings))\n self._validate_if_patch_supported(headers, iscsi_uri)\n\n return iscsi_uri\n\n else:\n msg = ('\"links/iScsi\" section in bios'\n ' does not exist')\n raise exception.IloCommandNotSupportedError(msg)",
"def _check_patch_requirements(region_name,\n applied_patches=None,\n available_patches=None):\n\n api_token = None\n if applied_patches:\n patches_applied = patch_api.patch_is_applied(\n token=api_token,\n timeout=constants.PATCH_DEFAULT_TIMEOUT_IN_SECS,\n region_name=region_name,\n patches=applied_patches\n )\n if not patches_applied:\n raise wsme.exc.ClientSideError(_(\n \"The following patches must be applied before doing \"\n \"the kubernetes upgrade: %s\" % applied_patches))\n\n if available_patches:\n patches_available = patch_api.patch_is_available(\n token=api_token,\n timeout=constants.PATCH_DEFAULT_TIMEOUT_IN_SECS,\n region_name=region_name,\n patches=available_patches\n )\n if not patches_available:\n raise wsme.exc.ClientSideError(_(\n \"The following patches must be available before doing \"\n \"the kubernetes upgrade: %s\" %\n available_patches))",
"def can_be_modified(self):\n return self.state in {RequestState.pending, RequestState.accepted}",
"def has_update_permissions(self, obj):\n return True",
"def handle_patch(self, api, command):\n return self._make_request_from_command('PATCH', command)",
"def can_update_comments(self):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_update_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True",
"def test_unsupported_requests_fail(self):\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 405)\n\n response = self.client.put(self.url)\n self.assertEqual(response.status_code, 405)\n\n response = self.client.patch(self.url)\n self.assertEqual(response.status_code, 405)",
"def test_patch_not_allowed(self, parse_args):\n parse_args.side_effect = [{\n _ATTEMPT.attempt_id: 'forbidden'\n }, {\n _ATTEMPT.run_id: 'forbidden'\n }]\n _, err = self.resource.patch(self.attempts[1][_ATTEMPT.attempt_id])\n self.assertEqual(403, err)",
"def _operation_allowed(self, headers_dict, operation):\n\n if 'allow' in headers_dict:\n if operation in headers_dict['allow']:\n return True\n return False",
"def is_catastrophic(self):\n if (self.request.method.upper() == 'PUT'\n and 'PLURAL_PUT' not in self.http_methods) \\\n or (self.request.method.upper() == 'DELETE'\n and 'PLURAL_DELETE' not in self.http_methods):\n return True\n return False",
"def test_method_not_allowed(self):\n resp = self.app.put(\n \"/customers\", \n json={\"not\": \"today\"}, \n content_type=\"application/json\"\n )\n self.assertEqual(resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)",
"def test_review_list_other_method_not_allowed(self):\n client = Client()\n client.login(username='TEST_USER_1',\n email='TEST_EMAIL_1', password='TEST_PW_1')\n response = client.put('/api/review/')\n\n self.assertEqual(response.status_code, 405)",
"def partial_update(self, request, pk=None):\n return Response({'http_method':'PATCH'})",
"def patch(resource, data, **kwargs):\n\tresp = requests.patch(\n\t\t_endpoint(resource, 'PATCH'),\n\t\tparams=_jsonify_dict_values(kwargs),\n\t\tdata=json.dumps(data),\n\t\theaders=PAYLOAD_HEADERS,\n\t\tverify=SERVER_CERT\n\t)\n\tresp.raise_for_status()\n\treturn resp.json()",
"def test_validate_patch(client):\n response = client.patch(\n '/user/1',\n data=json.dumps({\n 'name': 'Jeff Knupp',\n }),\n headers={'Content-Type': 'application/json'}\n )\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE",
"def assertHttpMethodNotAllowed(self, resp):\r\n return self.assertEqual(resp.status_code, 405)",
"def partial_update(self, request, pk=None):\n\n return Response({'http_method': 'PATCH'})",
"def is_patched(self) -> bool:\n client = Client()\n # Get the relevant service from the cluster\n service = client.get(Service, name=self.service_name, namespace=self._namespace)\n # Construct a list of expected ports, should the patch be applied\n expected_ports = [(p.port, p.targetPort) for p in self.service.spec.ports]\n # Construct a list in the same manner, using the fetched service\n fetched_ports = [(p.port, p.targetPort) for p in service.spec.ports] # type: ignore[attr-defined] # noqa: E501\n return expected_ports == fetched_ports",
"def test_unsupported_request_methods(self):\n unsupported_methods = [\"POST\", \"PUT\", \"PATCH\", \"DELETE\"]\n for method_name in unsupported_methods:\n with self.subTest(method_name=method_name):\n request_method = getattr(self.client, method_name.lower())\n response = request_method(self.url)\n self.assertEqual(response.status_code, 405)",
"def partial_update(self, request, pk=None):\n\n return Response({'http_method':'PATCH'})",
"def patch(self , request , pk = None ):\r\n return Response({'method':'patch'})",
"def test_patch_request_by_non_owner(self):\n client = APIClient()\n client.credentials(HTTP_AUTHORIZATION=self.test_user2_token)\n response = client.post('/api/places/', self.restaurant_data, format='json')\n url = f\"/api/places/{response.data['id']}/\"\n\n client.credentials(HTTP_AUTHORIZATION=self.test_user1_token)\n response = client.patch(url, self.restaurant_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def patch(self, resource, data, target=None, verb='patch', **kwargs):\n return self._modify_resource(resource, data, target, verb, **kwargs)",
"def is_update(self):\n return self.action in [\"update\", \"partial_update\"]",
"def check_method_allowed(cls, request):\r\n if not request.method in cls._meta.allowed_methods:\r\n raise HttpError(\r\n 'Method \\'%s\\' not allowed on this resource.' % request.method,\r\n status=status.HTTP_405_METHOD_NOT_ALLOWED)",
"def has_change_permissions_permission(self, request):\n return self.has_generic_permission(request, \"change_permissions\")",
"def patch(self, request, *args, **kwargs):\n verify_secure(request)\n return super().patch(request, args, kwargs)",
"def patch(self, request, *args, **kwargs):\n verify_secure(request)\n return super().patch(request, args, kwargs)",
"def _changeable_fields(self, request, obj):\n return not obj or not self.is_readonly(request, obj)",
"def test_patch_a_resource_that_does_not_exist():\n pass",
"def has_change_permission(self, request, obj=None):\n return False",
"def _can_do_updates(self):\n return True",
"def partial_update(self, request, pk=None):\n\n return Response({'http_method': 'PATCH'})",
"def test_allow(self) -> None:\n response = self.request(\"/\", method=\"HEAD\")\n self.assert_allowed(response, (\"GET\", \"POST\"))",
"def has_modify_permissions(self, request, obj, local_site=None, *args,\n **kwargs):\n return obj.is_mutable_by(request.user, local_site=local_site)",
"def can_update_relationships(self):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_update_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True",
"def test_partial_update(self):\n self.client.force_authenticate(user=self.admin)\n\n data = {\n 'retreat': reverse(\n 'retreat:retreat-detail', args=[self.retreat.id]\n ),\n 'user': reverse('user-detail', args=[self.user2.id]),\n }\n\n response = self.client.put(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n data,\n format='json',\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )",
"def test_update_person_not_authenticated(self):\n\n data = {'first_name': 'Daenerys'}\n response = self.client.patch(self.url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def can_edit(self):\n return self._can_edit",
"def partial_update(self,request,pk= None):\n return Response({'http_method':'PATCH'})",
"def test_only_edit_perm(self):\n self.assertStatusCode(self.url, 403)",
"def can_update(self, dataset, updates_allowed=None):\n need_sources = dataset.sources is not None\n existing = self.get(dataset.id, include_sources=need_sources)\n if not existing:\n raise ValueError('Unknown dataset %s, cannot update – did you intend to add it?' % dataset.id)\n\n if dataset.product.name != existing.product.name:\n raise ValueError('Changing product is not supported. From %s to %s in %s' % (existing.product.name,\n dataset.product.name,\n dataset.id))\n\n # TODO: figure out (un)safe changes from metadata type?\n allowed = {\n # can always add more metadata\n tuple(): changes.allow_extension,\n }\n allowed.update(updates_allowed or {})\n\n doc_changes = get_doc_changes(existing.metadata_doc, jsonify_document(dataset.metadata_doc))\n good_changes, bad_changes = changes.classify_changes(doc_changes, allowed)\n\n return not bad_changes, good_changes, bad_changes",
"def supports_rescoring(self):\r\n return all('filesubmission' not in responder.allowed_inputfields for responder in self.responders.values())",
"def patch(self, request , pk=None):\n return Response({'message':'PATCH'})",
"def _assert_patched_settings(self, data, expected_response):\n response = self.patch_request(data)\n assert response.status_code == 204\n self._assert_current_settings(expected_response)",
"def partial_update(self, request, pk=None):\n is_stop_requested = request.data.get(\"is_stop_requested\", False)\n if is_stop_requested:\n run = self.get_object()\n\n if request.user != run.user and not admin_check(request.user):\n raise PermissionDenied\n run.request_stop(request.user)\n\n return self.patch_object(request, pk)",
"def has_object_update_permission(self, request):\n user = request.user\n if self == user:\n return True\n return user.is_superuser",
"def has_change_permission(self, request, instance=None):\n return False",
"def test_patch_a_resource_that_exists():\n pass",
"def should_validate(self):\n \n return self.request.method in self.validate_methods",
"def partial_update(self, request, pk=None): #partial update a specific object\n return Response({'http_method': 'PATCH'})",
"def partial_update(self,request,pk = None):\r\n\r\n return Response({'HTTP method':'PATCH'})",
"def test_update_checklists_delete_unsupported(self):\r\n update_url = self.get_url(100)\r\n response = self.client.delete(update_url)\r\n self.assertEqual(response.status_code, 405)",
"def patch(self, request, pk=None):\n return Response({'method': 'PATCH'})",
"def patch(self, request, pk=None):\n return Response({'method': 'PATCH'})",
"def patch(self, request, pk=None):\n return Response({'method': 'PATCH'})",
"def test_patch(self, patch):\n self.clean()\n error = self.apply_patch(patch)\n diff = self.run(['git', 'diff', 'origin/master'])\n self.clean()\n if error != '':\n return False, error\n if diff == '':\n # No error message is returned for empty diff. The patch might be\n # empty or has been exported.\n return False, ''\n return True, ''",
"def is_method_allowed(self):\n if self.request.method.upper() not in self.http_methods:\n raise exceptions.MethodNotAllowed(self.http_methods)\n return True",
"def test_reusableitem_unsupported_modification(self):\n\n self.client.force_authenticate(user=self.user_1)\n\n response = self.client.patch(get_reusable_item_1_url(self), {'change_request': 'Some text'}, format='json')\n\n updated_object = ReusableItem.objects.get(pk=self.reusableitem_1.id)\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)",
"def check_resource_mode(request):\n if request.method == \"GET\":\n edit_resource = request.session.get('resource-mode', None) == 'edit'\n if edit_resource:\n del request.session['resource-mode']\n else:\n if request.session.get('just_created', False):\n edit_resource = True\n else:\n edit_resource = request.GET.get('resource-mode', None) == 'edit'\n else:\n edit_resource = True\n\n return edit_resource",
"def can_update(self, user, **data):\n raise Return((True, set([])))",
"def patch(self, request, pk=None):\n\n return Response({'method': 'patch'})",
"def testUpdateAccessAllowed(self):\n for user in (self.contributor, self.delegate, self.owner, self.root):\n response = self.runPut(user, sequencer=self.hiseq2000.sodar_uuid, data=self.post_data)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(data[\"vendor_id\"], self.post_data[\"vendor_id\"])",
"def patch(self,request,pk = None):\n return Response({'method': 'PATCH'})",
"def has_change_permission(self, request, obj=None):\n if obj is not None:\n return False\n return super().has_change_permission(request, obj)",
"def match_request(self, req):\n\n return req.method == 'POST' and req.path_info == '/bitbucketsync'",
"def permit_required(self):\n return \"permission\" in self.description.lower()",
"def assertHttpMethodNotAllowed(self, response):\r\n self.assertEqual(response.status_code, 405)",
"def is_Restart_allowed(self):\n handler = self.get_command_object(\"Restart\")\n return handler.check_allowed()",
"def check_response_valid_update(response: HTTPResponse) -> bool:\n return response.status_code == 200",
"def patch(self,request,pk=None):\n return Response({'method':'Patch'})",
"def has_change_permission(self, request, obj=None):\n opts = self.opts\n codename = get_permission_codename('change', opts)\n return any([\n request.user.has_perm(\"%s.%s\" % (opts.app_label, codename)),\n request.user.has_perm(\"%s.%s\" % (opts.app_label, codename), obj)])",
"def can_edit(self):\n return self.state not in (\n 'scanning', 'resulted', 'cancelled', 'aborted')",
"def can_update_order_items(self) -> bool:\n return self.is_created or self.is_pending",
"def test_cannot_change_usage(self):\n p = Permission.objects.get(name='Can change usage')\n self.user.user_permissions.add(p)\n self.client.login(username='testuser', password='q2w3E$R%')\n data = {'month': 2}\n response = self.client.patch(reverse('api_v1:usage-detail', kwargs={'pk': 1}),\n data=json.dumps(data),\n content_type='application/json',\n follow=True)\n self.assertEqual(response.status_code, 405)\n self.assertIn('not allowed', str(response.content))",
"def supports_operation(self, operation: str) -> bool:\n return True",
"def checkPatchValidity(val):\n\n tag_list = val.split('-')\n if len(tag_list) < 5:\n return False\n\n if tag_list[0] not in os.environ.get('environment'):\n return False\n\n if tag_list[1] not in os.environ.get('platform'):\n return False\n\n if tag_list[2] not in os.environ.get('role'):\n return False \n\n if tag_list[3] not in os.environ.get('urgency'):\n return False \n\n if tag_list[4] not in os.environ.get('order'):\n return False\n\n return True",
"def patch(self, request, pk=None):\n return Response({'message': 'patch'})",
"def test_costcenter_patch_permissions(self):\n costCenterPK = CostCenter.objects.get(name='c2c1').pk\n url = reverse('CostCenter-detail', kwargs={'pk': costCenterPK})\n data = {'name': 'testCostCenter'}\n response = self.client.patch(url, data, format='json')\n #This is 404 instead of 403 because there is no way to view a company\n #that you arent an employee of.\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n self.assertNotEqual(CostCenter.objects.get(pk=costCenterPK).name,\n 'testCostCenter')",
"def test_post_partial_update_unauthorized(self):\n url = reverse('post-detail', kwargs={'pk': self.post.id})\n title = 'Random New Title'\n response = self.client.patch(url, {'title': title}, format='json')\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def patch_resource(self, **kwargs):\n results = self.api.action.resource_patch(**kwargs)\n self.get_ckan_metadata(True)\n if 'upload' in kwargs:\n resource_id = results['id'] if 'id' in results else kwargs['id']\n self._import_resource_to_cache(kwargs['upload'], resource_id)\n return results",
"def is_valid_method(verb):\n if not isinstance(verb, six.string_types):\n return False\n\n # Note: RAML also define the OPTIONS verbs\n return verb.upper() in [\"GET\", \"POST\", \"PUT\", \"DELETE\", \"HEAD\", \"PATCH\"]",
"def _check_is_editable(self, raise_error: bool = True) -> bool:\n try:\n # static analysis: ignore[incompatible_call]\n self._object.mod.update() # type: ignore[call-arg]\n except prawcore.exceptions.Forbidden as error:\n if not raise_error:\n return False\n raise submanager.exceptions.NotAModError(\n self.config,\n message_pre=(\n f\"Account {self.config.context.account!r} must \"\n \"be a moderator to update widgets\"\n ),\n message_post=error,\n ) from error\n\n return True",
"def request_is_valid(request):\n return 'method' in request",
"def is_modify_required(self, obj_fs, cap_unit):\n try:\n to_update = {}\n obj_fs = obj_fs.update()\n description = self.module.params['description']\n\n if description is not None and description != obj_fs.description:\n to_update.update({'description': description})\n\n size = self.module.params['size']\n if size and cap_unit:\n size_byte = int(utils.get_size_bytes(size, cap_unit))\n if size_byte < obj_fs.size_total:\n self.module.fail_json(msg=\"Filesystem size can be \"\n \"expanded only\")\n elif size_byte > obj_fs.size_total:\n to_update.update({'size': size_byte})\n\n tiering_policy = self.module.params['tiering_policy']\n if tiering_policy and self.get_tiering_policy_enum(\n tiering_policy) != obj_fs.tiering_policy:\n to_update.update({'tiering_policy':\n self.get_tiering_policy_enum(\n tiering_policy)})\n\n is_thin = self.module.params['is_thin']\n if is_thin is not None and is_thin != obj_fs.is_thin_enabled:\n to_update.update({'is_thin': is_thin})\n\n data_reduction = self.module.params['data_reduction']\n if data_reduction is not None and \\\n data_reduction != obj_fs.is_data_reduction_enabled:\n to_update.update({'is_compression': data_reduction})\n\n access_policy = self.module.params['access_policy']\n if access_policy and self.get_access_policy_enum(\n access_policy) != obj_fs.access_policy:\n to_update.update({'access_policy':\n self.get_access_policy_enum(access_policy)})\n\n locking_policy = self.module.params['locking_policy']\n if locking_policy and self.get_locking_policy_enum(\n locking_policy) != obj_fs.locking_policy:\n to_update.update({'locking_policy':\n self.get_locking_policy_enum(\n locking_policy)})\n\n snap_sch = obj_fs.storage_resource.snap_schedule\n\n if self.snap_sch_id is not None:\n if self.snap_sch_id == \"\":\n if snap_sch and snap_sch.id != self.snap_sch_id:\n to_update.update({'is_snap_schedule_paused': False})\n elif snap_sch is None or snap_sch.id != self.snap_sch_id:\n to_update.update({'snap_sch_id': self.snap_sch_id})\n\n smb_properties = self.module.params['smb_properties']\n if smb_properties:\n sync_writes_enabled = \\\n smb_properties['is_smb_sync_writes_enabled']\n oplocks_enabled = \\\n smb_properties['is_smb_op_locks_enabled']\n notify_on_write = \\\n smb_properties['is_smb_notify_on_write_enabled']\n notify_on_access = \\\n smb_properties['is_smb_notify_on_access_enabled']\n notify_on_change_dir_depth = \\\n smb_properties['smb_notify_on_change_dir_depth']\n\n if sync_writes_enabled is not None and \\\n sync_writes_enabled != obj_fs.is_cifs_sync_writes_enabled:\n to_update.update(\n {'is_cifs_sync_writes_enabled': sync_writes_enabled})\n\n if oplocks_enabled is not None and \\\n oplocks_enabled != obj_fs.is_cifs_op_locks_enabled:\n to_update.update(\n {'is_cifs_op_locks_enabled': oplocks_enabled})\n\n if notify_on_write is not None and \\\n notify_on_write != \\\n obj_fs.is_cifs_notify_on_write_enabled:\n to_update.update(\n {'is_cifs_notify_on_write_enabled': notify_on_write})\n\n if notify_on_access is not None and \\\n notify_on_access != \\\n obj_fs.is_cifs_notify_on_access_enabled:\n to_update.update(\n {'is_cifs_notify_on_access_enabled':\n notify_on_access})\n\n if notify_on_change_dir_depth is not None and \\\n notify_on_change_dir_depth != \\\n obj_fs.cifs_notify_on_change_dir_depth:\n to_update.update(\n {'cifs_notify_on_change_dir_depth':\n notify_on_change_dir_depth})\n if len(to_update) > 0:\n return to_update\n else:\n return None\n\n except Exception as e:\n errormsg = \"Failed to determine if FileSystem id: {0}\" \\\n 
\" modification required with error {1}\".format(obj_fs.id,\n str(e))\n LOG.error(errormsg)\n self.module.fail_json(msg=errormsg)",
"def test_edit_object_with_require_auth_false(self):\n self.test_object.require_auth = False\n self.test_object.save()\n response = self.client.put(\n f\"/permissiontest/{self.test_object.id}/\", self.test_update_object\n )\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)",
"def _is_valid_update_operation(session, row):\n # Check if there are older updates in the queue\n if db.check_for_older_ops(session, row):\n return False\n\n # Check for a pending or processing create operation on this uuid\n if db.check_for_pending_or_processing_ops(\n session, row.object_uuid, operation=odl_const.ODL_CREATE):\n return False\n return True",
"def test_user_update_procedure_failure(self):\n p1 = models.Procedure.objects.create(\n name='temp',\n overview='bla bla bla'\n )\n p1.speciality.set([self.speciality.pk])\n p1.save()\n\n res = self.client.get(PROCEDURE_URL)\n\n url = get_item_url(res.data[0]['id'])\n new_payload = {\n 'other_details': 'new details'\n }\n\n response = self.client.patch(url, new_payload, format='json')\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def has_object_permission(self, request, view, obj):\n if request.method in permissions.SAFE_METHODS:\n return True\n \n \"\"\"Check if the user has the permission to edit their profile. If True it will allow PUT, PATCH & DELETE operations\"\"\"\n return obj.id == request.user.id # returns True or False",
"def edit_config_verify(self,\n raw_response: Any,\n *args,\n **kwargs) -> bool:\n pass",
"def has_object_permission(self, request, view, obj):\n if request.method == \"GET\":\n return self.model_admin_config.has_view_permission(self, request, obj=obj)\n if request.method == \"PUT\":\n return self.model_admin_config.has_change_permission(self, request, obj=obj)\n if request.method == \"DELETE\":\n return self.model_admin_config.has_delete_permission(self, request, obj=obj)",
"def edit_allowed(self):\n account = Account.current_user_account\n if account is None:\n return False\n return self.user_can_edit(account.user)",
"def patch(self, controller_fs_uuid, patch):\n raise exception.OperationNotPermitted",
"def validate_complaint_document(self, operation):\n if operation == \"update\" and self.request.authenticated_role != self.context.author:\n self.request.errors.add(\"url\", \"role\", \"Can update document only author\")\n self.request.errors.status = 403\n raise error_handler(self.request.errors)\n if self.request.validated[\"tender_status\"] not in [\"active.qualification\", \"active.awarded\"]:\n raise_operation_error(\n self.request,\n \"Can't {} document in current ({}) tender status\".format(\n operation, self.request.validated[\"tender_status\"]\n ),\n )\n if any(\n [\n i.status != \"active\"\n for i in self.request.validated[\"tender\"].lots\n if i.id == self.request.validated[\"award\"].lotID\n ]\n ):\n raise_operation_error(self.request, \"Can {} document only in active lot status\".format(operation))\n if self.request.validated[\"complaint\"].status not in STATUS4ROLE.get(self.request.authenticated_role, []):\n raise_operation_error(\n self.request,\n \"Can't {} document in current ({}) complaint status\".format(\n operation, self.request.validated[\"complaint\"].status\n ),\n )\n return True",
"def test_friend_list_other_method_not_allowed(self):\n client = Client()\n user3_id = Profile.objects.get(nickname='user3').id\n client.login(username='TEST_USER_1',\n email='TEST_EMAIL_1', password='TEST_PW_1')\n response = client.put('/api/friend/'+str(user3_id)+'/review/')\n self.assertEqual(response.status_code, 405)",
"def is_put_or_post(split_request_header: list) -> bool:\n if split_request_header[0] == \"PUT\":\n return True\n elif split_request_header[0] == \"POST\":\n return True\n\n return False"
] | [
"0.69640553",
"0.6944961",
"0.6830189",
"0.6657633",
"0.65127224",
"0.63872164",
"0.6329733",
"0.62977004",
"0.61410993",
"0.6072097",
"0.5949872",
"0.5900068",
"0.58420664",
"0.58221006",
"0.5762764",
"0.57505614",
"0.5717169",
"0.571581",
"0.56980234",
"0.5684516",
"0.56841666",
"0.56760865",
"0.5669029",
"0.5661743",
"0.56611824",
"0.5661169",
"0.5660552",
"0.5647885",
"0.56432146",
"0.56142247",
"0.5611893",
"0.5611893",
"0.5601765",
"0.5597153",
"0.55966353",
"0.55919176",
"0.5582093",
"0.5568883",
"0.556424",
"0.55640316",
"0.5560173",
"0.5551014",
"0.55370826",
"0.5535696",
"0.5527537",
"0.55163133",
"0.5515401",
"0.55133903",
"0.551205",
"0.5508586",
"0.5507197",
"0.5482736",
"0.54816455",
"0.5470968",
"0.54526967",
"0.5446502",
"0.54425454",
"0.5436139",
"0.5436139",
"0.5436139",
"0.5419744",
"0.541648",
"0.5407993",
"0.5402721",
"0.53995216",
"0.53930575",
"0.5392962",
"0.5381364",
"0.5380479",
"0.53753126",
"0.5362264",
"0.53558147",
"0.5352919",
"0.53439456",
"0.53439105",
"0.5340612",
"0.5339051",
"0.5325509",
"0.5323583",
"0.53170085",
"0.5313721",
"0.53134334",
"0.53128266",
"0.53110665",
"0.5309164",
"0.53077036",
"0.5293994",
"0.52911615",
"0.52901006",
"0.52846444",
"0.5283152",
"0.52829164",
"0.5282811",
"0.5277354",
"0.52757514",
"0.5274292",
"0.526835",
"0.52564377",
"0.5248086",
"0.5241208"
] | 0.803524 | 0 |
Retrieves bios settings of the server. | def _get_bios_setting(self, bios_property):
headers, bios_uri, bios_settings = self._check_bios_resource([
bios_property])
return bios_settings[bios_property] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_bios_settings_resource(self, data):\n try:\n bios_settings_uri = data['links']['Settings']['href']\n except KeyError:\n msg = ('BIOS Settings resource not found.')\n raise exception.IloError(msg)\n\n status, headers, bios_settings = self._rest_get(bios_settings_uri)\n if status != 200:\n msg = self._get_extended_error(bios_settings)\n raise exception.IloError(msg)\n\n return headers, bios_settings_uri, bios_settings",
"def get_current_bios_settings(self, only_allowed_settings=True):\n\n sushy_system = self._get_sushy_system()\n try:\n current_settings = sushy_system.bios.json\n except sushy.exceptions.SushyError as e:\n msg = (self._('The current BIOS Settings were not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n\n attributes = current_settings.get(\"Attributes\")\n return attributes",
"def get_bios_settings(bmc):\n bios_settings = bmc.list_bios_settings()\n # Convert the settings to something that is JSON-serialisable.\n settings = {}\n for param, value in bios_settings.items():\n setting = {}\n # Not all attributes exist on all settings, so allow them to be absent.\n attrs = {\n 'current_value',\n 'pending_value',\n 'possible_values',\n }\n for attr in attrs:\n if hasattr(value, attr):\n setting[attr] = getattr(value, attr)\n settings[param] = setting\n return settings",
"def get_settings(self):\n return self.request({\n \"path\": \"/\" + UUID + \"/setting\"\n })",
"def _get_bios_boot_resource(self, data):\n try:\n boot_uri = data['links']['Boot']['href']\n except KeyError:\n msg = ('Boot resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, boot_settings = self._rest_get(boot_uri)\n\n if status != 200:\n msg = self._get_extended_error(boot_settings)\n raise exception.IloError(msg)\n\n return boot_settings",
"def get_pending_bios_settings(self, only_allowed_settings=True):\n\n sushy_system = self._get_sushy_system()\n try:\n settings = sushy_system.bios.pending_attributes\n except sushy.exceptions.SushyError as e:\n msg = (self._('The pending BIOS Settings were not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n\n return settings",
"def fusion_api_get_server_hardware_bios(self, uri, api=None, headers=None):\n return self.sh.get(uri=uri, api=api, headers=headers, param='/bios')",
"def get_settings(self):\n url = \"https://api.imgur.com/3/account/{0}/settings\".format(self.name)\n return self._imgur._send_request(url)",
"def get_common_settings(self):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Settings/\"))",
"def get_nic_settings(bmc):\n nic_settings = bmc.list_nics()\n return nic_settings",
"def settings(self):\r\n url = '{0}/userSettings'.format(self.get_url())\r\n return http.Request('GET', url), parsers.parse_json",
"async def test_get_settings(spawn_client):\n client = await spawn_client(authorize=True)\n\n resp = await client.get(\"/account/settings\")\n\n assert resp.status == 200\n\n assert await resp.json() == {\n \"skip_quick_analyze_dialog\": True,\n \"show_ids\": True,\n \"show_versions\": True,\n \"quick_analyze_workflow\": \"pathoscope_bowtie\",\n }",
"def set_bios_bootmode_uefi(ip, login_account, login_password, system_id):\n result = {}\n login_host = \"https://\" + ip\n try:\n # Connect using the BMC address, account name, and password\n # Create a REDFISH object\n REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,\n password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)\n # Login into the server and create a session\n REDFISH_OBJ.login(auth=utils.g_AUTH)\n except:\n traceback.print_exc()\n result = {'ret': False, 'msg': \"Please check the username, password, IP is correct\"}\n return result\n\n # GET the ComputerSystem resource\n system = utils.get_system_url(\"/redfish/v1\", system_id, REDFISH_OBJ)\n if not system:\n result = {'ret': False, 'msg': \"This system id is not exist or system member is None\"}\n REDFISH_OBJ.logout()\n return result\n for i in range(len(system)):\n system_url = system[i]\n response_system_url = REDFISH_OBJ.get(system_url, None)\n if response_system_url.status != 200:\n error_message = utils.get_extended_error(response_system_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (system_url, response_system_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else:\n # Get the bios resource\n bios_url = response_system_url.dict['Bios']['@odata.id']\n response_bios_url = REDFISH_OBJ.get(bios_url, None)\n if response_bios_url.status != 200:\n error_message = utils.get_extended_error(response_bios_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (bios_url, response_bios_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else: # Get bios success\n # Seek boot mode from bios attributes\n attribute_bootmode = None\n attributes = response_bios_url.dict['Attributes']\n for attribute in attributes:\n if attribute == \"BootMode\" or attribute == \"SystemBootMode\":\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"SystemBootMode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"Boot\" in attribute and \"Mode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n result = {'ret': False, 'msg': \"Can not found BootMode attribute in response of url %s\" %(bios_url)}\n REDFISH_OBJ.logout()\n return result\n\n # Get boot mode setting guide from bios registry\n WarningText = None\n ValueName = None\n bios_registry_url = \"/redfish/v1/Registries/\" + response_bios_url.dict['AttributeRegistry']\n response_bios_registry_url = REDFISH_OBJ.get(bios_registry_url, None)\n if response_bios_registry_url.status == 200:\n locations = response_bios_registry_url.dict['Location']\n bios_regjson_url = None\n for location in locations:\n if 'en' in location['Language']:\n bios_regjson_url = location['Uri']\n break\n if bios_regjson_url:\n response_bios_regjson_url = REDFISH_OBJ.get(bios_regjson_url, None)\n if response_bios_regjson_url.status == 200:\n regattributes = response_bios_regjson_url.dict['RegistryEntries']['Attributes']\n for regattribute in regattributes:\n if regattribute['AttributeName'] == attribute_bootmode:\n if 'WarningText' in regattribute:\n WarningText = regattribute['WarningText']\n for value in regattribute['Value']:\n if 'legacy' in value['ValueName'].lower():\n continue\n if 'uefi' in value['ValueName'].lower():\n ValueName = 
value['ValueName']\n break\n ValueName = value['ValueName']\n break\n \n # Perform patch to set\n if ValueName == None:\n ValueName = \"UEFIMode\"\n pending_url = response_bios_url.dict['@Redfish.Settings']['SettingsObject']['@odata.id']\n parameter = {attribute_bootmode: ValueName}\n attribute = {\"Attributes\": parameter}\n headers = {\"If-Match\": '*'}\n response_pending_url = REDFISH_OBJ.patch(pending_url, body=attribute, headers=headers)\n if response_pending_url.status in [200,204]:\n if WarningText:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful. WarningText: %s'% (WarningText) }\n else:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful'}\n elif response_pending_url.status == 405:\n result = {'ret': False, 'msg': \"Resource not supported\"}\n else:\n error_message = utils.get_extended_error(response_pending_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (\n pending_url, response_pending_url.status, error_message)}\n\n # Logout of the current session\n try:\n REDFISH_OBJ.logout()\n except:\n pass\n return result",
"def get():\n\n mba_ctrl_info = caps.mba_ctrl_info()\n\n res = {\n 'supported': mba_ctrl_info['supported'],\n 'enabled': mba_ctrl_info['enabled']\n }\n return res, 200",
"def _get_bios_mappings_resource(self, data):\n try:\n map_uri = data['links']['Mappings']['href']\n except KeyError:\n msg = ('Mappings resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, map_settings = self._rest_get(map_uri)\n if status != 200:\n msg = self._get_extended_error(map_settings)\n raise exception.IloError(msg)\n\n return map_settings",
"async def get_system_info(self) -> Dict[str, Any]:\n assert self._client is not None\n return await self._client.invoke_method(\"system.info\")",
"def get_settings():\n settings = {}\n for setting in cfg.displayable_setting:\n settings[setting] = getattr(cfg, setting)\n return settings",
"def get_settings():\n settings = {}\n for setting in cfg.displayable_setting:\n settings[setting] = getattr(cfg, setting)\n return settings",
"def ex_get_hypervisor_sysinfo(self):\n xml = self.connection.getSysinfo()\n etree = ET.XML(xml)\n\n attributes = [\"bios\", \"system\", \"processor\", \"memory_device\"]\n\n sysinfo = {}\n for attribute in attributes:\n element = etree.find(attribute)\n entries = self._get_entries(element=element)\n sysinfo[attribute] = entries\n\n return sysinfo",
"def _check_bios_resource(self, properties=[]):\n\n system = self._get_host_details()\n if ('links' in system['Oem']['Hp'] and\n 'BIOS' in system['Oem']['Hp']['links']):\n # Get the BIOS URI and Settings\n bios_uri = system['Oem']['Hp']['links']['BIOS']['href']\n status, headers, bios_settings = self._rest_get(bios_uri)\n\n if status >= 300:\n msg = self._get_extended_error(bios_settings)\n raise exception.IloError(msg)\n\n # If property is not None, check if the bios_property is supported\n for property in properties:\n if property not in bios_settings:\n # not supported on this platform\n msg = ('BIOS Property \"' + property + '\" is not'\n ' supported on this system.')\n raise exception.IloCommandNotSupportedError(msg)\n\n return headers, bios_uri, bios_settings\n\n else:\n msg = ('\"links/BIOS\" section in ComputerSystem/Oem/Hp'\n ' does not exist')\n raise exception.IloCommandNotSupportedError(msg)",
"def get_srv_config(name):\n cmd = \"ceph --admin-daemon %s/%s.asok config show\" % \\\n (CEPH_SOCKET_PATH, name)\n out = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, \\\n stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n return json.loads(out.stdout.read())",
"def cmd_account_settings(client, args):\n account_settings = client.get_account_settings(args.username)\n data = account_settings.__dict__\n generate_output({'account_settings': data})",
"def get_settings():\n return db.get_data()",
"def retrieveGuildsInfo(self):\n serverInfo = self.con.getGuildsInfo()\n\n for server in serverInfo:\n serverData = server.split(', ')\n self.serverSettings[serverData[0]] = serverData[1]",
"def get_config(self, retrieve=\"all\", full=False, sanitized=False):\n\n command = \"/export verbose\" if full else \"/export\"\n\n running_config = self._send_command(command)\n running_config = re.sub(r'^#.*$', \"\", running_config, flags=re.M)\n\n return {\n \"startup\": \"\",\n \"running\": running_config.strip(),\n \"candidate\": \"\"\n }",
"async def economyset_showsettings(self, ctx: commands.Context):\r\n guild = ctx.guild\r\n if await bank.is_global():\r\n conf = self.config\r\n else:\r\n conf = self.config.guild(guild)\r\n await ctx.send(\r\n box(\r\n _(\r\n \"----Economy Settings---\\n\"\r\n \"Minimum slot bid: {slot_min}\\n\"\r\n \"Maximum slot bid: {slot_max}\\n\"\r\n \"Slot cooldown: {slot_time}\\n\"\r\n \"Payday amount: {payday_amount}\\n\"\r\n \"Payday cooldown: {payday_time}\\n\"\r\n \"Amount given at account registration: {register_amount}\\n\"\r\n \"Maximum allowed balance: {maximum_bal}\"\r\n ).format(\r\n slot_min=humanize_number(await conf.SLOT_MIN()),\r\n slot_max=humanize_number(await conf.SLOT_MAX()),\r\n slot_time=humanize_number(await conf.SLOT_TIME()),\r\n payday_time=humanize_number(await conf.PAYDAY_TIME()),\r\n payday_amount=humanize_number(await conf.PAYDAY_CREDITS()),\r\n register_amount=humanize_number(await bank.get_default_balance(guild)),\r\n maximum_bal=humanize_number(await bank.get_max_balance(guild)),\r\n )\r\n )\r\n )",
"def __get_base_info_api(self):\r\n try:\r\n return Call_shelly_api(url=self.__api_address + \"/settings\")\r\n except ShellyException as err:\r\n _LOGGER.warning(err)",
"def get_config(site='self'):\n path='/sites/%s/configuration' % (site)\n return _api_request('GET', path)",
"def GetAWSSettings(self):\n # [GET] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/cloud-connect-aws/GetAWSSettings\n FULL_URL = self.base_url+'/cloud-connect-aws/combined/settings/v1'\n HEADERS = self.headers\n result = self.Result()\n try:\n response = requests.request(\"GET\", FULL_URL, headers=HEADERS, verify=False)\n returned = result(response.status_code, response.headers, response.json())\n except Exception as e:\n returned = result(500, {}, str(e))\n \n return returned",
"def get():\n\n mba_info = caps.mba_info()\n\n res = {\n 'clos_num': mba_info['clos_num'],\n 'mba_enabled': mba_info['enabled'],\n 'mba_bw_enabled': mba_info['ctrl_enabled']\n }\n return res, 200",
"def get(isamAppliance, check_mode=False, force=False, ignore_error=False):\n return isamAppliance.invoke_get(\"Retrieving a list of firmware settings\",\n \"/firmware_settings\", ignore_error=ignore_error, requires_model=requires_model)",
"def verify_server_profile_bios_settings_info(*profile_obj):\n\n FusionUIBase.navigate_to_section(SectionType.SERVER_PROFILES, time_for_loading=5)\n\n if isinstance(profile_obj, test_data.DataObj):\n profile_obj = [profile_obj]\n elif isinstance(profile_obj, tuple):\n profile_obj = list(profile_obj[0])\n\n for _, profile in enumerate(profile_obj):\n logger.info(\"verifying server_profile_bios named '%s'\" % profile.name)\n # check if server profile exists\n VerifyServerProfile.verify_server_profile_exist(profile.name, fail_if_false=True)\n CommonOperationServerProfile.click_server_profile(profile_name=profile.name, time_for_loading=10)\n FusionUIBase.select_view_by_name(view_name='BIOS Settings', timeout=10, fail_if_false=True)\n if hasattr(profile.BIOSSettings.Verify, 'ServerAssetInformation'):\n logger.info(\"verifying server_profile_bios expected values before power on named '%s'\" % profile.name)\n VerifyServerProfile.verify_server_asset_info(profile.name, profile.BIOSSettings.Verify.ServerAssetInformation)",
"def bios_vendor(self):\n\t\treturn self.__info_dict['info']['bios_vendor']['value']",
"def get_attributes():\n bot_id = socket.gethostname().lower().split('.', 1)[0]\n return os_utilities.get_attributes(bot_id)",
"def get_settings(self):\n return AlgoliaUtils_request(self.client.headers, self.read_hosts, \"GET\", \"/1/indexes/%s/settings\" % self.url_index_name, self.client.timeout)",
"def _request_bootstrap_server_info() -> str:\n if __debug__:\n logger.info(\"Requesting bootstrap server...\")\n req = BootstrapServerRequest()\n DistroStreamClientHandler.request(req)\n\n # Retrieve answer\n req.wait_processed()\n error = req.get_error_code()\n if error != 0:\n raise BackendException(error, req.get_error_msg())\n\n # Parse answer\n answer = req.get_response_msg()\n if __debug__:\n logger.debug(\"Retrieved bootstrap server information: %s\", answer)\n\n return answer",
"def settings():\n return _get_settings()[1]",
"def system_info(self, system_id):\n\n\t\tpath = f'{self.BIKE_ENDPOINT}system/{system_id}/{self.secret_key}'\n\t\tresponse = requests.get(path).json()\n\t\tself.check_api_key(response)\n\n\t\treturn response",
"def get_account_settings():\n pass",
"async def info() -> json:\n return SpacyNER().settings",
"def loadConfigInfo(self):\n reg = self.reg\n yield reg.cd(['', 'Servers', 'current_controller', 'Links'], True)\n dirs, keys = yield reg.dir()\n p = reg.packet()\n for k in keys:\n p.get(k, key=k)\n ans = yield p.send()\n self.serialLinks = dict((k, ans[k]) for k in keys)\n # Get output state and last value of current set\n yield reg.cd(['', 'Servers', 'current_controller', 'parameters'], True)\n dirs, keys = yield reg.dir()\n p = reg.packet()\n for k in keys:\n p.get(k, key=k)\n ans = yield p.send()\n self.params = dict((k, ans[k]) for k in keys)\n try:\n self.output = bool(self.params['state'])\n self.current = self.params['current']\n except:\n print \"Failed to load current controller state. Check Registry\"",
"def settings_load(self):\n self.ui.spinBox_ATSP.setValue(self.default['ATSP'])\n\n if self.default['serialLabel'] == 'bt':\n self.ui.btRadio.setChecked(True)\n try:\n os.system(\"blueman-manager\")\n except:\n print \"Please install 'blueman' package\"\n elif self.default['serialLabel'] == 'usb':\n self.ui.usbRadio.setChecked(True)\n else:\n self.ui.devRadio.setChecked(True)\n\n if self.default['units'] == 'metric':\n self.ui.units_metric_radio.setChecked(True)\n else:\n self.ui.units_US_radio.setChecked(True)\n\n return",
"def _MocaCtlShowConfig(self):\n mc = subprocess.Popen([MOCACTL, 'show', '--config'], stdout=subprocess.PIPE)\n out, _ = mc.communicate(None)\n return out.splitlines()",
"def get_all(self):\n logging.info(__name__ + ' : reading all settings from instrument')\n self.level.get()\n self.status.get()\n self.rate.get()",
"def loadConfigInfo(self):\n reg = self.reg\n yield reg.cd(['', 'Servers', 'SIM900 Serial', 'links'], True)\n dirs, keys = yield reg.dir()\n p = reg.packet()\n for k in keys:\n p.get(k, key=k)\n ans = yield p.send()\n b = 0\n hostname = gethostname()\n for ss in ans['Serial Links']:\n if ss[0] == hostname + ' Serial Server':\n self.serialLinks = {ss[0]:ss[1]}\n # self.serialLinks = dict((ans[k][0][0], ans[k][0][1]) for k in keys) \n print self.serialLinks",
"def test_get_bios_boot_mode_list(self):\n pass",
"def get_skill_settings(self):\n return self.request({\n \"method\": \"GET\",\n \"path\": \"/\" + UUID + \"/skill/settings\",\n })",
"def client_settings():\n return CLIENT_SETTINGS",
"def get_server_capabilities(self):\n capabilities = {}\n system = self._get_host_details()\n capabilities['server_model'] = system['Model']\n rom_firmware_version = (\n system['Oem']['Hp']['Bios']['Current']['VersionString'])\n capabilities['rom_firmware_version'] = rom_firmware_version\n capabilities.update(self._get_ilo_firmware_version())\n capabilities.update(self._get_number_of_gpu_devices_connected())\n if self._get_tpm_capability():\n capabilities['trusted_boot'] = 'true'\n\n if self._get_cpu_virtualization():\n capabilities['cpu_vt'] = 'true'\n if self._get_nvdimm_n_status():\n capabilities['nvdimm_n'] = 'true'\n try:\n self.get_secure_boot_mode()\n capabilities['secure_boot'] = 'true'\n except exception.IloCommandNotSupportedError:\n # If an error is raised dont populate the capability\n # secure_boot\n pass\n if self._is_sriov_enabled():\n capabilities['sriov_enabled'] = 'true'\n return capabilities",
"def create_test_bios_setting(**kw):\n bios_setting = get_test_bios_setting(**kw)\n dbapi = db_api.get_instance()\n node_id = bios_setting['node_id']\n version = bios_setting['version']\n settings = [{'name': bios_setting['name'],\n 'value': bios_setting['value'],\n 'attribute_type': bios_setting['attribute_type'],\n 'allowable_values': bios_setting['allowable_values'],\n 'read_only': bios_setting['read_only'],\n 'reset_required': bios_setting['reset_required'],\n 'unique': bios_setting['unique']}]\n return dbapi.create_bios_setting_list(node_id, settings, version)[0]",
"def get_settings():\n with open('config/config.json') as data_file:\n settings = json.load(data_file)\n return settings",
"async def settings(self, ctx):\n settings = config.load_settings()\n guild = ctx.guild.id\n embed = discord.Embed(\n title=ctx.guild.name + \" bot settings!\",\n description=\"My settings for this server!\",\n color=discord.Colour.purple()\n )\n embed.add_field(name=\"Prefix\", value=settings['guilds'][str(guild)]['prefix'])\n embed.add_field(name=\"Max Volume\", value=str(settings['guilds'][str(guild)]['max_volume']))\n embed.add_field(name=\"Leveling system\", value=settings['guilds'][str(guild)]['leveling'])\n embed.add_field(name=\"Welcome Message\", value=settings['guilds'][str(guild)]['welcome'])\n embed.add_field(name=\"Goodbye Message\", value=settings['guilds'][str(guild)]['goodbye'])\n embed.add_field(name=\"Warns until kick\", value=str(settings['guilds'][str(guild)]['warn_kick']))\n embed.add_field(name=\"Warns until ban\", value=str(settings['guilds'][str(guild)]['warn_ban']))\n await ctx.send(\"\", embed=embed)",
"def settings_information():\n return {\n \"version\": VERSION,\n \"modules_directory\": MODULES_DIR,\n \"web_directory\": WEB_DIR,\n \"dependencies_directory\": DEPENDENCIES_DIR,\n \"bot_directory\": BOT_DIR,\n \"bot_data_directory\": BOT_DATA_DIR,\n \"bot_image_directory\": BOT_IMAGE_DIR,\n \"local_data_directory\": LOCAL_DATA_DIR,\n \"local_data_database_directory\": LOCAL_DATA_DB_DIR,\n \"local_data_log_directory\": LOCAL_DATA_LOG_DIR,\n \"local_data_backup_directory\": LOCAL_DATA_BACKUP_DIR,\n \"database_name\": DB_NAME,\n \"database_file\": DB_FILE,\n \"authentication_base_url\": AUTH_BASE_URL,\n \"authentication_auth_url\": AUTH_AUTH_URL,\n \"tesseract_dependency_directory\": TESSERACT_DEPENDENCY_DIR,\n \"tesseract_directory\": TESSERACT_DIR,\n \"tesseract_path\": TESSERACT_PATH,\n }",
"def properties(self):\n response = self._client.get('server/properties')\n return ServerProperties.from_json(response.text)",
"def get_server_info(self):\n\t\treturn handle_to_object(call_sdk_function('PrlSrv_GetServerInfo', self.handle))",
"def get_settings(self):\n return self.settings",
"def get_settings() -> Dict[str, Any]:\n settings = dict()\n with open(\"config,ini\") as file_config:\n for line in file_config:\n try:\n key = line.split(\":\")[0]\n value = line.split(\":\")[1].strip().split() if key == 'invisible_manes' else line.split(\":\")[1].strip()\n settings[key] = value\n except IndexError:\n pass\n return settings",
"def cont_settings_(request):\n \n return {\"settings\": settings}",
"def read_properties():\n # Initialize variables.\n properties = {}\n for prop in config_properties:\n properties[prop] = None\n\n # Read the XBee settings saved in the firmware.\n for prop, atcmd in xbee_properties.items():\n read_value = xbee.atcmd(atcmd)\n if read_value is None:\n properties[prop] = None\n elif prop in text_properties:\n properties[prop] = read_value\n else:\n properties[prop] = binascii.hexlify(read_value).decode()\n print(\" - Read property '%s' from the XBee device: '%s'\" %\n (prop, properties[prop]))\n\n # Return the properties dictionary.\n return properties",
"async def randomizer_settings(self):\r\n return await http.request_generic(\r\n url=f'/api/randomizers/{self.randomizer}',\r\n method='get',\r\n returntype='json'\r\n )",
"def get(self) -> dict:\n return Config.get()",
"def config_get():\n server_config = db.get().server_config_get()\n\n if not server_config:\n return flask.jsonify({\n \"message\": \"Netmet server has not been setup yet\"}), 404\n\n return flask.jsonify(server_config), 200",
"def get_system_info(self):\r\n method = self.public_endpoints['system_info']['method']\r\n url = self.base_url + self.public_endpoints['system_info']['url']\r\n req = requests.request(method, url)\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res",
"def get_srv_config(self):\n\t\treturn Job(SDK.PrlSrv_GetSrvConfig(self.handle)[0])",
"def get_ded_info(server, show=False):\n # start Requests session\n sc = requests.Session()\n\n # import cookies from Firefox\n sc.cookies.update(get_cookies('imhsc.imhadmin.net'))\n\n # send request\n vpx = sc.get('https://imhsc.imhadmin.net/index.php',\n params={'v': \"Dedicated\", 'selectServer': server})\n\n # check if login failed\n check_sc_login(vpx.text)\n\n # parse with BS4\n bs = BeautifulSoup(vpx.text, \"xml\")\n\n # server=0 ip=4 net=5 psc=6 user=11 type=14\n trr = bs.tbody.find_all('tr')\n if len(trr) > 0:\n tsrv = {\n 'hostname': trr[0].find_all('td')[0].string,\n 'ip': trr[0].find_all('td')[2].string,\n 'net': trr[0].find_all('td')[3].string,\n 'psc': trr[0].find_all('td')[4].a.string,\n 'user': trr[0].find_all('td')[9].string,\n 'type': trr[0].find_all('td')[12].string,\n 'status': trr[0].find_all('td')[13].string.strip()\n }\n else:\n tsrv = None\n\n if show:\n if tsrv:\n print(\"[%(hostname)s] IP: %(ip)s (%(net)s) / PSC: %(psc)s / User: %(user)s / Type: %(type)s / Status: %(status)s\" % tsrv)\n else:\n print(\"!! Server '%s' not found\" % (server))\n\n return tsrv",
"def get_values(self):\n self.active_changes = False # (flag) Once changes are retrieved, we assume that they will be sent to the controller\n return self.settings",
"def os_settings():\n for setting_name, env_name in (\n (\"debug\", \"BACPYPES_DEBUG\"),\n (\"color\", \"BACPYPES_COLOR\"),\n (\"debug_file\", \"BACPYPES_DEBUG_FILE\"),\n (\"max_bytes\", \"BACPYPES_MAX_BYTES\"),\n (\"backup_count\", \"BACPYPES_BACKUP_COUNT\"),\n (\"route_aware\", \"BACPYPES_ROUTE_AWARE\"),\n ):\n env_value = os.getenv(env_name, None)\n if env_value is not None:\n cur_value = settings[setting_name]\n\n if isinstance(cur_value, bool):\n env_value = env_value.lower()\n if env_value in (\"set\", \"true\"):\n env_value = True\n elif env_value in (\"reset\", \"false\"):\n env_value = False\n else:\n raise ValueError(\"setting: \" + setting_name)\n elif isinstance(cur_value, int):\n try:\n env_value = int(env_value)\n except:\n raise ValueError(\"setting: \" + setting_name)\n elif isinstance(cur_value, str):\n pass\n elif isinstance(cur_value, list):\n env_value = env_value.split()\n elif isinstance(cur_value, set):\n env_value = set(env_value.split())\n else:\n raise TypeError(\"setting type: \" + setting_name)\n settings[setting_name] = env_value",
"def settings(self):\n if self._settings is not None:\n return self._settings\n\n settings = self.binaries['KeeAgent.settings'].content\n self._settings = objectify.fromstring(settings)\n return self._settings",
"async def settings(self, ctx: BBContext):\n pass",
"def showSettings():\n cq = dz()\n cq.abag()",
"def _load_settings(self):\n self._dll.LS_LoadSettings(self._serial_number)\n return None",
"def load_settings_from_cli():\n load_user_from_cli()\n load_local_contacts()",
"def get_ha_config():\n query = {\n \"type\": \"config\",\n \"action\": \"get\",\n \"xpath\": \"/config/devices/entry[@name='localhost.localdomain']/deviceconfig/high-availability\",\n }\n\n return __proxy__[\"panos.call\"](query)",
"def advanced_settings(self):\n settings = ADVANCEDSETTINGS()\n ckresult(_dll.FMOD_System_GetAdvancedSettings(self._ptr, byref(settings)))\n return settings",
"def system_properties(self):\r\n return dict(self._get_system_properties(self.java))",
"def get_settings():\n try:\n branches = database.select(database.QUERY[mn()])\n # QUERY['get_settings'] = \"SELECT number, name, time, intervals, time_wait, start_time, line_type, base_url, pump_enabled from lines where line_type='power_outlet' order by number\"\n for row in branches:\n branch_id = row[0]\n name = row[1]\n time = row[2]\n intervals = row[3]\n time_wait = row[4]\n start_time = row[5]\n line_type = row[6]\n base_url = row[7]\n pump_enabled = row[8]\n\n BRANCHES_SETTINGS[branch_id] = {\n 'branch_id': branch_id,\n 'name': name,\n 'time': time,\n 'intervals': intervals,\n 'time_wait': time_wait,\n 'start_time': start_time,\n 'line_type': line_type,\n 'base_url': base_url,\n 'pump_enabled': True if pump_enabled == 1 else False\n }\n logging.debug(\"{0} added to settings\".format(str(BRANCHES_SETTINGS[branch_id])))\n except Exception as e:\n logging.error(\"Exceprion occured when trying to get settings for all branches. {0}\".format(e))",
"def get_snmp_config():\n query = {\n \"type\": \"config\",\n \"action\": \"get\",\n \"xpath\": \"/config/devices/entry[@name='localhost.localdomain']/deviceconfig/system/snmp-setting\",\n }\n\n return __proxy__[\"panos.call\"](query)",
"async def _async_get_addon_config(self):\n addon_info = await self._async_get_addon_info()\n return addon_info[\"options\"]",
"def read():\n return mac_slideshow.preferences.read(KEY)",
"def getDeviceInfo():\n url = \"https://api.roblox.com/reference/deviceinfo\"\n r = requests.get(url)\n j = json.loads(r.text)\n return j",
"def get_personal_info(self):\n self.get(\"INFO\",\"GetPersonalInfo\")\n response = self.send()\n return response",
"def getSysinfo(self, request):\r\n return self._ref.callRemote('getSysinfo')",
"def get(self, section=None):\n logging.info(\"GET Request for System information, section=\\\"%s\\\"\", section)\n\n system_info = get_system_info(section)\n\n return jsonify(system_info)",
"def readSettings(self):\n for i in range(1,N_STATION+1):\n vol = f\"vol{i}\"\n self.param.vol[i-1] = self.settings.value(vol,type=int)\n info = f\"info{i}\"\n self.param.info[i-1] = self.settings.value(info,type=str)\n ip = f\"ip{i}\"\n self.param.ip[i-1] = self.settings.value(ip,type=str)\n muted = f\"muted{i}\"\n self.param.muted[i-1] = self.settings.value(muted,type=bool)",
"def remote_getSysinfo(self, request):\r\n # TODO : replace these calls with call to rce.util.sysinfo\r\n response_table = {\r\n 'size':self._size,\r\n 'cpu':self._cpu,\r\n 'memory': self._memeory,\r\n 'bandwidth': self._bandwidth,\r\n # 'keyword': some value or function to provide the data\r\n }\r\n\r\n return response_table[request]",
"def _get_samba_clients(self):\n # TODO preferably use a library, don't assume localhost.\n logging.debug(\"inspecting samba...\")\n command = 'sudo smbstatus -p | sed -n 5p | tr -s \" \" | cut -d\" \" -f4'\n output = utils.run_os_command(command)\n return output.strip().split(\"\\n\")",
"def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]",
"def current_settings(self):\n return {\n 'power_state': self.power_state,\n 'brightness': self.brightness,\n }",
"def reset_bios_to_default(self):\n # Check if the BIOS resource if exists.\n headers_bios, bios_uri, bios_settings = self._check_bios_resource()\n # Get the BaseConfig resource.\n try:\n base_config_uri = bios_settings['links']['BaseConfigs']['href']\n except KeyError:\n msg = (\"BaseConfigs resource not found. Couldn't apply the BIOS \"\n \"Settings.\")\n raise exception.IloCommandNotSupportedError(msg)\n\n # Check if BIOS resource supports patch, else get the settings\n if not self._operation_allowed(headers_bios, 'PATCH'):\n headers, bios_uri, _ = self._get_bios_settings_resource(\n bios_settings)\n self._validate_if_patch_supported(headers, bios_uri)\n\n status, headers, config = self._rest_get(base_config_uri)\n if status != 200:\n msg = self._get_extended_error(config)\n raise exception.IloError(msg)\n\n new_bios_settings = {}\n for cfg in config['BaseConfigs']:\n default_settings = cfg.get('default', None)\n if default_settings is not None:\n new_bios_settings = default_settings\n break\n else:\n msg = (\"Default Settings not found in 'BaseConfigs' resource.\")\n raise exception.IloCommandNotSupportedError(msg)\n request_headers = self._get_bios_hash_password(self.bios_password)\n status, headers, response = self._rest_patch(bios_uri, request_headers,\n new_bios_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def read_bootinfo(self, orig_name):\n api_page = \"/configuration/object/read_bootinfo\"\n url = \"{}{}?{}&UIDARUBA={}\".format(\n self.base_url,\n api_page,\n self.config_path,\n self.uidaruba)\n\n obj = {\"_action\": \"modify\",\n \"read_bootinfo_option\": \"ap-name\",\n \"ap-name\": orig_name\n }\n\n json_obj = json.loads(json.dumps(obj))\n resp = self.post(url, json_obj)\n print(\"read_bootinfo_resp: {}\".format(resp.status_code))\n # print(resp.text)",
"def health_probe_settings(self) -> Optional['outputs.HealthProbeParametersResponse']:\n return pulumi.get(self, \"health_probe_settings\")",
"def set_bios_settings(self, data=None):\n\n if not data:\n raise exception.SDFlexError(\"Could not apply settings with\"\n \" empty data\")\n sushy_system = self._get_sushy_system()\n\n try:\n for key in data.keys():\n sushy_system.bios.set_attribute(key, data[key])\n except sushy.exceptions.SushyError as e:\n message_extended_info = e.body.get('@Message.ExtendedInfo')\n error_message = message_extended_info[0]['Message']\n\n msg = (self._(\"Setting the value of Bios attribute \"\n \"'%(atrribute)s' is not succesfull. \"\n \"Error: %(error)s\") %\n {'error': str(error_message), 'atrribute': key})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def get_settings(self):\n\n\t\t# TODO: Consider YAML. Human writable, machine readable.\n\t\twith open(self.filename) as fp:\n\t\t\ttry:\n\t\t\t\treturn json.load(fp)\n\t\t\texcept Exception, e:\n\t\t\t\tif self.DEBUG:\n\t\t\t\t\tprint >>sys.stderr, 'get_settings exception:', e\n\t\t\t\treturn {}",
"def getBootstrapInfo(self, locale):\r\n self.send_getBootstrapInfo(locale)\r\n return self.recv_getBootstrapInfo()",
"def _get_config_data(self, cr, uid):\n\n model_conf = self.pool.get('customer.support.settings')\n args = [('selected', '=', True)] \n ids = model_conf.search(cr, uid, args)\n config = model_conf.browse(cr, uid, ids[0])\n\n return {\n 'tor_api_key': config.tor_api_key,\n 'tor_domain': config.tor_domain,\n 'company': config.company\n }",
"def service_config():\n global _service_config\n if not _service_config:\n r = requests.get('https://tech.lds.org/mobile/ldstools/config.json')\n r.raise_for_status()\n _service_config = r.json()\n return _service_config",
"def get(self, request, format=None):\n return Response({k: getattr(config, k) for k in list(dir(config))})",
"def get_settings(self):\n return {\n \"game_name\": self.game_name,\n \"n_epochs\": self.n_epochs,\n \"n_episodes\": self.n_episodes,\n \"n_frames\": self.n_frames,\n \"agent\": self.agent.get_settings(),\n \"results_dir\": self.results_dir,\n \"use_minimal_action_set\": self.use_minimal_action_set,\n }",
"def test_get_bios_policy_list(self):\n pass",
"def config(gvar):\n\n mandatory = []\n required = []\n optional = ['-cc', '-ckv', '-CSEP', '-CSV', '-g', '-H', '-h', '-NV', '-ok', '-r', '-s', '-V', '-VC', '-v', '-x509', '-xA']\n\n if gvar['retrieve_options']:\n return mandatory + required + optional\n\n # Check for missing arguments or help required.\n form_data = check_keys(\n gvar,\n mandatory,\n required,\n optional,\n key_map=KEY_MAP)\n\n # List the current defaults. If the form_data contains any optional fields,\n # those values will be updated before the list is retrieved.\n response = requests(\n gvar,\n '/server/config/',\n form_data\n )\n \n if response['message']:\n print(response['message'])\n\n # Print report\n show_active_user_groups(gvar, response)\n\n show_table(\n gvar,\n response['config_list'],\n [\n 'category/Category,k',\n 'config_key/Config Key,k',\n 'config_type/Type',\n 'config_value/Value',\n ],\n title=\"Server Configuration\",\n )"
] | [
"0.70597833",
"0.67072386",
"0.66295433",
"0.63893646",
"0.6140606",
"0.59224844",
"0.58876735",
"0.57552373",
"0.57381696",
"0.5689282",
"0.5650241",
"0.55698514",
"0.5555418",
"0.55383116",
"0.54772294",
"0.5444395",
"0.5402679",
"0.5402679",
"0.53762496",
"0.5365478",
"0.5358037",
"0.535643",
"0.5350886",
"0.53149605",
"0.5313657",
"0.5291689",
"0.527867",
"0.5252195",
"0.5247421",
"0.52301615",
"0.52282137",
"0.5228041",
"0.51966643",
"0.5175384",
"0.5169107",
"0.5152258",
"0.51436687",
"0.51419806",
"0.5134109",
"0.5132719",
"0.51213056",
"0.51176167",
"0.51096934",
"0.5107457",
"0.50924814",
"0.5081557",
"0.5077605",
"0.5069699",
"0.50574464",
"0.5056719",
"0.5052669",
"0.5040147",
"0.500979",
"0.49862462",
"0.49842706",
"0.4983591",
"0.49813595",
"0.4975732",
"0.49740773",
"0.4966732",
"0.49587733",
"0.49527606",
"0.49493238",
"0.49335822",
"0.49276987",
"0.49198702",
"0.49161673",
"0.491597",
"0.49158666",
"0.49102658",
"0.49090862",
"0.48988032",
"0.4886191",
"0.48801982",
"0.48759297",
"0.48741588",
"0.4873391",
"0.4872801",
"0.48622096",
"0.485638",
"0.4836278",
"0.4836205",
"0.4835248",
"0.48303387",
"0.48303008",
"0.48286164",
"0.4825885",
"0.48233512",
"0.48184633",
"0.4816313",
"0.48161572",
"0.48128688",
"0.48036003",
"0.4798532",
"0.47916594",
"0.47770828",
"0.47747788",
"0.47683206",
"0.4761865",
"0.4761277"
] | 0.6843512 | 1 |
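Each record above pairs a natural-language query with its positive document and a list of scored hard negatives; the metadata's objective block names the (query, document, negatives) triplet. A minimal sketch for consuming such records, assuming they are serialized as JSON Lines with the negatives and negative_scores fields shown here (the file path and function name are illustrative):

import json

def iter_records(path="retrieval_records.jsonl"):  # hypothetical path
    # Yield one parsed record per line of a JSONL dump shaped like the
    # records shown in this file.
    with open(path) as fh:
        for line in fh:
            yield json.loads(line)

for rec in iter_records():
    # Each hard negative should line up with exactly one retriever score.
    assert len(rec["negatives"]) == len(rec["negative_scores"])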
Get the hashed BIOS password. | def _get_bios_hash_password(self, bios_password):
request_headers = {}
if bios_password:
        # Hash the encoded password and upper-case the hex digest.
        bios_password_hash = hashlib.sha256(
            bios_password.encode()).hexdigest().upper()
request_headers['X-HPRESTFULAPI-AuthToken'] = bios_password_hash
return request_headers | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_password(self):\n return self.controller.dbfilter.db.get('passwd/user-password')",
"def get_user_password(text):\n return getpass.getpass(text)",
"def hash_password(self, password):\n cmd = [\n \"snap\",\n \"run\",\n \"{}.hash-password\".format(self.synapse_snap),\n \"-c\",\n self.synapse_config,\n \"-p\",\n password,\n ]\n result = check_output(cmd)\n str_result = result.decode(\"utf-8\")\n return str_result.rstrip()",
"def hash_password(self, password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), \n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')",
"def get_password_hash(password: str) -> str:\n return pwd_context.hash(password)",
"def get_passwd(self):\n if self.__password:\n aes_cipher = AESCipher()\n return aes_cipher.decrypt(self.__password, self.__aes_key)",
"def password(self) -> str:",
"def get_password_hash(password):\n\n return pwd_context.hash(password)",
"def hashed_passwd(passwd):\n salt = uuid.uuid4().hex\n return hashlib.sha512(passwd.encode('utf-8')\n + salt.encode('utf-8')).hexdigest()",
"def _get_password(self):\r\n return self._password",
"def _get_password(self):\n return self._password",
"def get_hashed_value(password):\n salt = 'saifulBoss'\n password = salt + password\n return md5(password.encode('utf-8')).hexdigest()",
"def get_hashed_value(password):\n salt = 'saifulBoss'\n password = salt + password\n return md5(password.encode('utf-8')).hexdigest()",
"def retrieve_hash(host, salt):\n \n password_pattern = re.compile(r'\\npassword=(.+)\\r')\n url = 'http://%s/CFIDE/administrator/enter.cfm?locale=../../../../../../../../../../ColdFusion8/lib/password.properties%%00en' % host\n \n try:\n response = requests.post(url)\n password_hash = re.search(password_pattern, response.text)\n \n if len(password_hash.groups()) > 0:\n password_hash = str(password_hash.groups()[0])\n \n output_hash = generate_hash(password_hash, str(salt))\n click.echo('ColdFusion 8 admin password pass-the-hash form bypass.')\n click.echo('Created by: [email protected]')\n click.echo('NOTE** Use Tamper Data or similar to set form field \"cfadminPassword\" to this hash value. Enjoy!')\n click.echo('------------------')\n click.echo('Result: %s' % output_hash)\n click.echo('------------------')\n else:\n click.secho('Unable to retrieve either password or salt value.', fg='red', bold=True)\n \n except Exception as e:\n click.secho('Error: %s.' % e, fg='red', bold=True)",
"def password(self):\n return self._password()",
"def hash_password(password):\r\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\r\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'),salt,100000)\r\n pwdhash = binascii.hexlify(pwdhash)\r\n return (salt+pwdhash).decode('ascii')",
"def device_password(self) -> str:\n return pulumi.get(self, \"device_password\")",
"def device_password(self) -> str:\n return pulumi.get(self, \"device_password\")",
"def GetPassword(self):\n pass",
"def get_verified_password(self):\n return self.controller.dbfilter.db.get('passwd/user-password-again')",
"def getpass(self, prompt):\r\n return getpass.getpass(prompt)",
"def password(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"password\")",
"def hash_string(password):\n return hash(password)",
"def password (string):\n\t\n\treturn hexdigest_mySQL41plus (string)",
"def get_password(self):\n raise NotImplementedError('get_password')",
"def get_password(self) -> str:\n return self._password",
"def getpassword(value):\n hashed = \"%s%s\" % (value, SECRET_KEY)\n hasher = hashlib.md5()\n hasher.update(hashed)\n return hasher.hexdigest()[-8:]",
"def getPassword(self):\n\t\treturn self.Password",
"def get_password(self):\n return self.__password",
"def hash_passwd(password, hash_method=\"sha256\"):\n\n return generate_password_hash(password, hash_method)",
"def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'),\n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')",
"def get_password_from_user():\n pwd = ''\n keyboard = xbmc.Keyboard('', ADDON_NAME + ': ' + localise(32022), True)\n keyboard.doModal()\n if keyboard.isConfirmed():\n pwd = keyboard.getText()\n return pwd",
"def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), \n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')",
"def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), \n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')",
"def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), \n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')",
"def get_password_hash(password):\n if not Utility.check_empty_string(password):\n return Utility.pwd_context.hash(password)",
"def GetPassword(self):\n return self._password",
"def password(self) -> str:\n return pulumi.get(self, \"password\")",
"def password(self) -> str:\n return pulumi.get(self, \"password\")",
"def password(self) -> str:\n return pulumi.get(self, \"password\")",
"def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac(\n 'sha512', password.encode('utf-8'), salt, 100000\n )\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')",
"def get_password(self):\n mpw = master_pass.MPW(self.user, self.master_password)\n return mpw.password(self.ucs_server)",
"def hashPassword(passwd):\r\n \r\n return hashlib.sha224(passwd).hexdigest()",
"def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('utf-8')\n pwdhash = hashlib.pbkdf2_hmac(\n 'sha512', password.encode('utf-8'), salt, 100000\n )\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('utf-8')",
"def _get_user_password(self):\n return self.__user_password",
"def _hashPassword(password):\n charset = './' + ascii_letters + digits\n return crypt.crypt(password, ''.join(random.sample(charset, 2)))",
"def hash_pass(password, salt):\n return hashlib.pbkdf2_hmac('sha512', password.encode(), salt, 100000)",
"def get_auth_password():\n password = AUTH_PASSWORD_SCRIPT.get()\n if password:\n return password\n return DEFAULT_AUTH_PASSWORD.get()",
"def get_weak_password(self, host):\n try:\n return self.weak_hosts.get(host)[2]\n except IndexError:\n return \" \"",
"def _hash_password(self, password):\n passwordhash = bcrypt.hashpw(password.encode('utf8'), bcrypt.gensalt())\n return passwordhash",
"def pwd(password: str):\n password = password.encode('utf8')\n s = sha1()\n s.update(password)\n return s.hexdigest()",
"def get_password(self, service, username):\n init_part = self._keyring.get_password(service, username)\n if init_part:\n parts = [init_part]\n i = 1\n while True:\n next_part = self._keyring.get_password(\n service, '%s{{part_%d}}' % (username, i)\n )\n if next_part:\n parts.append(next_part)\n i += 1\n else:\n break\n return ''.join(parts)\n return None",
"def generate_hash(passwd):\n return hashlib.sha512(passwd.encode(\"utf-8\")).hexdigest()",
"def __generate_hash(password):\n if password is None:\n return None\n return bcrypt.generate_password_hash(password, rounds=10).decode(\"utf8\")",
"def get_lc_passwd(self):\n if self.__lc_password:\n aes_cipher = AESCipher()\n return aes_cipher.decrypt(self.__lc_password, self.__aes_key)",
"def _get_password(self):\n if self._password != None:\n return self._password\n raise DbiException(\"Can't get password\")",
"def device_password(self) -> Optional[str]:\n return pulumi.get(self, \"device_password\")",
"def device_password(self) -> Optional[str]:\n return pulumi.get(self, \"device_password\")",
"def test_random_password():\n output = sh.random_password()\n assert isinstance(output, str) is True\n assert len(output) == 16",
"def generate_hash(password):\n return pbkdf2_sha256.hash(password)",
"def get_default_password(cls):\n \n _position = cls.basic_parameters[2]\n \n return _position['password']",
"def password(self) :\n\t\ttry :\n\t\t\treturn self._password\n\t\texcept Exception as e:\n\t\t\traise e",
"def _pepper_hash(pepper, password, salt):\n return '{:0>8}{:s}{:s}'.format(pepper, password, salt)",
"def hash_password(password: str) -> str:\n return pbkdf2_sha512.hash(password)",
"def hashPassword(self, password):\n key = hashlib.pbkdf2_hmac(\n 'sha256',\n str.encode(password),\n self.salt,\n 100000\n )\n return key",
"def hash_password(password):\n #return passlib.hash.pbkdf2_sha512.encrypt(password)\n return sha256_crypt.hash(password)",
"def hash_password(password: str) -> str:\n return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode('utf8')",
"def old_password (string):\n\t\n\treturn hexdigest_mySQL41prior (string)",
"def hash_password(password):\n salt = binascii.b2a_base64(hashlib.sha256(os.urandom(60)).digest()).strip()\n pwdhash = (\n binascii.b2a_base64(\n hashlib.pbkdf2_hmac(\"sha256\", password.encode(\"utf-8\"), salt, 10000)\n )\n .strip()\n .decode()\n )\n return {\"salt\": salt.decode(), \"pwdhash\": pwdhash}",
"def getFingerprint(self):\r\n return b2a_hex(SHA1(self.bytes))",
"def password(self) -> str:\n return self._password",
"def password(self) -> str:\n return self._password",
"def password_encryption(self, password):\n return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())",
"def askPass( user, host ):\n prompt = \"Password for user {} on host {}: \".format( user, host )\n password = getpass.getpass( prompt )\n return password",
"def password(self):\n return self._password",
"def password(self):\n return self._password",
"def password(self):\n return self._password",
"def password(self):\n return self._password",
"def get_password_hash(self, username):\n raise NotImplementedError()",
"async def hash_password(self, password: str | None) -> str | None:\n if password is not None:\n fut = self._hash_password(password)\n return await self.cpu_subsystem.execute(fut)\n return None",
"def get_salt():\n return os.urandom(32)",
"def hashedPassword(password, salt):\n\tif not hasattr(password, 'decode'):\n\t\tpassword = password.encode('utf-8')\n\tkey = makeKey(password, salt)\n\treturn base64.b64encode(\n\t hashlib.pbkdf2_hmac('sha256', key, password, 1,\n\t dklen=32)).decode('utf-8')",
"def prompt_pass():\n msg = \"Enter Password: \"\n password = getpass.getpass(msg)\n return password",
"def hash_password(password):\n\n return hashlib.sha224(password).hexdigest()[:20]",
"def hash_password(password):\n return pbkdf2_sha512.encrypt(password)",
"def solve_part_two(self):\n password = list(\"XXXXXXXX\")\n index = 0\n counter = 0\n while counter < 8:\n (s, found_index) = self.find_next_hash(index)\n index = found_index + 1\n offset = ord(s[5]) - ord(\"0\")\n # Offset invalid or password character already set previously?\n if offset >= 8 or password[offset] != \"X\":\n continue\n password[offset] = s[6]\n counter += 1\n return \"\".join(password)",
"def generate_password():\n return urlsafe_b64encode(urandom(32)).decode('utf-8')",
"def get_hash():\n return render(build_hash('command'),False)",
"def _hash_password(password: str) -> str:\n # return pbkdf2_sha512.encrypt(password, rounds=ROUNDS, salt=SALT)\n return pbkdf2_sha512.using(rounds=ROUNDS, salt=SALT).hash(password)",
"def password(self):\n return self.factory.server_password",
"def derive_device_authentication_password(device_authentication_password: str) -> bytes:\n kdf = PBKDF2HMAC(\n algorithm=hashes.SHA256(),\n length=16,\n salt=b\"device-authentication-code.1.secure.ip.knx.org\",\n iterations=65536,\n )\n return kdf.derive(device_authentication_password.encode(\"latin-1\"))",
"def get_user_binary(self):\n pass",
"def password_builder():\n password = Credentials.password_buidler()\n return password",
"def password( self ):\n return self._password",
"def hex_key(uid: Text, mp: Text) -> Text:\n\n key = sha256(mp.encode('utf-8') + admin_pass.encode('utf-8')).hexdigest()\n return sha256(uid.lower().encode('utf-8') + key.encode('utf-8')).hexdigest()[:40]",
"def Password(self):\n return self._Password",
"def salted_password(self) -> bytes:\n #NB. FOR NOW, USE THIS.\n return self.password.encode()",
"def password(self) -> str:\n return self.get_env_var(self.password_var)",
"def password(self) -> str:\n return self.get_env_var(self.password_var)",
"def solve_part_one(self):\n password = \"\"\n index = 0\n while len(password) < 8:\n (s, found_index) = self.find_next_hash(index)\n password += s[5]\n index = found_index + 1\n return password"
] | [
"0.69720805",
"0.67519724",
"0.6749621",
"0.6708863",
"0.67022777",
"0.6677505",
"0.66462785",
"0.66458017",
"0.66124636",
"0.659454",
"0.6591045",
"0.657617",
"0.657617",
"0.6572322",
"0.6572243",
"0.655515",
"0.6551918",
"0.6551918",
"0.65395516",
"0.6535958",
"0.6509161",
"0.6503585",
"0.6482336",
"0.64819974",
"0.6469121",
"0.6456717",
"0.6450064",
"0.64438784",
"0.64345384",
"0.6433458",
"0.64175683",
"0.64126515",
"0.64091504",
"0.64091504",
"0.64091504",
"0.64087737",
"0.6393993",
"0.6328709",
"0.6328709",
"0.6328709",
"0.63224727",
"0.6320359",
"0.6318026",
"0.63122386",
"0.62748253",
"0.6272466",
"0.62706023",
"0.6264555",
"0.6264374",
"0.6239721",
"0.6206185",
"0.6196142",
"0.6194153",
"0.61881685",
"0.6183108",
"0.61711854",
"0.6163698",
"0.6163698",
"0.61590284",
"0.61564416",
"0.61561453",
"0.61428803",
"0.61208004",
"0.61196244",
"0.61072695",
"0.6104041",
"0.609654",
"0.60709435",
"0.6066503",
"0.60610425",
"0.6059686",
"0.6059686",
"0.6054831",
"0.60474586",
"0.60466766",
"0.60466766",
"0.60466766",
"0.60466766",
"0.6042296",
"0.6034305",
"0.6031449",
"0.6026249",
"0.6017075",
"0.6009596",
"0.6007447",
"0.6006361",
"0.60052836",
"0.59983176",
"0.5995495",
"0.5995389",
"0.59953696",
"0.5995258",
"0.59930456",
"0.5987401",
"0.59847164",
"0.5981002",
"0.5973533",
"0.5971809",
"0.5971809",
"0.59701806"
] | 0.62322944 | 50 |
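For reference, a standalone sketch of the token computation performed by _get_bios_hash_password above; the header name X-HPRESTFULAPI-AuthToken is taken from that code, while the function name here is illustrative:

import hashlib

def bios_auth_headers(bios_password):
    # Mirror _get_bios_hash_password: SHA-256 the encoded password, then
    # upper-case the hex digest to form the auth token header value.
    headers = {}
    if bios_password:
        headers['X-HPRESTFULAPI-AuthToken'] = (
            hashlib.sha256(bios_password.encode()).hexdigest().upper())
    return headers

# An empty or None password yields no header; otherwise the token is a
# 64-character upper-case hexadecimal string.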
Change the bios settings to specified values. | def _change_bios_setting(self, properties):
keys = properties.keys()
# Check if the BIOS resource/property exists.
headers, bios_uri, settings = self._check_bios_resource(keys)
if not self._operation_allowed(headers, 'PATCH'):
headers, bios_uri, _ = self._get_bios_settings_resource(settings)
self._validate_if_patch_supported(headers, bios_uri)
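    # Attach the hashed BIOS password (if one is set) to the request headers.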
request_headers = self._get_bios_hash_password(self.bios_password)
status, headers, response = self._rest_patch(bios_uri, request_headers,
properties)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_settings(self):\n\n self.sim.account.set_balance(int(self.balance_str.get()))\n\n self.sim.config.set_base_bet(int(self.base_bet_str.get()))\n self.sim.config.set_payout(float(self.payout_str.get()))\n self.sim.config.set_iterations(int(self.iterations_str.get()))\n self.sim.config.set_loss_adder(int(self.loss_adder_str.get()))",
"def set_bios_settings(self, data=None):\n\n if not data:\n raise exception.SDFlexError(\"Could not apply settings with\"\n \" empty data\")\n sushy_system = self._get_sushy_system()\n\n try:\n for key in data.keys():\n sushy_system.bios.set_attribute(key, data[key])\n except sushy.exceptions.SushyError as e:\n message_extended_info = e.body.get('@Message.ExtendedInfo')\n error_message = message_extended_info[0]['Message']\n\n msg = (self._(\"Setting the value of Bios attribute \"\n \"'%(atrribute)s' is not succesfull. \"\n \"Error: %(error)s\") %\n {'error': str(error_message), 'atrribute': key})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def set_settings_devices(self):\n self.set_thermostat, self.set_humidifier, self.set_sprinklers, self.set_ventilation = self.settings[3:]",
"def set_config(self): # called from button_set object \n self.settings['lights_on'] = self.lights_on.get()\n self.settings['lights_off'] = self.lights_off.get()\n self.settings['ambient_min'] = self.ambient_min.get()\n self.settings['soil_1'] = self.smc1.get()\n self.settings['soil_2'] = self.smc2.get()\n self.settings['soil_3'] = self.smc3.get()\n self.settings['soil_4'] = self.smc4.get()\n self.settings['overhead_level'] = self.overhead_level.get()\n\n # Save settings to config file in case of reboot / power-loss\n print \"UPDATING SETTINGS FILE\"\n with open(self.settings_path, 'w') as jsonfile:\n jsonfile.write(json.dumps(self.settings, indent=4))\n self.active_changes = True # (flag) changes are active!",
"def update_values(self, values):\n if values is not None:\n self.settings.update(values)\n\n # External (from MCU)\n self.label_smc1.configure(text=self.smc1_template % self.settings['s1'], font=self.font)\n self.label_smc2.configure(text=self.smc2_template % self.settings['s2'], font=self.font)\n self.label_smc3.configure(text=self.smc3_template % self.settings['s3'], font=self.font)\n self.label_smc4.configure(text=self.smc4_template % self.settings['s4'], font=self.font)\n self.label_ambient_min.configure(text=self.ambient_light_template % self.settings['p'], font=self.font)\n\n # Internal (from GUI)\n self.label_overhead_level.configure(text=self.overhead_level_template % self.overhead_level.get(), font=self.font)\n self.active_changes = True # (flag) Once changes are retrieved, we assume that they will be sent to the controller",
"def set_attributes(self, settings):\n\n for key, value in settings.items():\n self.__dict__[key] = value",
"def edit_settings(self):\n while True:\n os.system('cls' if os.name == 'nt' else 'clear')\n valid_numbers, number_setting_corr = self.print_settings()\n print('Which setting you want to change? Enter \"number, new value\" to modify, or \"done\" to exit.')\n print('Observe the possible values for each setting! They are case sensitive. '\n 'Inputting wrong values might break the program. \\n')\n choice = input('Input:')\n if choice == 'done':\n break\n if ',' not in choice:\n print('Invalid input. Place the number, followed by a comma, followed by its value. Eg: 1,TRUE')\n continue\n if len(choice.split(',')) != 2:\n print('Invalid input, must have only one comma')\n continue\n\n var, val = choice.split(',')\n if var not in valid_numbers:\n print('Invalid number.')\n continue\n real_var = number_setting_corr[var] # Changes from a number to the actual parameter\n if val.lower() == 'true':\n setattr(self, real_var, True)\n continue\n elif val.lower() == 'false':\n setattr(self, real_var, False)\n continue\n else:\n setattr(self, real_var, val)\n\n # todo: check for all possible values to avoid inputting wrong settings and messing everything up.\n # if val not in valid_options_nl_sorting:\n # print('Invalid nonlinear sorting option. Case sensitive! Be very precise.')\n # continue\n # if val not in valid_options_lin_sorting:\n # print('Invalid linear sorting option. Case sensitive! Be very precise.')\n # continue\n # if val not in models:\n # print('Invalid nonlinear fitting model. Case sensitive! Be very precise.')\n # continue\n\n print('===Final settings===')\n _, _ = self.print_settings()\n self.save_settings()\n return",
"def settings_OBD(self, label):\n if label == 'bt':\n try:\n self.default['serialLabel'] = label\n self.default['serialDevice'] = config.config().serialDevice[label]\n os.system(\"blueman-manager\")\n except:\n print \"Please install 'blueman' package\"\n elif label == 'usb':\n self.default['serialLabel'] = label\n self.default['serialDevice'] = config.config().serialDevice[label]\n elif label == 'dev':\n self.default['serialLabel'] = label\n self.default['serialDevice'] = config.config().serialDevice[label]\n elif label == 'metric':\n self.default['units'] = 'metric'\n print 'made it'\n elif label == 'US':\n self.default['units'] = 'US' \n else: #ATSP signal return int -> else\n self.default['ATSP'] = self.ui.spinBox_ATSP.value()\n\n return",
"def _change_secure_boot_settings(self, property, value):\n system = self._get_host_details()\n # find the BIOS URI\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = (' \"SecureBoot\" resource or feature is not '\n 'supported on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # Change the property required\n new_secure_boot_settings = {}\n new_secure_boot_settings[property] = value\n\n # perform the patch\n status, headers, response = self._rest_patch(\n secure_boot_uri, None, new_secure_boot_settings)\n\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n # Change the bios setting as a workaround to enable secure boot\n # Can be removed when fixed for Gen9 snap2\n val = self._get_bios_setting('CustomPostMessage')\n val = val.rstrip() if val.endswith(\" \") else val+\" \"\n self._change_bios_setting({'CustomPostMessage': val})",
"def changeSettings(self,instance,description, cur_memory, memory, cur_vcpu, vcpu):\n memory = int(memory) * 1024\n cur_memory = int(cur_memory) * 1024\n\n xml = instance.XMLDesc(1)\n tree = ElementTree.fromstring(xml)\n\n set_mem = tree.find('memory')\n set_mem.text = str(memory)\n set_cur_mem = tree.find('currentMemory')\n set_cur_mem.text = str(cur_memory)\n set_desc = tree.find('description')\n set_vcpu = tree.find('vcpu')\n set_vcpu.text = vcpu\n set_vcpu.set('current', cur_vcpu)\n\n if not set_desc:\n tree_desc = ElementTree.Element('description')\n tree_desc.text = description\n tree.insert(2, tree_desc)\n else:\n set_desc.text = description\n\n new_xml = ElementTree.tostring(tree)\n return self.defineXML(new_xml)",
"def set_values(self, settings, autosave=True, override=True):\n if settings:\n for key, value in settings.items():\n self.set(key, value, autosave=False, override=override)\n if autosave:\n self.save()\n return self",
"def update_ionic_settings(self, key, value):\n if self._ionic_settings:\n if key in self._ionic_settings:\n self._ionic_settings[key] = value\n else:\n print(\"key does not exist!! keys include: {ediff ,nsw, ibrion ,isif, isym, nblock, kblock}\")\n else:\n print(\"magnetic settings not present!\")",
"def change_settings(self):\r\n self.clear_screen()\r\n # making sure the screen grid will be organized\r\n label_line = Label(self.root, text=\" \", font=self.text_font, bg=self.bg_color)\r\n label_line.grid(row=0, column=0)\r\n label_line = Label(self.root, text=\" \", font=self.text_font, bg=self.bg_color)\r\n label_line.grid(row=0, column=10)\r\n\r\n user_label = Label(self.root, text=\"Hello \" + self.username,\r\n font=self.title_font, bg=self.bg_color, height=2)\r\n user_label.grid(pady=10, padx=50, row=0, column=6, columnspan=4)\r\n settings_title = Label(self.root, text=\"Enigma Settings\",\r\n font=self.title_font, bg=self.bg_color)\r\n settings_title.grid(row=0, column=2, columnspan=4, pady=15)\r\n rotor1_num, rotor2_num, rotor3_num, rotor1_letter, rotor2_letter, rotor3_letter = \\\r\n self.simulator_enigma.rotors.get_initial_setting()\r\n lst_roman_rotor_num = [\"I\", \"II\", \"III\", \"IV\", \"V\"]\r\n\r\n rotors_number = Label(self.root, text=\"the rotors in the enigma\",\r\n font=self.title_font, bg=self.bg_color)\r\n rotors_number.grid(row=1, column=3, columnspan=5, pady=5)\r\n\r\n numbers_lst = [\"I\", \"II\", \"III\", \"IV\", \"V\"]\r\n first_rotor_label_num = Label(self.root, text=\"First Rotor\",\r\n font=self.text_font, bg=self.bg_color)\r\n first_rotor_label_num.grid(row=2, column=1, columnspan=3)\r\n options_rotor1 = StringVar()\r\n options_rotor1.set(lst_roman_rotor_num[int(rotor1_num) - 1])\r\n rotor_num1_options = OptionMenu(self.root, options_rotor1, *numbers_lst)\r\n rotor_num1_options.grid(row=3, column=1, columnspan=3, padx=15)\r\n\r\n second_rotor_label_num = Label(self.root, text=\"Second Rotor\",\r\n font=self.text_font, bg=self.bg_color)\r\n second_rotor_label_num.grid(row=2, column=4, columnspan=3)\r\n options_rotor2 = StringVar()\r\n options_rotor2.set(lst_roman_rotor_num[int(rotor2_num) - 1])\r\n rotor_num2_options = OptionMenu(self.root, options_rotor2, *numbers_lst)\r\n rotor_num2_options.grid(row=3, column=4, columnspan=3, padx=15)\r\n\r\n third_rotor_label_num = Label(self.root, text=\"Third Rotor\",\r\n font=self.text_font, bg=self.bg_color)\r\n third_rotor_label_num.grid(row=2, column=7, columnspan=3)\r\n options_rotor3 = StringVar()\r\n options_rotor3.set(lst_roman_rotor_num[int(rotor3_num) - 1])\r\n rotor_num3_options = OptionMenu(self.root, options_rotor3, *numbers_lst)\r\n rotor_num3_options.grid(row=3, column=7, columnspan=3, padx=15)\r\n\r\n rotors_letters = Label(self.root, text=\"the letters on the rotors\",\r\n font=self.title_font, bg=self.bg_color)\r\n rotors_letters.grid(row=4, column=3, columnspan=5, pady=5)\r\n\r\n abc_lst = [chr(i) for i in range(65, 91)]\r\n\r\n first_rotor_label_letter = Label(self.root, text=\"first Rotor\",\r\n font=self.text_font, bg=self.bg_color)\r\n first_rotor_label_letter.grid(row=5, column=1, columnspan=3)\r\n options_rotor_l1 = StringVar()\r\n options_rotor_l1.set(rotor1_letter)\r\n rotor_l1_options = OptionMenu(self.root, options_rotor_l1, *abc_lst)\r\n rotor_l1_options.grid(row=6, column=1, columnspan=3, padx=15)\r\n\r\n second_rotor_label_letter = Label(self.root, text=\"second Rotor\",\r\n font=self.text_font, bg=self.bg_color)\r\n second_rotor_label_letter.grid(row=5, column=4, columnspan=3)\r\n options_rotor_l2 = StringVar()\r\n options_rotor_l2.set(rotor2_letter)\r\n rotor_l2_options = OptionMenu(self.root, options_rotor_l2, *abc_lst)\r\n rotor_l2_options.grid(row=6, column=4, columnspan=3, padx=15)\r\n\r\n third_rotor_label_letter = Label(self.root, text=\"Third Rotor\",\r\n font=self.text_font, 
bg=self.bg_color)\r\n third_rotor_label_letter.grid(row=5, column=7, columnspan=3)\r\n rotors_letters = Label(self.root, text=\"the letters on the rotors\",\r\n font=self.title_font, bg=self.bg_color)\r\n rotors_letters.grid(row=4, column=3, columnspan=5, pady=5)\r\n options_rotor_l3 = StringVar()\r\n options_rotor_l3.set(rotor3_letter)\r\n rotor_l3_options = OptionMenu(self.root, options_rotor_l3, *abc_lst)\r\n rotor_l3_options.grid(row=6, column=7, columnspan=3, padx=15)\r\n\r\n plugboard_title = Label(self.root, text=\"Plugboard settings\",\r\n font=self.title_font, bg=self.bg_color)\r\n plugboard_title.grid(row=7, column=3, columnspan=5, pady=5)\r\n plugboard_note = Label(self.root, text=\"Plugboard can contain 10 pairs max\",\r\n bg=self.bg_color, font=self.text_font)\r\n plugboard_note.grid(row=8, column=3, columnspan=5, pady=5)\r\n lst_buttons = []\r\n for i in range(65, 74):\r\n plugboard_letter = Button(self.root, text=\" \" + chr(i) + \" \", font=self.text_font,\r\n bg=\"khaki\", relief=RIDGE, height=2, width=3,\r\n command=lambda letter=chr(i):\r\n self.add_letter_in_plugboard(letter, lst_buttons))\r\n plugboard_letter.grid(row=9, column=i - 64, pady=5, padx=5)\r\n lst_buttons.append(plugboard_letter)\r\n\r\n for i in range(74, 83):\r\n plugboard_letter = Button(self.root, text=\" \" + chr(i) + \" \", font=self.text_font,\r\n bg=\"khaki\", relief=RIDGE, height=2, width=3,\r\n command=lambda letter=chr(i):\r\n self.add_letter_in_plugboard(letter, lst_buttons))\r\n plugboard_letter.grid(row=10, column=i - 73, pady=5, padx=5)\r\n lst_buttons.append(plugboard_letter)\r\n\r\n for i in range(83, 91):\r\n plugboard_letter = Button(self.root, text=\" \" + chr(i) + \" \", font=self.text_font,\r\n bg=\"khaki\", relief=RIDGE, height=2, width=3,\r\n command=lambda letter=chr(i):\r\n self.add_letter_in_plugboard(letter, lst_buttons))\r\n plugboard_letter.grid(row=11, column=i - 82, pady=5, padx=5)\r\n lst_buttons.append(plugboard_letter)\r\n\r\n self.set_plugboard(lst_buttons)\r\n\r\n button_save_settings = Button(self.root, text=\"save settings and go to simulator\",\r\n height=2, width=35, font=self.text_font,\r\n command=lambda: self.save_settings(options_rotor1.get(),\r\n options_rotor2.get(),\r\n options_rotor3.get(),\r\n options_rotor_l1.get(),\r\n options_rotor_l2.get(),\r\n options_rotor_l3.get()))\r\n button_save_settings.grid(row=12, column=0, columnspan=10, rowspan=2, pady=20, padx=5)",
"def settings_load(self):\n self.ui.spinBox_ATSP.setValue(self.default['ATSP'])\n\n if self.default['serialLabel'] == 'bt':\n self.ui.btRadio.setChecked(True)\n try:\n os.system(\"blueman-manager\")\n except:\n print \"Please install 'blueman' package\"\n elif self.default['serialLabel'] == 'usb':\n self.ui.usbRadio.setChecked(True)\n else:\n self.ui.devRadio.setChecked(True)\n\n if self.default['units'] == 'metric':\n self.ui.units_metric_radio.setChecked(True)\n else:\n self.ui.units_US_radio.setChecked(True)\n\n return",
"def quick_set_obsidian_settings(self):\n self.logger.debug(\"Obsidian conversion settings\")\n self.quick_setting = 'obsidian'\n self.export_format = 'obsidian'\n self.front_matter_format = 'yaml'\n self.metadata_schema = []\n if self.conversion_input == 'nsx':\n self.metadata_schema = ['title', 'ctime', 'mtime', 'tag']\n self.spaces_in_tags = False\n self.split_tags = False\n self.first_row_as_header = True\n self.first_column_as_header = True\n self.chart_image = True\n self.chart_csv = True\n self.chart_data_table = True",
"def update_electronic_settings(self, key, value):\n\n if key in self._electronic_settings:\n self._electronic_settings[key] = value\n else:\n print(\"key does not exist!! keys include: {prec_level, algo, encut , nelm,nelmin, ediff, sigma, lasph, lreal, addgrid, bmaxmix, bmix}\")",
"async def settings(self, ctx: BBContext):\n pass",
"def cmd_account_change_settings(client, args):\n fields = data_fields(args, client.allowed_account_fields)\n account_settings = client.change_account_settings(args.user, fields)\n generate_output({'account_settings': account_settings})",
"def reset_bios_to_default(self):\n # Check if the BIOS resource if exists.\n headers_bios, bios_uri, bios_settings = self._check_bios_resource()\n # Get the BaseConfig resource.\n try:\n base_config_uri = bios_settings['links']['BaseConfigs']['href']\n except KeyError:\n msg = (\"BaseConfigs resource not found. Couldn't apply the BIOS \"\n \"Settings.\")\n raise exception.IloCommandNotSupportedError(msg)\n\n # Check if BIOS resource supports patch, else get the settings\n if not self._operation_allowed(headers_bios, 'PATCH'):\n headers, bios_uri, _ = self._get_bios_settings_resource(\n bios_settings)\n self._validate_if_patch_supported(headers, bios_uri)\n\n status, headers, config = self._rest_get(base_config_uri)\n if status != 200:\n msg = self._get_extended_error(config)\n raise exception.IloError(msg)\n\n new_bios_settings = {}\n for cfg in config['BaseConfigs']:\n default_settings = cfg.get('default', None)\n if default_settings is not None:\n new_bios_settings = default_settings\n break\n else:\n msg = (\"Default Settings not found in 'BaseConfigs' resource.\")\n raise exception.IloCommandNotSupportedError(msg)\n request_headers = self._get_bios_hash_password(self.bios_password)\n status, headers, response = self._rest_patch(bios_uri, request_headers,\n new_bios_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def test_update_bios_boot_mode(self):\n pass",
"def change_settings(self, bio=None, public_images=None,\n messaging_enabled=None, album_privacy=None,\n accepted_gallery_terms=None):\n # NOTE: album_privacy should maybe be renamed to default_privacy\n # NOTE: public_images is a boolean, despite the documentation saying it\n # is a string.\n url = \"https://api.imgur.com/3/account/{0}/settings\".format(self.name)\n resp = self._imgur._send_request(url, needs_auth=True, params=locals(),\n method='POST')\n return resp",
"def test_020_change_settings(self):\n\n testflow.step(\"Modifying settings via CLI\")\n assert self.settings_cli.run(\n 'set',\n name='MESSAGE_OF_THE_DAY',\n value='Zdravicko',\n )[0], \"Failed to change MESSAGE_OF_THE_DAY setting\"\n\n testflow.step(\"Querying for modified setting\")\n show_out = self.settings_cli.run(\n 'show',\n name='MESSAGE_OF_THE_DAY',\n )\n assert show_out[0], 'Failed to run show command'\n assert 'Zdravicko' in show_out[1], 'Setting value was not changed'\n\n testflow.step(\"Modifying setting back to default\")\n assert self.settings_cli.run( # Change value back to default\n 'set',\n name='MESSAGE_OF_THE_DAY',\n value='',\n )[0], \"Failed to change MESSAGE_OF_THE_DAY setting to defaul value\"",
"def set_by_gui(self):\n\n # Use the GetFromGui class (below):\n user_choice = GetFromGui(None, -1, 'Params')\n # success is achieved if the user presses 'done': \n if user_choice.success: \n user_params = {\n \"subject\" : user_choice.subject,\n \"texture_dur\" : float(user_choice.SOA)/1000.,\n \"demo\": user_choice.demo,\n }\n else:\n user_choice.Destroy()\n raise ValueError(\"Program stopped by user\")\n # Stop execution of the window\n user_choice.Destroy()\n \n for k in user_params.keys():\n self.__setattr__(k,user_params[k])",
"def update_settings(self):\n settings = {\n \"reference\": self,\n \"draw_tangents\": self.cbDrawTangents.isChecked(),\n }\n if self.cbShowSolarAngle.isChecked():\n settings[\"show_solar_angle\"] = self.cbSolarAngleType.currentText(), self.cbSolarBody.currentText()\n else:\n settings[\"show_solar_angle\"] = None\n\n self.view.set_remote_sensing_appearance(settings)",
"def write_xbee_settings():\n device.apply_changes()\n device.write_changes()",
"def kill_all(self):\n self.settings['lights_on'] = 12\n self.settings['lights_off'] = 12\n self.settings['overhead_level'] = 0\n self.settings['soil_1'] = 0\n self.settings['soil_2'] = 0\n self.settings['soil_3'] = 0\n self.settings['soil_4'] = 0\n self.scale_overhead_level.set(self.settings['overhead_level'])\n self.scale_smc1.set(self.settings['soil_1'])\n self.scale_smc2.set(self.settings['soil_2'])\n self.scale_smc3.set(self.settings['soil_3'])\n self.scale_smc4.set(self.settings['soil_4'])\n self.active_changes = True # (flag) Once changes are retrieved, we assume that they will be sent to the controller",
"def setSettings(self):\r\n # 根据默认参数设置,根据是否使用config来设定参数\r\n if self.__config__[\"config\"] is False:\r\n self.json.setChecked(False)\r\n self.json_path.setEnabled(False)\r\n self.json_select.setEnabled(False)\r\n\r\n tem = [self.l_line, self.r_line, self.p_line]\r\n [x.setEnabled(True) for x in tem]\r\n\r\n for key, value in self.elements.items():\r\n key.setEnabled(True)\r\n\r\n # 设定程序或者json文件的路径\r\n if self.__config__[\"exe\"]:\r\n self.executable.setText(self.__config__[\"exe\"])\r\n else:\r\n self.executable.clear()\r\n if self.__config__[\"config_path\"]:\r\n self.json_path.setText(self.__config__[\"config_path\"])\r\n else:\r\n self.json_path.clear()\r\n \r\n # 设定其他参数\r\n if self.__config__[\"paras\"]:\r\n for key, value in self.__config__[\"paras\"].items():\r\n element = self.parameters[key]\r\n if value not in (\"::\", \"\"):\r\n element.setEnabled(True)\r\n\r\n key1 = get_key_by_value(self.elements, element)\r\n if key1:\r\n key1.setEnabled(True)\r\n key1.setChecked(True)\r\n\r\n if isinstance(element, QLineEdit):\r\n element.setText(value)\r\n elif isinstance(element, QComboBox):\r\n index = element.findText(value, Qt.MatchFixedString)\r\n if index >= 0:\r\n element.setCurrentIndex(index)",
"def test_update_bios_policy(self):\n pass",
"def __setSettingsToStorage(value):\n AccountSettings.setSettings(NEW_SETTINGS_COUNTER, value)",
"def set_by_gui(self):\n\n # Use the GetFromGui class (below):\n user_choice = GetFromGui(None, -1, 'Params')\n # success is achieved if the user presses 'done': \n if user_choice.success: \n user_params = {\n \"subject\" : user_choice.subject,\n \"orientation\" : user_choice.sc_ori,\n \"target_loc\": user_choice.target_loc,\n \"demo\": user_choice.demo,\n \"start_per_staircase\":float(user_choice.start_per),\n \"start_fix_staircase\":float(user_choice.start_fix),\n \"do_peripheral\":user_choice.do_per,\n \"do_fixation\":user_choice.do_fix\n }\n else:\n user_choice.Destroy()\n raise ValueError(\"Program stopped by user\")\n # Stop execution of the window\n user_choice.Destroy()\n \n for k in user_params.keys():\n self.__setattr__(k,user_params[k])",
"def create_test_bios_setting(**kw):\n bios_setting = get_test_bios_setting(**kw)\n dbapi = db_api.get_instance()\n node_id = bios_setting['node_id']\n version = bios_setting['version']\n settings = [{'name': bios_setting['name'],\n 'value': bios_setting['value'],\n 'attribute_type': bios_setting['attribute_type'],\n 'allowable_values': bios_setting['allowable_values'],\n 'read_only': bios_setting['read_only'],\n 'reset_required': bios_setting['reset_required'],\n 'unique': bios_setting['unique']}]\n return dbapi.create_bios_setting_list(node_id, settings, version)[0]",
"def changeInitValues(self, betas):\n\n if self.name in betas:\n self.initValue = betas[self.name]",
"def bs_set(self, cmd, arg):\n\t\tif arg:\n\t\t\tfor x in self.split(arg):\n\t\t\t\tif '=' in x:\n\t\t\t\t\tk, v = x.split('=', 1)\n\t\t\t\t\tos.environ[k] = v\n\t\t\t\t\tself.write(\"%s=%s%s\" %(k, v, os.linesep))\n\t\t\t\telif x:\n\t\t\t\t\tself.write(\"%s=%s%s\" %(x, os.environ.get(x, ''), os.linesep))\n\t\telse:\n\t\t\tfor k,v in os.environ.items():\n\t\t\t\tself.write(\"%s=%s%s\" %(k, v, os.linesep))",
"def load_defaults(self):\n self.set_motor_limits(self.MOTOR_LEFT, self.LEFT_DEFAULT)\n self.set_motor_limits(self.MOTOR_RIGHT, self.RIGHT_DEFAULT)\n self.set_servo(self.SERVO_1, self.MIDPOINT)",
"def load_defaults(self):\n self.set_motor_limits(self.MOTOR_LEFT, self.LEFT_DEFAULT)\n self.set_motor_limits(self.MOTOR_RIGHT, self.RIGHT_DEFAULT)\n self.set_servo(self.SERVO_1, self.MIDPOINT)",
"def load_defaults(self):\n self.set_motor_limits(self.MOTOR_LEFT, self.LEFT_DEFAULT)\n self.set_motor_limits(self.MOTOR_RIGHT, self.RIGHT_DEFAULT)\n self.set_servo(self.SERVO_1, self.MIDPOINT)",
"def os_settings():\n for setting_name, env_name in (\n (\"debug\", \"BACPYPES_DEBUG\"),\n (\"color\", \"BACPYPES_COLOR\"),\n (\"debug_file\", \"BACPYPES_DEBUG_FILE\"),\n (\"max_bytes\", \"BACPYPES_MAX_BYTES\"),\n (\"backup_count\", \"BACPYPES_BACKUP_COUNT\"),\n (\"route_aware\", \"BACPYPES_ROUTE_AWARE\"),\n ):\n env_value = os.getenv(env_name, None)\n if env_value is not None:\n cur_value = settings[setting_name]\n\n if isinstance(cur_value, bool):\n env_value = env_value.lower()\n if env_value in (\"set\", \"true\"):\n env_value = True\n elif env_value in (\"reset\", \"false\"):\n env_value = False\n else:\n raise ValueError(\"setting: \" + setting_name)\n elif isinstance(cur_value, int):\n try:\n env_value = int(env_value)\n except:\n raise ValueError(\"setting: \" + setting_name)\n elif isinstance(cur_value, str):\n pass\n elif isinstance(cur_value, list):\n env_value = env_value.split()\n elif isinstance(cur_value, set):\n env_value = set(env_value.split())\n else:\n raise TypeError(\"setting type: \" + setting_name)\n settings[setting_name] = env_value",
"def SetSamBA():\n log.Log(\"Setting into SAM-BA...\")\n \n ser = serial.Serial(port=ArduinoFlashHardValues.arduinoPort,\\\n baudrate=1200,\\\n parity=serial.PARITY_NONE,\\\n stopbits=serial.STOPBITS_ONE,\\\n bytesize=serial.EIGHTBITS,\\\n timeout=2000)\n \n time.sleep(10)\n \n ser.close() \n log.Log(\"SAM-BA Set.\")",
"def doSettings(self, k):\n def bbAdd(textObj):\n dims = self.adj.tsc.dims(textObj)\n self.dims.setDims(k, name, dims)\n\n for name in self._settings:\n value = self.opts[name]\n if not value: continue\n fontsize = self.fontsize(name, None)\n kw = {'size':fontsize} if fontsize else {}\n bbAdd(self.sp.set_(name, value, **kw))\n if name == 'xlabel':\n self.xlabels[k] = value\n continue\n settings = self.opts['settings']\n for name in settings:\n bbAdd(self.sp.set_(name, settings[name]))",
"def update_hubbard_settings(self, key, value):\n\n if self._hubbard_settings:\n if key in self._hubbard_settings:\n self._hubbard_settings[key] = value\n else:\n print(\"key does not exist!! keys include: {ldau, ldatype, ldaul, dlauu, ldauj, lmaxmix}\")\n else:\n print(\"hybrid settings not present!\")",
"def SetSettings (self, settings) :\n\t\treturn self.run(\"SetSettings\", settings)",
"def apply_settings(camera):\r\n camera.clear_mode = 0\r\n camera.exp_mode = \"Internal Trigger\"\r\n camera.readout_port = 0\r\n camera.speed_table_index = 0\r\n camera.gain = 1",
"def update_control_widgets(self):\n logger.info(f'Loading settings: {self.settings_dict}')\n for k, section in self.settings_dict.items():\n for setting_name, value in section.items():\n self.set_control_value(setting_name, value)",
"def set_defaults(self):\n self.plastic = False\n self.unset_output()\n self.reward = False\n self.patmod = config.impact_modulation_default",
"def set_boot_order(profile_obj):\n status = True\n logger._log_to_console_and_log_file(\"\")\n logger._log_to_console_and_log_file(\"### Testing the 'Boot Settings' session ###\")\n logger._log_to_console_and_log_file(\"- Select the 'Legacy BIOS' mode\")\n createprofile_elements = ProfileContainer(ProfileContainerType.ADD)\n __select_value_from_a_profile_combo_box(createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE, createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE_LIST % \"Legacy BIOS\")\n # Set invalid values\n logger._log_to_console_and_log_file(\"Testing using invalid values\")\n for profile in profile_obj:\n items = [[\"CD\", profile.cd], [\"USB\", profile.usb], [\"HardDisk\", profile.harddisk]]\n for data in items:\n ui_lib.wait_for_element_and_input_text(\"name=%s\" % data[0], data[1])\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_CREATE_SERVER_PROFILE_FORM)\n if data[0] == \"HardDisk\":\n data[0] = \"Hard Disk\"\n if ui_lib.wait_for_element_text(FusionServerProfilesPage.ID_BOOT_ORDER_POSITION % data[0], data[1], timeout=1):\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was not cleared to the default value and persisted as '\" + str(data[1]) + \"'\")\n status = False\n else:\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was correctly cleared to the default value\")\n return status",
"def change_settings(new_settings={}, file=None):\n gl = globals()\n if file is not None:\n execfile(file)\n gl.update(locals())\n gl.update(new_settings)\n # Here you can add some code to check that the new configuration\n # values are valid.",
"def set_config(self, settings='settings.json'): # called from button_set object \n self.settings['lights_on'] = self.lights_on.get()\n self.settings['lights_off'] = self.lights_off.get()\n self.settings['photo1'] = self.photo1.get()\n self.settings['photo2'] = self.photo2.get()\n self.settings['smc1'] = self.smc1.get()\n self.settings['smc2'] = self.smc2.get()\n self.settings['smc3'] = self.smc3.get()\n self.settings['smc4'] = self.smc4.get()\n self.settings['watering'] = self.watering.get()\n self.settings['cycle'] = self.cycle.get()\n settings_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), settings)\n if os.path.exists(settings_path):\n with open(settings_path, 'w') as jsonfile:\n jsonfile.write(json.dumps(self.settings, indent=4))\n self.active_changes = True # (flag) changes are active!",
"def change_value(self, spinbox, action):\n # Gets the name of the spinbox and finds from which tab it belongs\n spb_name = spinbox.objectName()\n # Parsing to get the index of the \"_\" separators from the beginning (tab)\n tab_sep = spb_name.find(\"_\") \n tab_code = spb_name[0:tab_sep]\n # If the three first letters of the spinbox name correspond to the modes' name, the \n # section has the same name, therefore it is only necessary to copy. The alarms tab\n # is different\n # Depending on the section, it is necessary to remove a few characters from the end of the \n # string. Either remove \"spb\" or \"min_spb\" or \"max_spb\"\n if tab_code in [\"VCV\", \"PSV\", \"PCV\"]:\n conf_section = tab_code\n remove_chars = 3\n elif tab_code == \"al\":\n conf_section = \"Alarms\"\n remove_chars = 7\n elif tab_code == \"cfg\":\n conf_section = \"Config\"\n remove_chars = 3\n # It should never reach the \"else\", but still here it is, if something fails\n else:\n print(\"Tab code \" + tab_code + \" is not valid\")\n return\n # Gets the \"pure\" name of the spb, to which \"inc\" will be appended in order to access it in\n # the conf file\n spb_no_suffix = spb_name[tab_sep+1:-remove_chars]\n # gets the increment as a float number from the appropriate section and spinbox\n increment = self.conf[conf_section].getfloat(spb_no_suffix + \"inc\")\n # Sets the single step property of the spinbox to correspond to the increment\n spinbox.setSingleStep(increment)\n # Adjusts the precision based on the increment's order of magnitude\n for i, limit in enumerate([1, 0.1, 0.01, 0.001, 0.0001, 0.00001]):\n if increment < limit:\n continue\n spinbox.setDecimals(i)\n break\n # Depending on the desired action, increases or deccreases the current spinbox value\n if action == \"-\":\n spinbox.setValue(spinbox.value() - increment)\n else:\n spinbox.setValue(spinbox.value() + increment)",
"def save_changes(self):\n\n velib, autolib, subway = None, None, None\n for key, value in VELIB_SUBSCRIPTIONS.iteritems():\n if self._velib.get() == value:\n velib = key\n break\n for key, value in AUTOLIB_SUBSCRIPTIONS.iteritems():\n if self._autolib.get() == value:\n autolib = key\n break\n for key, value in SUBWAY_SUBSCRIPTIONS.iteritems():\n if self._subway.get() == value:\n subway = key\n break\n preferences = {\n FASTEST: self._fastest.get(),\n SHORTEST: self._shortest.get(),\n CHEAPEST: self._cheapest.get(),\n SIMPLEST: self._simplest.get(),\n WEATHER_IMPACT: self._weather_impact.get(),\n LESS_PAINFUL: self._less_painful.get(),\n LESS_WALKING: self._less_walking.get()\n }\n\n result = self._system.set_profile_settings(velib, autolib, subway, self._driving_licence.get(), preferences)\n if not result[\"success\"]:\n showerror('Erreur système', result[\"error\"])\n return\n\n # Redirection vers la page principale\n from settings import RideSettingsPage\n self.pack_forget()\n RideSettingsPage(self._window, self._system)",
"def __setattr__(self, name: str, value: Any) -> None:\n if name.isupper():\n self._settings[name] = value\n super().__setattr__(name, value)",
"def test_patch_bios_boot_mode(self):\n pass",
"def setValues(\n self,\n mechanicalConstraint: SymbolicConstant = KINEMATIC,\n contactControls: str = \"\",\n ):\n pass",
"def settings(self, value):\n self._settings = value",
"def iniitialize_dynamic_settings(self):\n\t\tself.speed_factor = 1.5\n\t\tself.bullet_speed_factor = 3\n\t\tself.alien_speed_factor = 1\n\n\t\t# fleet_direction of 1 represents right; -1 represents left. \n\t\tself.fleet_direction = 1",
"def _configure(self):\n Values._configure(self)\n self.values = [self.inventory.one, self.inventory.two]\n return",
"async def __set(ctx: commands.Context, setting: str, value: str):\n settings = ctx.bot.app_settings\n valid_settings = settings.USER_FACING_SETTINGS\n found = [key for key in valid_settings if key.startswith(setting)]\n if len(found) == 1:\n setting = found[0]\n else:\n await ctx.send(f'Invalid setting \"{setting}\". Valid choices are:'\n f' [{\", \".join(valid_settings)}]')\n return\n\n valid_values = settings.get_valid_values(setting)\n if not settings.set(setting, value, valid_values):\n if valid_values:\n await ctx.send(f'invalid value, use [{\", \".join(valid_values)}]')\n return\n\n # Reload library when needed\n if setting in ['language', 'system', 'mode']:\n ctx.bot.reload_library()\n\n # Reload cogs when needed\n if setting in ['system', 'mode']:\n try:\n logging.info('%s triggered a cogs reload.', ctx.author)\n await ctx.send(f'{ctx.message.author.mention} triggered a mode change.')\n ctx.bot.reload_cogs()\n except (commands.ExtensionNotLoaded,\n commands.ExtensionNotFound,\n commands.NoEntryPointError,\n commands.ExtensionFailed):\n # Inform User that reload was not successful\n message_error = 'Error on reloading cogs.'\n logging.error(message_error)\n await ctx.send(message_error)\n return\n\n message_success = f'{setting} changed to \"{value}\".'\n logging.info(message_success)\n await ctx.send(message_success)\n return",
"def set_values(self):\n super(ResConfigSettings, self).set_values()\n self.env['ir.config_parameter'].sudo().set_param(\n 'pos_all_orders.pos_all_order', self.pos_all_order)\n self.env['ir.config_parameter'].sudo().set_param(\n 'pos_all_orders.n_days',\n self.n_days)",
"async def change(self, ctx: Context):\n\t\tawait self.send(f\"If you wish to see your settings, go on our site: https://asxlvm.github.io/#/settings • If you already saw your settings and wish to change them. What do you want to change?\", whisper=[ctx.author.id])\n\t\tawait asyncio.sleep(2)\n\t\tawait self.send(f\"Options: allowMentions [bool] • autoRejectFights [bool] • passiveMode [bool] • whisperEconomy [bool]• onJoinMsg [bool] • allowUserInteraction [bool] | [bool] = True / False\", whisper=[ctx.author.id])\n\t\twaitforevent = await self.wait_for('message', check=lambda message: ctx.author.id == message.author.id)\n\t\twfcl = waitforevent.content.lower()\n\t\tusers = await self.get_settings_data()\n\t\tuserid = ctx.author.id\n\t\tif wfcl == \"allowmentions true\":\n\t\t\tawait self.open_settings(userid, ctx.author.username)\n\t\t\tusers[str(userid)][\"allowMentions\"] = True\n\t\t\twith open(\"settings.json\", \"w\") as f:\n\t\t\t\tjson.dump(users, f)\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I have changed allowMentions to True for you.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"allowmentions false\":\n\t\t\tawait self.open_settings(userid, ctx.author.username)\n\t\t\tusers[str(userid)][\"allowMentions\"] = False\n\t\t\twith open(\"settings.json\", \"w\") as f:\n\t\t\t\tjson.dump(users, f)\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I have changed allowMentions to False for you.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"autorejectfights true\":\n\t\t\tawait self.open_settings(userid, ctx.author.username)\n\t\t\tusers[str(userid)][\"autoRejectFights\"] = True\n\t\t\twith open(\"settings.json\", \"w\") as f:\n\t\t\t\tjson.dump(users, f)\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I have changed autoRejectFights to True for you.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"autorejectfights false\":\n\t\t\tawait self.open_settings(userid, ctx.author.username)\n\t\t\tusers[str(userid)][\"autoRejectFights\"] = False\n\t\t\twith open(\"settings.json\", \"w\") as f:\n\t\t\t\tjson.dump(users, f)\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I have changed autoRejectFights to False for you.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"passivemode true\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} as there isn't economy right now, you may not change this setting.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"passivemode false\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} as there isn't economy right now, you may not change this setting.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"whispereconomy true\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} as there isn't economy right now, you may not change this setting.\")\n\t\telif wfcl == \"whispereconomy false\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} as there isn't economy right now, you may not change this setting.\")\n\t\telif wfcl == \"onjoinmsg true\":\n\t\t\tawait self.open_settings(userid, ctx.author.username)\n\t\t\tusers[str(userid)][\"onJoinMsg\"] = True\n\t\t\twith open(\"settings.json\", \"w\") as f:\n\t\t\t\tjson.dump(users, f)\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I have changed onJoinMsg to True for you.\", whisper=[ctx.author.id])\n\t\telif wfcl 
== \"onjoinmsg false\":\n\t\t\tawait self.open_settings(userid, ctx.author.username)\n\t\t\tusers[str(userid)][\"onJoinMsg\"] = False\n\t\t\twith open(\"settings.json\", \"w\") as f:\n\t\t\t\tjson.dump(users, f)\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I have changed onJoinMsg to False for you.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"allowuserinteraction true\":\n\t\t\tawait self.open_settings(userid, ctx.author.username)\n\t\t\tusers[str(userid)][\"allowUserInteraction\"] = True\n\t\t\twith open(\"settings.json\", \"w\") as f:\n\t\t\t\tjson.dump(users, f)\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I have changed allowUserInteraction to True for you.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"allowuserinteraction false\":\n\t\t\tawait self.open_settings(userid, ctx.author.username)\n\t\t\tusers[str(userid)][\"allowUserInteraction\"] = True\n\t\t\twith open(\"settings.json\", \"w\") as f:\n\t\t\t\tjson.dump(users, f)\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I have changed allowUserInteraction to True for you.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"allowmentions\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} you didn't supply a boolean, run the command again.\", whisper=[ctx.author.id])\n\n\t\telif wfcl == \"autorejectfights\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} you didn't supply a boolean, run the command again.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"passivemode\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} you didn't supply a boolean, run the command again.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"whispereconomy\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} you didn't supply a boolean, run the command again.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"onjoinmsg\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} you didn't supply a boolean, run the command again.\", whisper=[ctx.author.id])\n\t\telif wfcl == \"allowuserinteraction\":\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} you didn't supply a boolean, run the command again.\", whisper=[ctx.author.id])\n\t\telse:\n\t\t\tawait asyncio.sleep(2)\n\t\t\treturn await self.send(f\"{ctx.author.mention} I believe that is an incorrect argument, try running the command again.\", whisper=[ctx.author.id])",
"def set_custom(self, custom):\n custom = clamp(custom, 1, 12)\n self._state.mode = custom\n self.send_command(Command.SET_CUSTOM, [int(custom)])",
"def get_bios_settings(bmc):\n bios_settings = bmc.list_bios_settings()\n # Convert the settings to something that is JSON-serialisable.\n settings = {}\n for param, value in bios_settings.items():\n setting = {}\n # Not all attributes exist on all settings, so allow them to be absent.\n attrs = {\n 'current_value',\n 'pending_value',\n 'possible_values',\n }\n for attr in attrs:\n if hasattr(value, attr):\n setting[attr] = getattr(value, attr)\n settings[param] = setting\n return settings",
"def commit_settings(self, param):\n try:\n if param.name() == 'kinesis_lib':\n try:\n sys.path.append(param.value())\n clr.AddReference(\"Thorlabs.MotionControl.DeviceManagerCLI\")\n clr.AddReference(\"Thorlabs.MotionControl.IntegratedStepperMotorsCLI\")\n clr.AddReference(\"Thorlabs.MotionControl.GenericMotorCLI\")\n import Thorlabs.MotionControl.IntegratedStepperMotorsCLI as Integrated\n import Thorlabs.MotionControl.DeviceManagerCLI as Device\n import Thorlabs.MotionControl.GenericMotorCLI as Generic\n Device.DeviceManagerCLI.BuildDeviceList()\n serialnumbers = [str(ser) for ser in\n Device.DeviceManagerCLI.GetDeviceList(Integrated.CageRotator.DevicePrefix)]\n\n except:\n serialnumbers = []\n self.settings.child(('serial_number')).setOpts(limits=serialnumbers)\n\n elif param.name() == 'polling_time':\n self.controller.StopPolling()\n QThread.msleep(500)\n self.controller.StartPolling(self.settings.child(('polling_time')).value())\n QThread.msleep(500)\n self.emit_status(ThreadCommand('update_main_settings', [['wait_time'], param.value(), 'value']))\n\n\n except Exception as e:\n self.emit_status(ThreadCommand('Update_Status', [getLineInfo() + str(e), 'log']))",
"def activateSettings (self):\r\n settings.loadSettings (os.path.expanduser(self.filename))\r\n self.nemeth_translator=settings.brailleTableToUse ()\r\n return settings.activateSettings ({\"braille\":self.nemeth_translator,\"speak\":self.speech_translator,\"preprocessor\":self.preprocessor})",
"def change_settings(settings, methods=['GET', 'POST']):\n message = resolve_settings(settings)\n socketio.emit('settings_update', SETTINGS)\n socketio.emit('log', message)",
"def test_patch_bios_policy(self):\n pass",
"def set_power_management(value: int) -> None:",
"def settingstowidgets(self):\n\n # disconnect before updating, otherwise\n # the current GUI settings will be reinstated\n # after the first GUI element is updated\n self.disconnect_all_widgets()\n\n self.spansliderInt.setLowerValue(int(self.ABsettings[\"intensity_range\"][0]))\n self.spansliderInt.setUpperValue(int(self.ABsettings[\"intensity_range\"][1]))\n print \"vis setting \",self.ABsettings[\"visible\"]\n if self.ABsettings[\"visible\"]:\n print \"setting \",self.objectName(), \" to visible\"\n self.abEnabledCB.setChecked(True)\n else:\n print \"setting \",self.objectName(), \" to invisible\"\n self.abEnabledCB.setChecked(False)\n self.spansliderZ.setLowerValue(int(self.ABsettings[\"zrange\"][0]))\n self.spansliderZ.setUpperValue(int(self.ABsettings[\"zrange\"][1]))\n #self.ABsettings[\"Antibody\"]=self.ab\n self.colorBox.setRGB(self.ABsettings[\"rgb\"])\n if self.isDAPIPanel:\n for rb in self.radiobuttons:\n print \"radio button \", str(rb.objectName())\n if str(rb.objectName()).split(\"_\")[0]==self.ABsettings[\"selected_DAPI_channel\"]:\n rb.setChecked(True)\n print \"is checked\"\n\n # reconnect everything\n self.connect_all_widgets()\n self.updateSettings()",
"def setCmsGenParameters(self, **args):\n self.cmsGenNode.applicationControls.update(args)\n return",
"def test_change_config(self):\n browser = Browser(self.app)\n portalURL = self.portal.absolute_url()\n browser.addHeader('Authorization', 'Basic %s:%s' % (SITE_OWNER_NAME, SITE_OWNER_PASSWORD))\n browser.open(portalURL + '/@@overview-controlpanel')\n browser.getLink('Image WatchDog settings').click()\n browser.getControl('Optimize PNG').selected = True\n browser.getControl('Enabled').selected = True\n browser.getControl('Save').click()\n\n registry = getUtility(IRegistry)\n settings = registry.forInterface(IImageWatchDogSettings)\n self.assertTrue(settings.optimize)\n self.assertTrue(settings.enabled)",
"def valuechange():\n\n tempmin.setMaximum(tempmax.value())\n tempmax.setMinimum(tempmin.value())\n hummin.setMaximum(hummax.value())\n hummax.setMinimum(hummin.value())\n\n self.variables.default_values_dict[\"settings\"][\n \"current_tempmin\"\n ] = tempmin.value()\n self.variables.default_values_dict[\"settings\"][\n \"current_tempmax\"\n ] = tempmax.value()\n self.variables.default_values_dict[\"settings\"][\n \"current_hummin\"\n ] = hummin.value()\n self.variables.default_values_dict[\"settings\"][\n \"current_hummax\"\n ] = hummax.value()\n\n max = build_command(\n self.variables.devices_dict[\"temphum_controller\"],\n (\"set_hummax\", hummax.value()),\n )\n min = build_command(\n self.variables.devices_dict[\"temphum_controller\"],\n (\"set_hummin\", hummin.value()),\n )\n\n self.variables.vcw.write(\n self.variables.devices_dict[\"temphum_controller\"], max\n )\n self.variables.vcw.write(\n self.variables.devices_dict[\"temphum_controller\"], min\n )",
"def changeRingSetting(self):\n #Input code to accommodate function of Ring setting",
"def update_readings(self, settings):\n self.settings = settings\n self.scale_watering.set(settings['watering'])\n self.scale_cycle.set(settings['cycle'])\n self.scale_lights_off.set(settings['lights_off'])\n self.scale_lights_on.set(settings['lights_on'])\n self.scale_photo1.set(settings['photo1'])\n self.scale_photo2.set(settings['photo2'])\n self.scale_smc1.set(settings['smc1'])\n self.scale_smc2.set(settings['smc2'])\n self.scale_smc3.set(settings['smc3'])\n self.scale_smc4.set(settings['smc4'])",
"def action_settings(self):\n\n cur_datadir = self.config.starbound_data_dir\n settings = SettingsDialog(self)\n settings.exec()\n new_datadir = self.config.starbound_data_dir\n if new_datadir:\n if cur_datadir != new_datadir:\n self.load_data()\n self.scene.refresh(self.data)\n else:\n self.close_world()\n\n # Make sure our menus are enabled/disabled as appropriate\n self.enforce_menu_state()\n\n # Re-focus the main window\n self.activateWindow()",
"def save(self):\n for name, obj in inspect.getmembers(self.ui):\n if isinstance(obj, QSpinBox):\n name = obj.objectName()\n value = obj.value()\n self.settings.setValue(name, value)\n\n if isinstance(obj, QDoubleSpinBox):\n name = obj.objectName()\n value = obj.value()\n self.settings.setValue(name, value)\n\n if isinstance(obj, QLineEdit):\n name = obj.objectName()\n value = obj.text()\n self.settings.setValue(name, value)\n\n if isinstance(obj, QRadioButton):\n name = obj.objectName()\n value = obj.isChecked()\n self.settings.setValue(name, value)\n\n if isinstance(obj, QComboBox):\n index = obj.currentIndex() # get current index from combobox\n value = obj.itemText(index)\n self.settings.setValue(name, value)",
"def set_settings(self, settings={}):\n # type: (dict) -> Entity\n if not settings:\n return\n\n # these are used to help with calculations\n t = ('auto', 'fixed')\n for v in ('position', 'size'):\n if v in settings:\n settings[v] = settings[v].lower()\n if settings[v] in t:\n self.settings[v] = settings[v]\n\n # these are inherent entity values\n for s in ['x', 'y', 'width', 'height']:\n self.type_def[s] = settings.get(s, 0)\n\n return self",
"def set_value(self, item, value):\n super(t_16_Bit_Options, self).set_value(item, value)\n\n if(item == t_16_Bit_Options.FAULT_ACTIVE):\n self.set_bools(value, self.faults_current, t_16_Bit_Options.BIT_FAULT_MAX )\n\n if(item == t_16_Bit_Options.FAULT_LATCHED):\n self.set_bools(value, self.faults_latched, t_16_Bit_Options.BIT_FAULT_MAX )",
"async def fishingsettings(self, ctx:commands.Context):",
"def update(self):\n if self.name == \"Settings\":\n args = [\"NAME:Settings\"]\n else:\n args = [\"NAME:\" + self.name, \"Enable:=\", self.Enable]\n if self.UserSpecifiedSettings:\n args += self.manualsettings\n else:\n args += self.autosettings\n if self.name == \"Settings\":\n self.meshmodule.EditGlobalMeshRegion(args)\n else:\n self.meshmodule.EditMeshRegion(self.name, args)\n return True",
"def set_ai(self, value):\n if value not in (\"easy\", \"normal\", \"advanced\"):\n raise SettingsError(\"Invalid choice\")\n self._parser.set(\"settings\", \"difficulty\", value)\n self._save()",
"def save_to_conf(self):\n for checkbox, (option, _default) in list(self.checkboxes.items()):\n self.set_option(option, checkbox.isChecked())\n for radiobutton, (option, _default) in list(self.radiobuttons.items()):\n self.set_option(option, radiobutton.isChecked())\n for lineedit, (option, _default) in list(self.lineedits.items()):\n self.set_option(option, to_text_string(lineedit.text()))\n for spinbox, (option, _default) in list(self.spinboxes.items()):\n self.set_option(option, spinbox.value())\n for combobox, (option, _default) in list(self.comboboxes.items()):\n data = combobox.itemData(combobox.currentIndex())\n self.set_option(option, from_qvariant(data, to_text_string))\n for (fontbox, sizebox), option in list(self.fontboxes.items()):\n font = fontbox.currentFont()\n font.setPointSize(sizebox.value())\n self.set_font(font, option)\n for clayout, (option, _default) in list(self.coloredits.items()):\n self.set_option(option, to_text_string(clayout.lineedit.text()))\n for (clayout, cb_bold, cb_italic), (option, _default) in list(self.scedits.items()):\n color = to_text_string(clayout.lineedit.text())\n bold = cb_bold.isChecked()\n italic = cb_italic.isChecked()\n self.set_option(option, (color, bold, italic))",
"def set_assist(self, a):\n a = int(a*10)\n lo = a&0xff\n hi = a>>8\n self.communicate('S', '\\x03' + chr(lo) + chr(hi) + '\\x01\\x80')",
"def save_to_conf(self):\r\n for checkbox, (option, _default) in list(self.checkboxes.items()):\r\n self.set_option(option, checkbox.isChecked())\r\n for radiobutton, (option, _default) in list(self.radiobuttons.items()):\r\n self.set_option(option, radiobutton.isChecked())\r\n for lineedit, (option, _default) in list(self.lineedits.items()):\r\n self.set_option(option, to_text_string(lineedit.text()))\r\n for spinbox, (option, _default) in list(self.spinboxes.items()):\r\n self.set_option(option, spinbox.value())\r\n for combobox, (option, _default) in list(self.comboboxes.items()):\r\n data = combobox.itemData(combobox.currentIndex())\r\n self.set_option(option, from_qvariant(data, to_text_string))\r\n for (fontbox, sizebox), option in list(self.fontboxes.items()):\r\n font = fontbox.currentFont()\r\n font.setPointSize(sizebox.value())\r\n self.set_font(font, option)\r\n for clayout, (option, _default) in list(self.coloredits.items()):\r\n self.set_option(option, to_text_string(clayout.lineedit.text()))\r\n for (clayout, cb_bold, cb_italic), (option, _default) in list(self.scedits.items()):\r\n color = to_text_string(clayout.lineedit.text())\r\n bold = cb_bold.isChecked()\r\n italic = cb_italic.isChecked()\r\n self.set_option(option, (color, bold, italic))",
"def update_magnetic_settings(self, key, value):\n\n if self._magnetic_settings:\n if key in self._magnetic_settings:\n self._magnetic_settings[key] = value\n else:\n print(\"key does not exist!! keys include: {ispin, magmom, nupdown, saxis, lsorbit,noncollinear}\")\n else:\n print(\"magnetic settings not present!\")",
"def bios_uuid(self, bios_uuid):\n\n self._bios_uuid = bios_uuid",
"def updateSettingsUI(self):\n\n pass",
"def updateSettings(self):\n self.parser.read(self.file)\n self.showTicker = self.parser.getboolean('Settings', 'showTicker')\n self.verbose = self.parser.getboolean('Settings', 'verbose')\n self.sleepTime = self.parser.getint('Settings', 'sleeptime')\n self.saveGraph = self.parser.getboolean('Settings', 'saveGraph')\n self.graphDPI = self.parser.getint('Settings', 'graphDPI')",
"def updateOptions(self):\r\n if self.varSegment.get() == \"binary\":\r\n self.checkSaveBinary.config(state=tk.DISABLED)\r\n else:\r\n self.checkSaveBinary.config(state=tk.NORMAL)",
"def save_settings(self):\n logger.info(f'Saving settings: {self.settings_dict}')\n for k, section in self.settings_dict.items():\n for setting_name in section.keys():\n value = self.get_control_value(setting_name)\n if value is not None:\n section[setting_name] = value\n\n write_settings(self.settings_dict)",
"def set_boost(self, setting: int) -> None:\n if self._tx_power >= 18:\n self._write_u8(_REG_TEST_PA1, setting)\n self._write_u8(_REG_TEST_PA2, setting)",
"def saveSettings(self):\n self.userFiles.applyData()\n self.userPersonal.applyData()",
"def redo_settings(self):\r\n cF.redo_settings()",
"def saveParameters(self):\n super(BuminTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Buminovich.Buminovich(**self.simArgs)",
"async def settings(self, ctx: Context):\n\t\ttry:\n\t\t\tawait self.open_settings(ctx.author.id, ctx.author.username)\n\t\t\tuserid = ctx.author.id\n\t\t\tusers = await self.get_settings_data()\n\t\t\tallowMentions = users[str(userid)][\"allowMentions\"]\n\t\t\tautoRejectFights = users[str(userid)][\"autoRejectFights\"]\n\t\t\tpassiveMode = users[str(userid)][\"passiveMode\"]\n\t\t\tallowUserInteraction = users[str(userid)][\"allowUserInteraction\"]\n\t\t\twhisperEconomy = users[str(userid)][\"whisperEconomy\"]\n\t\t\tonJoinMsg = users[str(userid)][\"onJoinMsg\"]\n\t\t\tlastPassive = users[str(userid)][\"lastPassive\"]\n\t\t\tawait self.send(f\"Your settings:ㅤㅤAllow Mentions: {allowMentions} (If False, bot will send your username instead of mentioning) • Auto-Reject Fights: {autoRejectFights} (If True, you can't accept fight requests as it will decline them automatically) • Passive Mode: {passiveMode} (If True, you can't rob/deposit/withdraw with economy but you also can't get robbed) • Allow User Interaction: {allowUserInteraction} (If True, users can't get information for you, ex. d!balance, d!stats, d!userinfo etc.)\", whisper=[ctx.author.id])\n\t\texcept Exception as e:\n\t\t\tprint(e)",
"def settings():\n # TODO: How should this be handled? Should a speaker's bio be stored\n # as a snapshot from event to event? It could be stored as part of a\n # talks.models.Presentation.\n from pygotham.forms import ProfileForm\n\n form = ProfileForm(request.form, obj=current_user)\n if form.validate_on_submit():\n form.populate_obj(current_user)\n db.session.commit()\n\n flash('Your profile has been updated.', 'success')\n\n return redirect(url_for('profile.settings'))\n\n return render_template('profile/settings.html', form=form)",
"def setShadowDefaults(self):\n for user in self.shadowDefault.keys():\n #if not self.userspace.has_key(user):\n self.userspace[user].info = self.shadowDefault[user]",
"def __fill_boot_settings_fields(profile, profile_elements):\n result = True\n selenium2lib = ui_lib.get_s2l()\n # Validate the profile in XML file\n __validate_boot_settings_properties_in_xml_file(profile)\n # If XML is fine, go ahead filling Boot Setting UI fields\n result &= ui_lib.wait_for_element_and_click(profile_elements.ID_COMBO_MENU_VIEW)\n result &= ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_LINK_BOOTSETTINGS,\n PerfConstants.DEFAULT_SYNC_TIME)\n ui_lib.wait_for_element_visible(profile_elements.ID_CHKBOX_MANAGE_BOOT)\n if profile.has_property(XML_MANAGE_BOOT_MODE_ATTRIBUTE) and profile.manageBoot == \"false\":\n result &= ui_lib.wait_for_checkbox_and_unselect(profile_elements.ID_CHKBOX_MANAGE_BOOT)\n elif profile.has_property(XML_BOOT_MODE_ATTRIBUTE):\n boot_mode_option = profile.bootMode\n logger._log_to_console_and_log_file(\" --> Selecting Boot Mode..\")\n __select_value_from_a_profile_combo_box(profile_elements.ID_COMBO_PROFILE_BOOT_MODE, profile_elements.ID_COMBO_PROFILE_BOOT_MODE_LIST % boot_mode_option)\n if boot_mode_option == CONSTANT_UEFI or boot_mode_option == CONSTANT_UEFI_OPTIMIZED:\n if profile.has_property(XML_BOOT_POLICY_ATTRIBUTE):\n boot_policy_option = profile.bootPolicy\n result &= __select_value_from_a_profile_combo_box(profile_elements.ID_COMBO_PROFILE_PXE_BOOT_POLICY, profile_elements.ID_COMBO_PROFILE_PXE_BOOT_POLICY_LIST % boot_policy_option)\n result &= ui_lib.wait_for_element_visible(profile_elements.ID_CHKBOX_PROFILE_BOOT_ORDER)\n if profile.has_property(XML_MANAGE_BOOT_ORDER_ATTRIBUTE) and profile.manageBootOrder == \"false\":\n selenium2lib.unselect_checkbox(profile_elements.ID_CHKBOX_PROFILE_BOOT_ORDER)\n else:\n selenium2lib.select_checkbox(profile_elements.ID_CHKBOX_PROFILE_BOOT_ORDER)\n # Set primary boot device\n if profile.has_property(XML_PRIMARY_BOOT_DEVICE):\n primary_boot_device = profile.primaryBootDevice\n result &= __select_value_from_a_profile_combo_box(profile_elements.ID_COMBO_PROFILE_PRIMARY_BOOT_DEVICE, profile_elements.ID_COMBO_PROFILE_PRIMARY_BOOT_DEVICE_LIST % primary_boot_device)\n elif boot_mode_option == CONSTANT_LEGACY_BIOS:\n __fill_boot_order(profile, profile_elements)\n else:\n __fill_boot_order(profile, profile_elements)\n return result",
"def set_pref(self, name, value):\r\n pass",
"def writeSettings(self):\n for i in range(1,N_STATION+1):\n vol = f\"vol{i}\"\n self.settings.setValue(vol,self.param.vol[i-1])\n info = f\"info{i}\"\n self.settings.setValue(info,self.param.info[i-1])\n ip = f\"ip{i}\"\n self.settings.setValue(ip,self.param.ip[i-1])\n muted = f\"muted{i}\"\n self.settings.setValue(muted,self.param.muted[i-1])",
"def set_parameters(cls):\r\n \"\"\" EXECUTE THIS FUNCTION IN THE FARM CLASS! \"\"\"\r\n cls.TSR, cls.RPM, cls.RAD, cls.BLA, cls.CHR, cls.SEC, cls.NT = \\\r\n np.loadtxt('settings.csv', delimiter=',', skiprows=1, unpack=True)",
"def set_from_dictionary(self, settings):\n for key, value in settings.items():\n if key in dir(self):\n setattr(self, key, value)\n continue\n msg = f'Invalid key value of {key} provided in dictionary of conversion settings'\n self.logger.warning(msg)\n if not config.silent:\n print(msg)",
"def set_param(self):\n with open(\"settings.txt\", \"r\") as f:\n filedata = f.read()\n settings = [_.split(\"=\") for _ in filedata.split(\"\\n\")]\n for setting in settings:\n if len(setting) < 2: # if blank line\n continue\n if setting[0] == self.param:\n setting[1] = str(self.param_value)\n\n with open(\"settings.txt\", \"w\") as f:\n for setting in settings:\n if len(setting) < 2: # if blank line\n continue\n f.write(setting[0] + \"=\" + setting[1] + \"\\n\")"
] | [
"0.66702175",
"0.6587007",
"0.6494527",
"0.5839522",
"0.5798138",
"0.57914835",
"0.5751046",
"0.57356983",
"0.5709629",
"0.570142",
"0.5681322",
"0.5674938",
"0.5627437",
"0.55195767",
"0.5518369",
"0.5467664",
"0.5458394",
"0.54570925",
"0.54484177",
"0.5436495",
"0.5432372",
"0.5416182",
"0.5415011",
"0.5408303",
"0.54063773",
"0.53664625",
"0.53383577",
"0.53281045",
"0.53280866",
"0.53018546",
"0.5296071",
"0.5292301",
"0.528628",
"0.52741593",
"0.52741593",
"0.52741593",
"0.5259832",
"0.5258412",
"0.52530587",
"0.5235126",
"0.5226797",
"0.52220803",
"0.5219927",
"0.52110565",
"0.52037185",
"0.51815593",
"0.5179159",
"0.5160812",
"0.51575977",
"0.5156164",
"0.5144545",
"0.51433796",
"0.51290387",
"0.5125814",
"0.51238084",
"0.5117703",
"0.5115613",
"0.5112819",
"0.5105685",
"0.5100534",
"0.5098978",
"0.5092918",
"0.50738883",
"0.50706667",
"0.50682765",
"0.5066701",
"0.5065147",
"0.50520116",
"0.5049501",
"0.5045173",
"0.50334334",
"0.5031868",
"0.50138235",
"0.50107795",
"0.5000822",
"0.49810866",
"0.4979891",
"0.49759683",
"0.49756905",
"0.49706933",
"0.4964066",
"0.4958002",
"0.49565357",
"0.49557337",
"0.49545565",
"0.49534318",
"0.49520776",
"0.493803",
"0.4937136",
"0.49363142",
"0.4934989",
"0.49316403",
"0.49254334",
"0.4922726",
"0.4916766",
"0.49136296",
"0.49085057",
"0.49075887",
"0.49054754",
"0.49050772"
] | 0.65265524 | 2 |
Get the iSCSI settings resource. | def _get_iscsi_settings_resource(self, data):
try:
iscsi_settings_uri = data['links']['Settings']['href']
except KeyError:
msg = ('iscsi settings resource not found.')
raise exception.IloCommandNotSupportedError(msg)
status, headers, iscsi_settings = self._rest_get(iscsi_settings_uri)
if status != 200:
msg = self._get_extended_error(iscsi_settings)
raise exception.IloError(msg)
return headers, iscsi_settings_uri, iscsi_settings | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_settings(self):\n return self.request({\n \"path\": \"/\" + UUID + \"/setting\"\n })",
"def get_settings(self):\n return self.settings",
"def get_settings(self):\n url = \"https://api.imgur.com/3/account/{0}/settings\".format(self.name)\n return self._imgur._send_request(url)",
"def settings():\n return _get_settings()[1]",
"def get_resource_config(target=False, force=None):\n return get_stored_property(ctx, 'resource_config', target, force)",
"def get_common_settings(self):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Settings/\"))",
"def get_settings(self):\n return AlgoliaUtils_request(self.client.headers, self.read_hosts, \"GET\", \"/1/indexes/%s/settings\" % self.url_index_name, self.client.timeout)",
"def _get_conf(self):\n self.press_conf = self.sysconf['PressureRegulators']\n return self.press_conf['PressureRegulator%d' % self.id_]",
"def _get_bios_settings_resource(self, data):\n try:\n bios_settings_uri = data['links']['Settings']['href']\n except KeyError:\n msg = ('BIOS Settings resource not found.')\n raise exception.IloError(msg)\n\n status, headers, bios_settings = self._rest_get(bios_settings_uri)\n if status != 200:\n msg = self._get_extended_error(bios_settings)\n raise exception.IloError(msg)\n\n return headers, bios_settings_uri, bios_settings",
"def get_settings():\n return db.get_data()",
"def get_raw(self):\n return self.settings",
"def get_raw(self):\n return self.settings",
"def get_raw(self):\n return self.settings",
"def get_raw(self):\n return self.settings",
"def getSettings(self):\n return self.cfg",
"def settings(self) -> Optional[pulumi.Input['ConfigurationServiceSettingsArgs']]:\n return pulumi.get(self, \"settings\")",
"def getResolution(self):\n # load it each time, since this setting is not limited to a single user\n projectSettingsDB = self.loadProjectSettings()\n try:\n resolution = projectSettingsDB[\"Resolution\"]\n return resolution\n except KeyError:\n msg = \"Database Error while reading projectSettings.json\"\n logger.error(msg)\n return None",
"def settings(self):\n return self._settings",
"def settings(self):\n return self._settings",
"def requested_config_vals():\n return {'transfer_stats_per_file':'opt'}",
"def settings(self) -> BaseSettings:\n return self._context.settings",
"def settings(self) -> BaseSettings:\n return self._context.settings",
"def ivy_settings(self):\r\n return self._ivy_settings",
"def get_config(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVm_GetConfig', self.handle))",
"def get_srv_config(self):\n\t\treturn Job(SDK.PrlSrv_GetSrvConfig(self.handle)[0])",
"def GetAWSSettings(self):\n # [GET] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/cloud-connect-aws/GetAWSSettings\n FULL_URL = self.base_url+'/cloud-connect-aws/combined/settings/v1'\n HEADERS = self.headers\n result = self.Result()\n try:\n response = requests.request(\"GET\", FULL_URL, headers=HEADERS, verify=False)\n returned = result(response.status_code, response.headers, response.json())\n except Exception as e:\n returned = result(500, {}, str(e))\n \n return returned",
"def get_config(self):\n if self.allow_reco():\n return self.chs_config()\n else:\n return self.get_config_j(self.id)",
"def get_current_bios_settings(self, only_allowed_settings=True):\n\n sushy_system = self._get_sushy_system()\n try:\n current_settings = sushy_system.bios.json\n except sushy.exceptions.SushyError as e:\n msg = (self._('The current BIOS Settings were not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n\n attributes = current_settings.get(\"Attributes\")\n return attributes",
"def _get_trs_opts(service_id):\n return trs_config()[service_id]",
"def getreplicationsettings(self):\n d = {}\n try:\n con = hcpsdk.Connection(self.target, debuglevel=self.debuglevel)\n except Exception as e:\n raise hcpsdk.HcpsdkError(str(e))\n else:\n self.connect_time = con.connect_time\n try:\n r = con.GET('/mapi/services/replication')\n except Exception as e:\n raise hcpsdk.HcpsdkError(str(e))\n else:\n if r.status == 200:\n # Good status, get and parse the Response\n x = r.read()\n self.service_time = con.service_time2\n for child in Et.fromstring(x):\n d[child.tag] = child.text\n else:\n raise (hcpsdk.HcpsdkError('{} - {}'.format(r.status, r.reason)))\n finally:\n # noinspection PyUnboundLocalVariable\n con.close()\n\n return d",
"def get_values(self):\n self.active_changes = False # (flag) Once changes are retrieved, we assume that they will be sent to the controller\n return self.settings",
"def get_values(self):\n res = super(ResConfigInherit, self).get_values()\n params = self.env['ir.config_parameter'].sudo().get_param\n product_restriction = params('sale_stock_restrict.product_restriction')\n check_stock = params('sale_stock_restrict.check_stock')\n res.update(\n product_restriction=product_restriction,\n check_stock=check_stock\n )\n return res",
"def get_skill_settings(self):\n return self.request({\n \"method\": \"GET\",\n \"path\": \"/\" + UUID + \"/skill/settings\",\n })",
"def get_network_settings(self, nReserved = 0):\n\t\treturn Job(SDK.PrlVmGuest_GetNetworkSettings(self.handle, nReserved)[0])",
"def _get_settings():\n # store_last_good=True tells config component to update the config file\n # in a cron job. Here we just read from the datastore.\n rev, cfg = config.get_self_config(\n SETTINGS_CFG_FILENAME, config_pb2.SettingsCfg, store_last_good=True)\n cfg = cfg or config_pb2.SettingsCfg()\n return rev, cfg",
"def scale_settings(self) -> pulumi.Output[Optional['outputs.ScaleSettingsResponse']]:\n return pulumi.get(self, \"scale_settings\")",
"def fusion_api_get_global_settings(self, uri=None, api=None, headers=None, param=''):\n return self.settings.get(uri, api, headers, param)",
"def get_setResistance(self):\n self.read(\":RES?\")",
"def _getDefaultSettings(cls):\n return {'minimumROIDimensions': 1,\n 'minimumROISize': None, # Skip testing the ROI size by default\n 'normalize': False,\n 'normalizeScale': 1,\n 'removeOutliers': None,\n 'resampledPixelSpacing': None, # No resampling by default\n 'interpolator': 'sitkBSpline', # Alternative: sitk.sitkBSpline,\n 'padDistance': 5,\n 'distances': [1],\n 'force2D': False,\n 'force2Ddimension': 0,\n 'label': 1,\n 'enableCExtensions': True,\n 'additionalInfo': True}",
"def settings(self):\r\n url = '{0}/userSettings'.format(self.get_url())\r\n return http.Request('GET', url), parsers.parse_json",
"def subscribed_osp_settings(self):\n return self._subscribed_osp_settings",
"def get_resolution(self):\n return self.__resolution",
"def scale_settings(self) -> Optional['outputs.DeploymentScaleSettingsResponse']:\n return pulumi.get(self, \"scale_settings\")",
"def network_settings(self): # type: () -> t.Dict[str, t.Any]\n return self.inspection['NetworkSettings']",
"def get_settings(self, transform):\n return self._handlers_by_class[transform.__class__].get_settings(transform)",
"def settings(self):\r\n return SettingResource(self)",
"def Resolution(self):\n\t\treturn self._get_attribute('resolution')",
"def user_settings(self):\n return self._user_settings",
"def getStorageConfig(self,storage):\n data = self.connect('get','storage/%s' % (storage),None)\n return data",
"def get_settings():\n with open('config/config.json') as data_file:\n settings = json.load(data_file)\n return settings",
"def _risco(self):\n return self.coordinator.risco",
"def _getSettings(checks):\r\n parser = _RCESettingsParser()\r\n\r\n if PATH not in parser.read(PATH):\r\n raise NoValidSettings('Config file is missing.')\r\n\r\n try:\r\n return _Settings.load(parser, checks)\r\n except (Error, ValueError) as e:\r\n raise NoValidSettings(str(e))",
"def get_settings(self):\n settings = self.client._perform_json(\n \"GET\", \"/projects/%s/apiservices/%s/settings\" % (self.project_key, self.service_id))\n\n return DSSAPIServiceSettings(self.client, self.project_key, self.service_id, settings)",
"def client_settings():\n return CLIENT_SETTINGS",
"def get_properties(self):\n return irmc_common.COMMON_PROPERTIES",
"def getSettings(self, guildId):\n return int(self.serverSettings[str(guildId)])",
"def GetFileCleanerSettings():\n obj = ndb.Key(FileCleanerSettings, FILE_CLEANER_SETTINGS_ID).get()\n return obj or DEFAULT_FILE_CLEANER_SETTINGS",
"def getResource(self):\n return self.__resource;",
"def get_quota_config_details(self, obj_fs):\n try:\n all_quota_config = self.unity_conn.get_quota_config(filesystem=obj_fs)\n fs_id = obj_fs.id\n\n if len(all_quota_config) == 0:\n LOG.error(\"The quota_config object for new filesystem \"\n \"is not updated yet.\")\n return None\n\n for quota_config in range(len(all_quota_config)):\n if fs_id and all_quota_config[quota_config].filesystem.id == fs_id and \\\n not all_quota_config[quota_config].tree_quota:\n msg = \"Quota config id for filesystem %s is %s\" \\\n % (fs_id, all_quota_config[quota_config].id)\n LOG.info(msg)\n return all_quota_config[quota_config]\n\n except Exception as e:\n errormsg = \"Failed to fetch quota config for filesystem {0} \" \\\n \" with error {1}\".format(fs_id, str(e))\n LOG.error(errormsg)\n self.module.fail_json(msg=errormsg)",
"def get_settings():\n return SettingCollection.build()",
"def getResistence(self):\n return self.resistence",
"def _cfg(self):\n if not hasattr(self, '__config'):\n self.__config = cuegui.Utils.getResourceConfig()\n return self.__config",
"def getResolution(self):\n return self.resolution",
"def site_settings(self):\r\n return users.SiteSettings(self)",
"async def randomizer_settings(self):\r\n return await http.request_generic(\r\n url=f'/api/randomizers/{self.randomizer}',\r\n method='get',\r\n returntype='json'\r\n )",
"def properties(self):\r\n return resources.Properties(self)",
"def getResource(self):\n\n return self.__resource;",
"def _get_nitro_response(self, service, response) :\n\t\ttry :\n\t\t\tresult = service.payload_formatter.string_to_resource(appfwcustomsettings_response, response, self.__class__.__name__)\n\t\t\tif(result.errorcode != 0) :\n\t\t\t\tif (result.errorcode == 444) :\n\t\t\t\t\tservice.clear_session(self)\n\t\t\t\tif result.severity :\n\t\t\t\t\tif (result.severity == \"ERROR\") :\n\t\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\t\telse :\n\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\treturn result.appfwcustomsettings\n\t\texcept Exception as e :\n\t\t\traise e",
"def _est_config(self):\n return self._est_method.config",
"def rssp(self):\n return self._rssp",
"def get_Srz(self):\n return self.Srz",
"def getSettings(self):\n context = findContext(self.request)\n switcher = component.getMultiAdapter((context, self.request),\n name=u\"themeswitcher\")\n\n return switcher.getSettings(self._old_getSettings)",
"def get_settings(self):\n return {\n \"game_name\": self.game_name,\n \"n_epochs\": self.n_epochs,\n \"n_episodes\": self.n_episodes,\n \"n_frames\": self.n_frames,\n \"agent\": self.agent.get_settings(),\n \"results_dir\": self.results_dir,\n \"use_minimal_action_set\": self.use_minimal_action_set,\n }",
"def grpc_settings(self) -> 'outputs.NotificationEndpointGrpcSettingsResponse':\n return pulumi.get(self, \"grpc_settings\")",
"def get_resul(self):\n return {'W': self.W}",
"def get_resul(self):\n return {'W': self.W}",
"def get_resul(self):\n return {'W': self.W}",
"def get_resul(self):\n return {'W': self.W}",
"def evrConfig(self):\n return getattr(getattr(self._data.evrConfig, self._name), 'evr')",
"def get(self) -> dict:\n return Config.get()",
"def list_idrac_settings(self):\n return self._idrac_cfg.list_idrac_settings()",
"def get_settings_resource(res_type, abbr, res_name):\n\t\n\tif zen_settings.has_key(res_type):\n\t\tresource = zen_settings[res_type];\n\t\tif (has_deep_key(resource, [res_name, abbr])):\n\t\t\treturn resource[res_name][abbr]\n\t\telif 'extends' in resource:\n\t#\t\tfind abbreviation in ancestors\n\t\t\tfor v in resource['extends']:\n\t\t\t\tif has_deep_key(zen_settings, [v, res_name, abbr]):\n\t\t\t\t\treturn zen_settings[v][res_name][abbr]\n\treturn None;",
"def get_settings():\n settings_path = os.path.join(get_config_home(), 'tcharmap', 'settings.yaml')\n try:\n return yaml.safe_load(open(settings_path))\n except FileNotFoundError:\n return {'auto_copy': False}",
"def __getSettingsFromStorage():\n return AccountSettings.getSettings(NEW_SETTINGS_COUNTER)",
"def __get_base_info_api(self):\r\n try:\r\n return Call_shelly_api(url=self.__api_address + \"/settings\")\r\n except ShellyException as err:\r\n _LOGGER.warning(err)",
"def settings(self):\r\n return settings.Settings(self)",
"def option_settings(self) -> pulumi.Output[Optional[Sequence['outputs.EnvironmentOptionSetting']]]:\n return pulumi.get(self, \"option_settings\")",
"def get_settings(self):\n return (self._frequency, self._duration)",
"def getRemoteiSCSI(self,node):\n data = self.connect('get','nodes/%s/scan/iscsi' % (node),None)\n return data",
"def get_config(self):\n return super().get_config()",
"def get_values(self):\n res = super(ResConfigSettings, self).get_values()\n params = self.env['ir.config_parameter'].sudo().get_param\n pos_all_order = params('pos_all_orders.pos_all_order')\n n_days = params('pos_all_orders.n_days')\n res.update(\n pos_all_order=pos_all_order,\n n_days=n_days\n )\n return res",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config"
] | [
"0.6224379",
"0.5955814",
"0.59340453",
"0.5871603",
"0.5869005",
"0.5833151",
"0.57728857",
"0.57651824",
"0.57078904",
"0.57052916",
"0.5703846",
"0.5703846",
"0.5703846",
"0.5703846",
"0.56948245",
"0.5670498",
"0.5628336",
"0.56175953",
"0.56175953",
"0.5589727",
"0.5574095",
"0.5574095",
"0.5556145",
"0.55392355",
"0.55094105",
"0.5488156",
"0.5481084",
"0.5457733",
"0.54547757",
"0.54328203",
"0.5432333",
"0.5420766",
"0.54197985",
"0.5417551",
"0.5407423",
"0.5396548",
"0.5377642",
"0.53448826",
"0.5342515",
"0.53327966",
"0.5331737",
"0.5317983",
"0.5305635",
"0.53036374",
"0.5297419",
"0.5287099",
"0.5267849",
"0.52644753",
"0.5258592",
"0.52558494",
"0.5247028",
"0.52412564",
"0.52329516",
"0.5224687",
"0.52241635",
"0.52121806",
"0.5210492",
"0.5209614",
"0.5193288",
"0.51873463",
"0.51864845",
"0.5183322",
"0.518053",
"0.5179719",
"0.51641667",
"0.5161965",
"0.51616955",
"0.51612836",
"0.5159966",
"0.5157336",
"0.5152453",
"0.514756",
"0.51413727",
"0.5134034",
"0.5133613",
"0.5133613",
"0.5133613",
"0.5133613",
"0.5124467",
"0.51196855",
"0.51070833",
"0.5105138",
"0.5100186",
"0.5094147",
"0.50909406",
"0.5085058",
"0.50811625",
"0.50767446",
"0.5071965",
"0.5071816",
"0.50712556",
"0.5059849",
"0.5059849",
"0.5059849",
"0.5059849",
"0.5059849",
"0.5059849",
"0.5059849",
"0.5059849",
"0.5059849"
] | 0.7113912 | 0 |
Get the Boot resource (e.g. BootSources). | def _get_bios_boot_resource(self, data):
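    # The BIOS settings payload advertises related resources under its
    # 'links' member; follow the 'Boot' href to reach the boot resource.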
try:
boot_uri = data['links']['Boot']['href']
except KeyError:
        msg = 'Boot resource not found.'
raise exception.IloCommandNotSupportedError(msg)
status, headers, boot_settings = self._rest_get(boot_uri)
if status != 200:
msg = self._get_extended_error(boot_settings)
raise exception.IloError(msg)
return boot_settings | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_persistent_boot_devices(self):\n # Check if the BIOS resource if exists.\n headers_bios, bios_uri, bios_settings = self._check_bios_resource()\n\n # Get the Boot resource.\n boot_settings = self._get_bios_boot_resource(bios_settings)\n\n # Get the BootSources resource\n try:\n boot_sources = boot_settings['BootSources']\n except KeyError:\n msg = (\"BootSources resource not found.\")\n raise exception.IloError(msg)\n\n try:\n boot_order = boot_settings['PersistentBootConfigOrder']\n except KeyError:\n msg = (\"PersistentBootConfigOrder resource not found.\")\n raise exception.IloCommandNotSupportedError(msg)\n\n return boot_sources, boot_order",
"def Sources():\n return _sources",
"def get_resource_loader(self):\n return self.game.resource_loader",
"def resources(self) -> pulumi.Output[Sequence['outputs.MachineExtensionResponse']]:\n return pulumi.get(self, \"resources\")",
"def resources(self):\n return self.__resources",
"def boot_configuration(self):\n bootconfs = self.get_logical_configuration(gdef.BOOT_LOG_CONF)\n if not bootconfs:\n return bootconfs\n assert len(bootconfs) == 1 # Only one boot configuration can exist for each device instance.\n return bootconfs[0]",
"def get_resources():\r\n global __res\r\n if __res == None:\r\n __init_resources()\r\n return __res",
"def get_resources():\r\n global __res\r\n if __res == None:\r\n __init_resources()\r\n return __res",
"def get_resources():\r\n global __res\r\n if __res == None:\r\n __init_resources()\r\n return __res",
"def get_resources():\n global __res\n if __res == None:\n __init_resources()\n return __res",
"def get_resources():\n global __res\n if __res == None:\n __init_resources()\n return __res",
"def get_resources():\n global __res\n if __res == None:\n __init_resources()\n return __res",
"def get_resources(self):\n return []",
"def get_boot_driver(self):\n return self._boot_driver",
"def sources(self) -> Optional[Sequence['outputs.AddressPrefixItemResponse']]:\n return pulumi.get(self, \"sources\")",
"def resource_mapping():\n return {\n 'OS::Heat::ResourceChain': ResourceChain,\n }",
"def resources(self):\n return [self]",
"def getbootinfo(self):\n self.mount()\n kernel = None\n inits = []\n for line in self.xlist(\"get-bootinfo\", IBASE)[1]:\n if line.startswith('+++'):\n kernel = line.split()[1]\n else:\n inits.append(line)\n self.unmount()\n if not inits:\n run_error(_(\"No initramfs found\"))\n return None\n if not kernel:\n run_error(_(\"GRUB problem:\\n\") + inits[0])\n return None\n return (kernel, inits)",
"def sources(self) -> Sequence[Any]:\n return pulumi.get(self, \"sources\")",
"def _get_source_rd(self):\n return self.__source_rd",
"def lookup(self):\r\n return resources.Lookup(self)",
"def source(self) -> XMLResource:\n return self.schema.source",
"def get_bokeh_resources() -> TemplateResourcesData:\n template_resources = TemplateResourcesData()\n template_resources.js = CDN.js_files[0]\n template_resources.css = CDN.css_files[0]\n\n return template_resources",
"def get_resource(self):\n from rowgenerators import parse_app_url # Here, to break an import cycle\n\n self._resource = self._downloader.download(self.inner)\n\n\n ru = parse_app_url(self._resource.sys_path,\n downloader=self.downloader,\n scheme_extension=self.scheme_extension,\n **self.frag_dict)\n\n\n return ru",
"def BootstrapBootstrap (name):\n module = sys.modules [__name__]\n return BootstrapSource (name, inspect.getsource (module), inspect.getsourcefile (module))",
"def getResource(self):\n return self.serviceClass.app.resource()",
"def get_resource(self):\n return self._stores",
"def get_power_source(self):\n self._info(\"get_power_source\")\n response = self.parent.power_manager.get_power_source()\n if response is not None:\n response = response[0]\n self.parent.controller.get_power_source_cb(response)\n return response",
"def sources(self):\n return self._sources",
"def resource_map(self):",
"def resources(self) -> \"Resources\":\n return self._resources",
"def getResources(self):\n\t\treturn deepcopy(self.server.resources)",
"def GetEventSources(self):\n return self._GetAttributeContainers('event_source')",
"def _GetResourceLoaders():\n loaders = []\n\n # Add all paths to list if they are specified on the command line (will warn\n # if any are invalid).\n # Otherwise add members of the default list iff they exist.\n if FLAGS['data_search_paths'].present:\n for path in FLAGS.data_search_paths:\n loaders.append(FileResourceLoader(path))\n else:\n for path in FLAGS.data_search_paths:\n if os.path.isdir(path):\n loaders.append(FileResourceLoader(path))\n loaders.extend(DEFAULT_RESOURCE_LOADERS)\n return loaders",
"def resources(self):\n return self._resources",
"def resources(self):\n return self._resources",
"def resources(self):\n return self._resources",
"def get_terraform_source():\n material = get_terraform_source_material()\n return _yield_terraform_source(material)",
"def other_coldboot(self):\n try:\n for req in self.request.other_requests():\n if (\n isinstance(req.activity, RebootActivity)\n and req.activity.coldboot\n ):\n return req\n except AttributeError: # self.request has not been set\n pass\n return",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def get_datasource_list():\n global datasource_list\n\n if not datasource_list:\n datasource_list = stixhelpers.get_datasources(get_srcs())\n\n return datasource_list",
"def get_war_eras():\n return datasources_service.get_war_eras()",
"def Sources(self):\n return self._sources",
"def get_http_boot_uri(self):\n try:\n sushy_system = self._get_sushy_system()\n http_boot_uri = sushy_system.http_boot_uri.httpbooturi\n except sushy.exceptions.SushyError as e:\n msg = (self._('Not able to find HTTP Boot URI. Error: '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n return http_boot_uri",
"def get_resource_from_class(klass):\n return _class_to_resources.get(klass, None)",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def source_list(self):\n source_list = self._source_list.copy()\n if 'wifi' in source_list:\n del source_list['wifi']\n\n if len(self._source_list) > 0:\n return list(source_list.values())\n else:\n return None",
"def __get_sources__(self):\n\n # Let's go to the Apt temporal dir.\n os.chdir(self.conf['AptTmp'])\n\n # Define a global Source file, all the *_Sources files are going to be in this file.\n global_sources_file = open(self.conf['CodeName'] + '_Sources', 'w')\n\n\t\t# The main/debian-installer is in main, so remove it.\n\t\tcomponents = self.conf['Components']\n\t\tif 'main/debian-installer' in components:\n\t\t\tcomponents.remove('main/debian-installer')\n\n # For every component defined...\n for component in components:\n # Download the Packages.gz file\n file = self.__get_packages_file__(self.conf[\"Mirror\"], \\\n \"%s_%s_Sources\" % (self.conf['CodeName'], component), \\\n component, \"source\" + \"/Sources.gz\")\n\n # \"cat\" it into the global_packages_file\n for line in file:\n print >>global_sources_file, line,\n file.close()\n\n\t\tglobal_sources_file.close()\n\t\treturn open(self.conf['CodeName'] + '_Sources', 'r')",
"def getResource(self):\n return self.__resource;",
"def create_external_resources(self) -> List[ResourceDescription]:\r\n return effects.get_effect_resources()",
"def from_sources(cls, sources: Iterable[Source]) -> \"BootloaderOverrides\":\n data = BootloaderOverrides()\n for source in sources:\n for key, value in source.overrides.items():\n if key in BOOTLOADER_OVERRIDE_KEYS_IN_SOURCE:\n key_without_prefix = key[len(\"target.\") :]\n setattr(\n data,\n key_without_prefix,\n BootloaderOverride(name=key_without_prefix, value=value, set_by=source.human_name),\n )\n return data",
"def GetBootDisk(self) -> 'AZComputeDisk':\n # pylint: disable=line-too-long\n disks = self.az_account.compute.ListDisks(\n resource_group_name=self.resource_group_name) # type: Dict[str, AZComputeDisk]\n # pylint: enable=line-too-long\n boot_disk_name = self.compute_client.virtual_machines.get(\n self.resource_group_name, self.name).storage_profile.os_disk.name\n if boot_disk_name not in disks:\n raise errors.ResourceNotFoundError(\n 'Boot disk not found for instance {0:s}'.format(self.resource_id),\n __name__)\n return disks[boot_disk_name]",
"def supported_boot_interfaces(self):\n return [fake.FakeBoot] + super().supported_boot_interfaces",
"def get_recipe_resource():\n return os.getenv(\"DKU_CUSTOM_RESOURCE_FOLDER\")",
"def get(self):\n\n return self.get_request_handler(request.headers).get_all_sources()",
"def getResource(self):\n\n return self.__resource;",
"def get_recipe_resource():\n return os.getenv(\"SKU_CUSTOM_RECIPE_RESOURCE_FOLDER\")",
"def source(self) -> list:\n sources = self.source_control.list_sources()\n sources_list = [source['label'] for source in sources]\n return sources_list",
"def source_resource_path(self) -> Optional[str]:\n return pulumi.get(self, \"source_resource_path\")",
"def get_resources(self):\n return list(self.get_inputs()) + list(self.get_outputs())",
"def get_resource_config(target=False, force=None):\n return get_stored_property(ctx, 'resource_config', target, force)",
"def get_packages_with_prefixes():\n return get_resources('packages')",
"def get_source_files(self):\n return zip(*self.distribution.scripts)[0]",
"def get_resources(self, **extra_args):\n return [lrms for lrms in self.resources.itervalues()]",
"def getloader(self):\n\t\treturn self.train_loader, self.test_loader",
"def resources(self):\n res = []\n for resource in self._resources:\n res = res + resource.resources()\n\n return res",
"def resources(self):\n res = []\n for resource in self._resources:\n res = res + resource.resources()\n\n return res",
"def get_boot_device(self):\n root_vol = None\n boot_vol = None\n for volume in self.volumes:\n if not volume.partitions:\n continue\n for partition in volume.partitions:\n if partition.mount_point == \"/\":\n root_vol = volume\n elif partition.mount_point == '/boot':\n boot_vol = volume\n\n if not boot_vol:\n return root_vol\n return boot_vol",
"def get_srcs():\n global ms\n global srcs\n\n if not srcs:\n # Update both of them if one was not already declared\n ms, srcs = stixhelpers.get_stix_memory_stores() \n \n return srcs",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def GetResourceExtension(self):\n\n return self.resource_extension",
"def get_datasource_of():\n global datasource_of\n\n if not datasource_of:\n datasource_of = stixhelpers.datasource_of()\n \n return datasource_of",
"def get_terraform_source_material(target=False):\n instance = get_ctx_instance(target=target)\n terraform_source = instance.runtime_properties.get('terraform_source')\n resource_config = get_resource_config(target=target)\n source = resource_config.get('source')\n if not isinstance(source, dict):\n resource_config.update({'source': source})\n if not terraform_source:\n terraform_source = update_terraform_source_material(\n source, target=target)\n update_resource_config(resource_config)\n return terraform_source",
"def resources(self):\n return list(self.get_resources_for_type(gdef.ResType_All))",
"def block_device_mappings(self) -> Optional[Sequence['outputs.ContainerRecipeInstanceBlockDeviceMapping']]:\n return pulumi.get(self, \"block_device_mappings\")",
"def resources(self):\n\n return self.FIXTURE.resources_collection(self)",
"def py_resources():\n aomi_mods = [m for\n m, _v in iteritems(sys.modules)\n if m.startswith('aomi.model')]\n mod_list = []\n mod_map = []\n for amod in [sys.modules[m] for m in aomi_mods]:\n for _mod_bit, model in inspect.getmembers(amod):\n if str(model) in mod_list:\n continue\n\n if model == Mount:\n mod_list.append(str(model))\n mod_map.append((model.config_key, model))\n elif (inspect.isclass(model) and\n issubclass(model, Resource) and\n model.config_key):\n mod_list.append(str(model))\n if model.resource_key:\n mod_map.append((model.config_key,\n model.resource_key,\n model))\n elif model.config_key != 'secrets':\n mod_map.append((model.config_key, model))\n\n return mod_map",
"def resource_spec(self) -> pulumi.Output['outputs.ZoneResourceSpec']:\n return pulumi.get(self, \"resource_spec\")",
"def sirsam_bs_conf(sirsam_bootstrap):\n return os.path.join(sirsam_bootstrap, 'bootstrapping.yaml')",
"def sources(self):\n return self._sources.keys()",
"def resource(self):\n return self.properties.get('resource',\n Entity(self.context, ResourcePath(\"resource\", self.resource_path)))",
"def applications(self):\n return [self.app] + self.mounts.values()",
"def getResource(self):\n pass;",
"def find_source(self, name):\n t = filter( lambda x: x.name==name, self.point_sources+self.extended_sources)\n return t[0] if len(t)==1 else None",
"def create_resources(self) -> List[ResourceDescription]:\r\n return self.resources",
"def resources(self) -> Sequence['outputs.GetResourcesResourceResult']:\n return pulumi.get(self, \"resources\")",
"def getSourcePaths(self, makeGlyphs=True, makeKerning=True, makeInfo=True):\n paths = []\n for name in self.sources.keys():\n paths.append(self.sources[name][0].path)\n return paths",
"def get_extended_resources(self, version):\n return {}",
"def get_bootarch(self):\n return self._bootarch",
"def ebs(self) -> Optional['outputs.ContainerRecipeEbsInstanceBlockDeviceSpecification']:\n return pulumi.get(self, \"ebs\")",
"def UseExistingBootDisk(disks):\n return any(disk.get('boot', False) for disk in disks)",
"def generate_resources(self, attr):\n resource = self._branching_cls(\n self.resource_name, attr, predicate=self.predicate)\n return ((path, resource) for path in chain([attr], self.aliases))",
"def _get_resources(self):\n while not self.is_stopped():\n pools = self.poolsonline.get_pools()\n tx_source = self.txonline.get_txsource()\n mempoolstate = self.mempool.state\n if mempoolstate and pools and tx_source:\n return pools, tx_source, mempoolstate\n # Resources aren't available due to some error elsewhere,\n # so get rid of stats to avoid giving stale stats to others.\n self.stats = None\n self.sleep(5)\n raise StopIteration",
"def _load_sources(self):\n ss_dir = SteelScriptDir('AppResponse', 'files')\n\n for svc in [PACKETS_REPORT_SERVICE_NAME,\n GENERAL_REPORT_SERVICE_NAME]:\n svc_version = self.appresponse.versions[svc]\n sw_version = (self.appresponse.get_info()['sw_version']\n .replace(' ', ''))\n sources_filename = ('{}-sources-{}-{}.pcl'\n .format(svc, svc_version, sw_version))\n sources_file = ss_dir.get_data(sources_filename)\n\n sources_file.read()\n\n if not sources_file.data:\n svcdef = self.appresponse.find_service(svc)\n\n # sources is a list of dictionaries\n sources = svcdef.bind('sources').execute('get').data['items']\n\n # the whole set of sources for current service\n all_sources = {}\n\n for source in sources:\n cols = source['columns']\n source['columns'] = \\\n OrderedDict(sorted(zip(map(lambda x: x['id'], cols),\n cols)))\n source['filters_on_metrics'] = \\\n source['capabilities']['filters_on_metrics']\n if 'granularities' not in source:\n source['granularities'] = None\n\n all_sources[source['name']] = source\n\n if source['name'] in report_source_to_groups:\n self._sources[source['name']] = source\n\n # source_file writes the whole set of sources to disk\n sources_file.data = all_sources\n sources_file.write()\n logger.debug(\"Wrote sources data into {}\"\n .format(sources_filename))\n else:\n logger.debug(\"Loading sources data from {}\"\n .format(sources_filename))\n # Only load valid sources based on settings\n for k, v in sources_file.data.iteritems():\n if k in report_source_to_groups:\n self._sources[k] = v\n\n return",
"def boot_in_play(self):\n return self._boot_in_play",
"def resource_spec(self) -> Optional[pulumi.Input['ZoneResourceSpecArgs']]:\n return pulumi.get(self, \"resource_spec\")",
"def get_installation_source(target=False):\n resource_config = get_resource_config(target=target)\n source = resource_config.get('installation_source')\n if not source:\n raise NonRecoverableError(\n 'No download URL for terraform binary executable file was '\n 'provided and use_external_resource is False. '\n 'Please provide a valid download URL.')\n return source"
] | [
"0.5812557",
"0.5714252",
"0.5702293",
"0.5645948",
"0.5568216",
"0.5541851",
"0.5470517",
"0.5470517",
"0.5470517",
"0.54481995",
"0.54481995",
"0.54481995",
"0.54442555",
"0.54271823",
"0.5424539",
"0.53723615",
"0.5370766",
"0.5336021",
"0.530553",
"0.5278246",
"0.5265546",
"0.5231954",
"0.5229536",
"0.52242064",
"0.5203322",
"0.51976836",
"0.5185501",
"0.5144898",
"0.51378727",
"0.5126003",
"0.5123203",
"0.51218385",
"0.51199484",
"0.5118389",
"0.5106389",
"0.5106389",
"0.5106389",
"0.5100597",
"0.50931233",
"0.50881076",
"0.50674087",
"0.50471574",
"0.5041221",
"0.50393873",
"0.503105",
"0.5017747",
"0.5017747",
"0.5017747",
"0.5017747",
"0.50164795",
"0.50150955",
"0.5010929",
"0.50061065",
"0.5005798",
"0.5002766",
"0.49945396",
"0.499046",
"0.4990028",
"0.49892333",
"0.49761847",
"0.49715215",
"0.49502382",
"0.494481",
"0.49376243",
"0.49290338",
"0.49126568",
"0.48939338",
"0.4889345",
"0.48882684",
"0.48882684",
"0.4887098",
"0.4886557",
"0.48839384",
"0.4880762",
"0.48515272",
"0.484603",
"0.48399138",
"0.4838563",
"0.48378485",
"0.48296243",
"0.48211306",
"0.4820285",
"0.4817776",
"0.48110867",
"0.48100507",
"0.48072386",
"0.48062512",
"0.4800295",
"0.47971228",
"0.47948122",
"0.47928375",
"0.47892535",
"0.47797787",
"0.4777318",
"0.47764197",
"0.4769527",
"0.4768036",
"0.4764953",
"0.47646534",
"0.47616112"
] | 0.5864545 | 0 |
Get the Mappings resource. | def _get_bios_mappings_resource(self, data):
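    # Like the Boot resource, Mappings hangs off the BIOS settings 'links'
    # member and is fetched with a plain REST GET.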
try:
map_uri = data['links']['Mappings']['href']
except KeyError:
        msg = 'Mappings resource not found.'
raise exception.IloCommandNotSupportedError(msg)
status, headers, map_settings = self._rest_get(map_uri)
if status != 200:
msg = self._get_extended_error(map_settings)
raise exception.IloError(msg)
return map_settings | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def mapping(self):\n return self._mapping",
"def get_mapping(self, ksf: str) -> InfoResMapping:\n irm = self.InfoResMapping(self, ksf)\n return irm",
"def getMapping(self):\n self._process()\n return self._mapping",
"def mapping(self):\n return self.request('_mapping', pylastica.request.Request.GET).data",
"def resource_map(self):",
"def get_mapping(self):\n if self.role:\n return self.role.get_mapping(self.mapping)\n\n return self.mapping",
"def map( self ) :\n\n self.readMap( )\n\n return( self.__map )",
"def mapping_properties(self) -> pulumi.Output['outputs.ConnectorMappingPropertiesResponse']:\n return pulumi.get(self, \"mapping_properties\")",
"def path_mapping(self) -> Optional[Sequence['outputs.ContentPathMapResponse']]:\n return pulumi.get(self, \"path_mapping\")",
"def get_map(self):\n return self.parent.controller.get_map()",
"def mappings(self) -> pulumi.Output[Optional[Sequence['outputs.TypePropertiesMappingResponse']]]:\n return pulumi.get(self, \"mappings\")",
"def readMap( self ) :\n\n if self.__map is None:\n mapFilePath = pathlib.Path(self.path)\n if not mapFilePath.is_absolute():\n mapFilePath = self.derivedPath / mapFilePath\n self.__map = Map.readXML_file(mapFilePath)\n self.__map.setAncestor(self)\n\n return self.__map",
"def mapped(self):\n return self.__mapped",
"def get(self):\n maps = Map.all()\n results = [map_object.serialize() for map_object in maps]\n return results, status.HTTP_200_OK",
"def get_map(self):\n return self.map",
"def get_map(self):\n return self.get_raw_ys()",
"def request_map():\n\n rospy.loginfo(\"Requesting the map\")\n rospy.wait_for_service('dynamic_map')\n getMap = rospy.ServiceProxy('dynamic_map', GetMap)\n g = getMap().map\n\n return g",
"def get_object_mappings(self):\n self.logger.debug(\"Requesting object mappings\")\n sm = yield self.omap.get_trap_mappings(config.pool)\n if sm != self.source_map:\n self.logger.debug(\"Setting object mappings to: %s\", sm)\n self.source_map = sm",
"def get_map(self) -> list:\n return self.map_obstacle",
"def _get_route_map(self):\n return self.__route_map",
"def get_resources(self):\n return []",
"def get_mapping(self, index):\n url = \"{url_home}/{index}/{function}\".format(url_home=self.url_elastic, index=index, function=\"_mapping\")\n res = rw.get(url, headers=self.headers)\n return res",
"def mapping_properties(self) -> pulumi.Input['ConnectorMappingPropertiesArgs']:\n return pulumi.get(self, \"mapping_properties\")",
"def map(self) -> Map:\n return self._map",
"def schema_mappings(self):\n pass",
"def resources(self):\n return self.__resources",
"def get_map(self):\n return self._locmap",
"def get_current_mappings(self):\n return {name: getattr(self, name) for name in self.__mapped_names}",
"def resources(self):\n return [self]",
"def MAP(self):\n return self.__map",
"def field_mappings(self) -> Optional[Sequence['outputs.FieldMappingResponse']]:\n return pulumi.get(self, \"field_mappings\")",
"def locations(self):\r\n return resource.Location(self)",
"def resources(self):\n return self._resources",
"def resources(self):\n return self._resources",
"def resources(self):\n return self._resources",
"def resources(self):\n\n return self.FIXTURE.resources_collection(self)",
"def _get_mapping_record(self):\n return self.__mapping_record",
"def path_mapping(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ContentPathMapArgs']]]]:\n return pulumi.get(self, \"path_mapping\")",
"def getObjectMap(self,fromMod,toMod):\n if self.objectMaps == None: self.loadObjectMaps()\n return self.objectMaps.get((fromMod,toMod),None)",
"def resources(self) -> \"Resources\":\n return self._resources",
"def manage_getPermissionMapping(self):\n wrapper = getattr(self, '_permissionMapper', None)\n if wrapper is None:\n wrapper = PM()\n\n perms = {}\n for p in self.possible_permissions():\n perms[getPermissionIdentifier(p)] = p\n\n r = []\n a = r.append\n for ac_perms in self.ac_inherited_permissions(1):\n p = perms.get(getPermissionMapping(ac_perms[0], wrapper), '')\n a({'permission_name': ac_perms[0], 'class_permission': p})\n return r",
"def getMappingSet(self,mappingSetId:str=None)->dict:\n if mappingSetId is None:\n raise ValueError(\"Require a mapping ID\")\n path = f\"/mappingSets/{mappingSetId}\"\n res = self.connector.getData(self.endpoint+path)\n return res",
"def GetMapping(\n self,\n request: pulumi.codegen.mapper_pb2.GetMappingRequest,\n context: grpc.ServicerContext,\n ) -> pulumi.codegen.mapper_pb2.GetMappingResponse:",
"def resources(self):\r\n return self.page.object_list",
"def pathMap(self):\n pass",
"def getMappingSets(self,name:str=None,prop:str=None,limit:int=100)->list:\n params ={\"limit\":limit}\n if name is not None:\n params['name'] = name\n if prop is not None:\n params['property'] = prop\n path = \"/mappingSets\"\n res = self.connector.getData(self.endpoint+path,params=params)\n data = res[\"data\"]\n return data",
"def get_datasource_mappings(connection: Connection, default_connection_map: Optional[bool] = False,\n application_id: Optional[str] = None, error_msg: Optional[str] = None):\n url = f\"{connection.base_url}/api/datasources/mappings\"\n response = connection.session.get(\n url=url, params={\n \"defaultConnectionMap\": default_connection_map,\n \"projectId\": application_id\n })\n if not response.ok:\n if error_msg is None:\n error_msg = \"Error fetching Datasource mappings\"\n response_handler(response, error_msg)\n return response",
"def get_mapping(cls):\n return {\n \"mappings\": {\n cls.get_mapping_type_name(): {\n \"properties\": {\n 'id': {'type': 'string'},\n 'text': {'type': 'string', 'analyzer': 'snowball'},\n }\n }\n }\n }",
"def security_mappings(self):\n return self._security_mappings",
"def _get_static_route_map(self):\n return self.__static_route_map",
"def resource_mapping():\n return {\n 'OS::Heat::ResourceChain': ResourceChain,\n }",
"def get_data(self) -> Tuple[PoliciesMap, ZonesMap, LinksMap]:\n return self.policies_map, self.zones_map, self.links_map",
"def getMappingSetMappings(self,mappingSetId:str=None)->dict:\n if mappingSetId is None:\n raise ValueError(\"Require a mapping ID\")\n path = f\"/mappingSets/{mappingSetId}/mappings\"\n res = self.connector.getData(self.endpoint+path)\n return res",
"def GetConfiguredObjectMap(self):\n return _gmat_py.GmatBase_GetConfiguredObjectMap(self)",
"def list_maps(self):\n return self._json_object_field_to_list(\n self._get_observation_json(), self.__MISSION_STRING)",
"def get_mapping(self, index):\n dslIndex = Index(using=self.es, name=index)\n return dslIndex.get_mapping()",
"def MetadataMap(self):\r\n return self._metadata_map",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def mapping(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"mapping\")",
"def mapping(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"mapping\")",
"def maps(self):\n refs = []\n for r in self.context.getBRefs():\n # get all the maps referencing the agency\n if r.Type() == \"SimMap\":\n refs.append(r)\n\n # get all the maps referencing a plan that references the agency\n # Note: perhaps only get the first map reference?\n elif r.Type() == \"Plan\":\n refs.extend([p for p in r.getBRefs() if p.Type() == 'SimMap'])\n return refs",
"def resources(self) -> Sequence['outputs.GetResourcesResourceResult']:\n return pulumi.get(self, \"resources\")",
"def resources(self):\n return list(self.get_resources_for_type(gdef.ResType_All))",
"def download_all_maps(self):\n return self._download_all_maps_recur()",
"def outputs(self):\n return {\"path_to_mapping_json\": File_IO(\n self.node.outputs[0])}",
"def templateMappings(self):\n raise NotImplementedError",
"def lookup(self):\r\n return resources.Lookup(self)",
"def load_maps(self):\r\n fname = self.path + '\\\\phase_maps.p'\r\n if os.path.isfile(fname):\r\n return pickle.load(open(fname, 'rb'))\r\n else:\r\n return {}",
"def mapping_names(self):\n return [self.basename]",
"def getResources(self):\n\t\treturn deepcopy(self.server.resources)",
"def get(self, request):\n doc_types_mappings = DoccodePluginMapping.objects.all()\n rules_json = []\n for rule in doc_types_mappings:\n rules_json.append(\n dict(\n doccode=rule.get_docrule().get_title(),\n id=rule.pk,\n )\n )\n log.info('RulesHandler.read request fulfilled')\n return Response(rules_json, status=status.HTTP_200_OK)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'ConnectorMapping':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = ConnectorMappingArgs.__new__(ConnectorMappingArgs)\n\n __props__.__dict__[\"connector_mapping_name\"] = None\n __props__.__dict__[\"connector_name\"] = None\n __props__.__dict__[\"connector_type\"] = None\n __props__.__dict__[\"created\"] = None\n __props__.__dict__[\"data_format_id\"] = None\n __props__.__dict__[\"description\"] = None\n __props__.__dict__[\"display_name\"] = None\n __props__.__dict__[\"entity_type\"] = None\n __props__.__dict__[\"entity_type_name\"] = None\n __props__.__dict__[\"last_modified\"] = None\n __props__.__dict__[\"mapping_properties\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"next_run_time\"] = None\n __props__.__dict__[\"run_id\"] = None\n __props__.__dict__[\"state\"] = None\n __props__.__dict__[\"tenant_id\"] = None\n __props__.__dict__[\"type\"] = None\n return ConnectorMapping(resource_name, opts=opts, __props__=__props__)",
"def _get_default_mapper(self):\n\n return self._datamappers['*/*']",
"def get_mapper(self) -> fsspec.mapping.FSMap:\n return FSStore(self.root_path, fs=self.fs)",
"def create_resources(self) -> List[ResourceDescription]:\r\n return self.resources",
"def get_map(self, name, return_type='image'):\n m = self.maps.get(name)\n if m is None:\n raise ValueError(\"No map with name '{}' found.\".format(name))\n return self.masker.inverse_transform(m) if return_type == 'image' else m",
"def getMappingFiles(self, study_id):\n try:\n con = self.getMetadataDatabaseConnection()\n results = con.cursor()\n items = []\n con.cursor().callproc('qiime_assets.get_mapping_files', [study_id, results])\n for row in results:\n items.append(row[0])\n return items\n except Exception, e:\n print 'Exception caught: %s.\\nThe error is: %s' % (type(e), e)\n return False",
"def load_mapping():\n return [l.strip() for l in open(ALL_URL_LIST)]",
"def get_material_mapping(self):\n return {name: self.get_material(name) for name in self.parts.keys()}",
"def locations(self):\r\n return Locations(self)",
"def ListConceptMappings(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def _read_resource_map(cls, bag_content_path, hydroshare_host='www.hydroshare.org'):\n rmap_path = os.path.join(bag_content_path, 'data', 'resourcemap.xml')\n if not os.path.exists(rmap_path):\n raise GenericResourceMeta.ResourceMetaException(\"Resource map {0} does not exist\".format(rmap_path))\n if not os.access(rmap_path, os.R_OK):\n raise GenericResourceMeta.ResourceMetaException(\"Unable to read resource map {0}\".format(rmap_path))\n\n res_meta = {}\n\n g = Graph()\n g.parse(rmap_path)\n # Get resource ID\n for s, p, o in g.triples((None, None, None)):\n if s.endswith(\"resourcemap.xml\") and p == rdflib.namespace.DC.identifier:\n res_meta['id'] = str(o)\n if res_meta['id'] is None:\n msg = \"Unable to determine resource ID from resource map {0}\".format(rmap_path)\n raise GenericResourceMeta.ResourceMetaException(msg)\n logger.debug(\"Resource ID is {0}\".format(res_meta['id']))\n\n # Build URI reference for #aggregation section of resource map\n res_root_uri = \"http://{host}/resource/{res_id}\".format(host=hydroshare_host, res_id=res_meta['id'])\n root_uri = res_root_uri\n res_agg_subj = \"{res_root_url}/data/resourcemap.xml#aggregation\".format(res_root_url=res_root_uri)\n res_agg = URIRef(res_agg_subj)\n\n # Get resource type\n type_lit = g.value(res_agg, rdflib.namespace.DCTERMS.type)\n if type_lit is None:\n raise GenericResourceMeta.ResourceMetaException(\n \"No resource type found in resource map {0}\".format(rmap_path))\n # Type literal is represented as 'http://example.com/terms/GenericResource', we want the part after\n # the final '/', or 'GenericResource'\n res_type_part = str(type_lit).rpartition('/')\n if res_type_part[1] == '':\n raise GenericResourceMeta.ResourceMetaException(\n \"No resource type found in resource map {0}\".format(rmap_path))\n res_meta['type'] = res_type_part[-1]\n logger.debug(\"\\tType is {0}\".format(res_meta['type']))\n\n # Get resource title\n title_lit = g.value(res_agg, rdflib.namespace.DC.title)\n if title_lit is None:\n raise GenericResourceMeta.ResourceMetaException(\n \"No resource title found in resource map {0}\".format(rmap_path))\n res_meta['title'] = str(title_lit)\n logger.debug(\"\\tTitle is {0}\".format(res_meta['title']))\n\n # Get list of files in resource\n res_meta['files'] = []\n res_root_uri_withslash = res_root_uri + '/'\n res_meta_path = None\n ore = rdflib.namespace.Namespace('http://www.openarchives.org/ore/terms/')\n for s, p, o in g.triples((res_agg, ore.aggregates, None)):\n if o.endswith('resourcemetadata.xml'):\n if res_meta_path is not None and o != res_meta_path:\n msg = \"More than one resource metadata URI found. \"\n msg += \"(first: {first}, second: {second}\".format(first=res_meta_path,\n second=o)\n raise GenericResourceMeta.ResourceMetaException(msg)\n res_meta_path = o.split(res_root_uri_withslash)[1]\n continue\n\n res_meta['files'].append(o.split(res_root_uri_withslash)[1])\n\n if res_meta_path is None:\n raise GenericResourceMeta.ResourceMetaException(\n \"No resource metadata found in resource map {0}\".format(rmap_path))\n\n logger.debug(\"\\tResource metadata path {0}\".format(res_meta_path))\n\n for uri in res_meta['files']:\n logger.debug(\"\\tContents: {0}\".format(uri))\n\n return (root_uri, res_meta_path, res_meta)",
"def map_to_app_resources(self, app):\n # TODO: Extract resources app data\n pass",
"def road_map(self) -> RoadMap:\n return self._road_map",
"def getMappingSetMapping(self,mappingSetId:str=None,mappingId:str=None)->dict:\n if mappingSetId is None:\n raise ValueError(\"Require a mappingSet ID\")\n if mappingId is None:\n raise ValueError(\"Require a mapping ID\")\n path = f\"/mappingSets/{mappingSetId}/mappings/{mappingId}\"\n res = self.connector.getData(self.endpoint + path)\n return res",
"def getMappedInfo(self):\n \n return self.mapped_info",
"def get_locations(self):\n try:\n output_json = {}\n total_locations = list(self.mongo_db_object.find_all(AppConfigurations.MONGO_DATABASE,\n AppConstants.LOCATION.MONGO_LOCATION_COLLECTION_NAME))\n output_json = total_locations\n return AppConstants.result_success_template(output_json)\n\n except Exception as e:\n print(\"Error while fetching the Location Data.\", str(e))",
"def get_mapping_type(cls):\n ...",
"def _get_mapping(self, cr, uid, referential_id, convertion_type='from_external_to_openerp',\n mapping_line_filter_ids=None, mapping_id=None, context=None):\n if not mapping_id:\n mapping_id = self._get_mapping_id(cr, uid, referential_id, context=context)\n if not mapping_id:\n raise except_osv(_('External Import Error'),\n _(\"The object %s doesn't have an external mapping\" % self._name))\n else:\n #If a mapping exists for current model, search for mapping lines\n\n mapping_type = convertion_type == 'from_external_to_openerp' and 'in' or 'out'\n mapping_line_filter = [('mapping_id', '=', mapping_id),\n ('type', 'in', ['in_out', mapping_type])]\n if mapping_line_filter_ids:\n mapping_line_filter += ['|',\n ('id', 'in', mapping_line_filter_ids),\n ('evaluation_type', '=', 'sub-mapping')]\n mapping_line_ids = self.pool.get('external.mapping.line').search(cr, uid, mapping_line_filter, context=context)\n if mapping_line_ids:\n mapping_lines = self.pool.get('external.mapping.line').read(cr, uid, mapping_line_ids, [], context=context)\n else:\n mapping_lines = []\n res = self.pool.get('external.mapping').read(cr, uid, mapping_id, context=context)\n alternative_key = [x['internal_field'] for x in mapping_lines if x['alternative_key']]\n res['alternative_keys'] = alternative_key or False\n res['key_for_external_id'] = res['key_for_external_id'] or 'id'\n res['mapping_lines'] = mapping_lines\n return res",
"def get_external_db_mapping(self) -> dict:\n external_map_path = self.param(\"external_db_map\")\n db_map = dict()\n if external_map_path is None: return db_map\n\n # Load the map\n with open(external_map_path, \"r\") as map_file:\n for line in map_file:\n if line.startswith(\"#\"): continue\n line = re.sub(r'#.*', '', line)\n if re.match(r'^\\s*$', line): continue\n (from_name, to_name, *rest) = line.strip().split(\"\\t\")\n if len(rest) > 0 and rest[0].upper() != \"SEQ_REGION\": continue\n if to_name == \"_IGNORE_\": continue\n db_map[from_name] = to_name\n return db_map",
"def block_device_mappings(self) -> Optional[Sequence['outputs.ContainerRecipeInstanceBlockDeviceMapping']]:\n return pulumi.get(self, \"block_device_mappings\")",
"def loadMapping(self, mapfile=\"./mapping.json\"):\n\t\ttry:\n\t\t\tfd = open(mapfile, \"r\")\n\t\t\tmappings = json.load(fd)\n\t\t\tif \"Sharing\" in mappings.keys():\n\t\t\t\tself.share_levels = mappings[\"Sharing\"]\n\t\t\tif \"Type\" in mappings.keys():\n\t\t\t\tself.type_map = mappings[\"Type\"]\n\t\t\tif \"Extra-Tag\" in mappings.keys():\n\t\t\t\tself.extra_tag = mappings[\"Extra-Tag\"]\n\t\t\tif \"Privacy\" in mappings.keys():\n\t\t\t\tself.privacy_levels = mappings[\"Privacy\"]\n\t\t\tfd.close()\n\t\texcept Exception as e:\n\t\t\tprint(\"IMPOSSIBLE TO LOAD MAPPINGS from %s\" % mapfile)\n\t\t\tprint(e)\n\t\t\tsys.exit(0)\n\t\treturn",
"def get_resources():\n return Response(f\"{Resource.get_all_resources()}\", 200, mimetype='text/plain')",
"def _get_mmap(self):\n if self.mmap is None:\n self.mmap = []\n for line in self._get_template_menu_lst():\n if line.startswith(\"mmap\"):\n self.mmap.append(line)\n\n debug.debug(\"got MMAP:\\n %s\" % \" \".join(self.mmap))\n return self.mmap",
"def get_resources():\n user_id = session[\"email\"]\n resources = fm.get_resources(user_id)\n returned_val = dict(resources=resources)\n return jsonify(returned_val)",
"def devices(self) -> Mapping[str, Device]:\n return MappingProxyType(self._devices)",
"def zone_mappings(self) -> Sequence['outputs.GetLoadBalancersBalancerZoneMappingResult']:\n return pulumi.get(self, \"zone_mappings\")"
] | [
"0.673186",
"0.6723476",
"0.67003196",
"0.6668532",
"0.6564718",
"0.6539853",
"0.6527213",
"0.6339044",
"0.63287383",
"0.6280277",
"0.62163085",
"0.60885906",
"0.607769",
"0.6042368",
"0.6038287",
"0.60309374",
"0.60278445",
"0.5881427",
"0.585224",
"0.5841346",
"0.58215725",
"0.57991636",
"0.57975084",
"0.5745911",
"0.57450783",
"0.5743332",
"0.5736136",
"0.5704447",
"0.5702205",
"0.57018113",
"0.56938344",
"0.568914",
"0.5672245",
"0.5672245",
"0.5672245",
"0.5666473",
"0.5658697",
"0.564213",
"0.56411546",
"0.5593814",
"0.55934775",
"0.55906796",
"0.553968",
"0.55250615",
"0.54965293",
"0.54788667",
"0.54378814",
"0.54337245",
"0.5430381",
"0.54180866",
"0.54095644",
"0.54030406",
"0.53932124",
"0.53924924",
"0.53623885",
"0.5360108",
"0.5358996",
"0.5315207",
"0.5315207",
"0.5315207",
"0.5315207",
"0.5309813",
"0.5309813",
"0.52985436",
"0.529247",
"0.5286074",
"0.52798766",
"0.5273037",
"0.52680534",
"0.52680326",
"0.526683",
"0.52498734",
"0.524537",
"0.52432674",
"0.5225746",
"0.5216794",
"0.5211163",
"0.51968515",
"0.5185392",
"0.5180161",
"0.51769906",
"0.51764536",
"0.5163866",
"0.5163085",
"0.5161777",
"0.5153684",
"0.51439905",
"0.51411927",
"0.5134279",
"0.51339954",
"0.5133881",
"0.5129312",
"0.5127361",
"0.51233786",
"0.51199824",
"0.5113041",
"0.5112348",
"0.510894",
"0.51073444",
"0.5105954"
] | 0.6955971 | 0 |
Checks if PATCH is supported on the iSCSI resource. | def _check_iscsi_rest_patch_allowed(self):
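    # PATCH support is probed via the Allow header on the iSCSI resource;
    # when it is absent there, fall back to the nested settings resource.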
headers, bios_uri, bios_settings = self._check_bios_resource()
# Check if the bios resource exists.
    if 'links' in bios_settings and 'iScsi' in bios_settings['links']:
iscsi_uri = bios_settings['links']['iScsi']['href']
status, headers, settings = self._rest_get(iscsi_uri)
if status != 200:
msg = self._get_extended_error(settings)
raise exception.IloError(msg)
if not self._operation_allowed(headers, 'PATCH'):
headers, iscsi_uri, settings = (
self._get_iscsi_settings_resource(settings))
self._validate_if_patch_supported(headers, iscsi_uri)
return iscsi_uri
else:
msg = ('"links/iScsi" section in bios'
' does not exist')
raise exception.IloCommandNotSupportedError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _validate_if_patch_supported(self, headers, uri):\n if not self._operation_allowed(headers, 'PATCH'):\n msg = ('PATCH Operation not supported on the resource '\n '\"%s\"' % uri)\n raise exception.IloError(msg)",
"def check_supported_features(self):",
"def test_patch_hyperflex_server_firmware_version(self):\n pass",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def is_ida_version_supported():\n major, minor = map(int, idaapi.get_kernel_version().split(\".\"))\n if major >= 7:\n return True\n print(\"GhIDA:: [!] IDA Pro 7.xx supported only\")\n return False",
"def verify_support():\n ostype, majorrelease, _ = get_os_release_data()\n if ostype not in _supported_os:\n _logger.info('OS type %s is not supported.', ostype)\n return False\n if majorrelease not in _supported_release:\n _logger.info('OS %s %s is not supported', ostype, majorrelease)\n return False\n return True",
"def is_ctu_capable():\n\n context = package_context.get_context()\n ctu_func_map_cmd = context.ctu_func_map_cmd\n try:\n version = subprocess.check_output([ctu_func_map_cmd, '-version'])\n except (subprocess.CalledProcessError, OSError):\n version = 'ERROR'\n return version != 'ERROR'",
"def test_patch_hyperflex_capability_info(self):\n pass",
"def testCheckAvailable(self):\n img = self.img\n img.inspect()\n with converter.RootMounted(img.converter._h,\n '/dev/VolGroup00/LogVol00'):\n c = img.converter\n installer = redhat.LocalInstaller(\n c._h, '/dev/VolGroup00/LogVol00',\n db.DB(['{}/conf/guestconv.db'.format(env.topdir)]),\n log.get_logger_object(test_helper.logger)\n )\n\n kernel = redhat.Package('kernel',\n version='2.6.9', release='89.EL',\n arch='i686')\n self.assertTrue(installer.check_available([kernel]))",
"def supported():\n return os.path.isfile(OPENCOR)",
"def is_system_usable_block_device(pydev_device):\n if pydev_device.get(\"ID_BUS\") == \"usb\":\n # Skip USB devices\n return False\n if pydev_device.get(\"DM_VG_NAME\") or pydev_device.get(\"DM_LV_NAME\"):\n # Skip LVM devices\n return False\n if constants.DEVICE_NAME_MPATH in pydev_device.get(\"DM_NAME\", \"\") and pydev_device.get(\"DM_PART\", \"\"):\n # Skip mpath partition devices\n return False\n if pydev_device.get(\"ID_FS_TYPE\") == constants.DEVICE_FS_TYPE_MPATH:\n # Skip mpath member devices\n return False\n id_path = pydev_device.get(\"ID_PATH\", \"\")\n if \"iqn.\" in id_path or \"eui.\" in id_path:\n # Skip all iSCSI devices, they are links for volume storage.\n # As per https://www.ietf.org/rfc/rfc3721.txt, \"iqn.\" or \"edu.\"\n # have to be present when constructing iSCSI names.\n return False\n if ((\"-fc-\" in id_path or \"-lun-\" in id_path) and\n is_valid_multipath(pydev_device.get('DEVNAME'))):\n return False\n if pydev_device.get(\"ID_VENDOR\") == constants.VENDOR_ID_LIO:\n # LIO devices are iSCSI, should be skipped above!\n LOG.error(\"Invalid id_path. Device %s (%s) is iSCSI!\" %\n (id_path, pydev_device.get('DEVNAME')))\n return False\n return True",
"def _check_patch_requirements(region_name,\n applied_patches=None,\n available_patches=None):\n\n api_token = None\n if applied_patches:\n patches_applied = patch_api.patch_is_applied(\n token=api_token,\n timeout=constants.PATCH_DEFAULT_TIMEOUT_IN_SECS,\n region_name=region_name,\n patches=applied_patches\n )\n if not patches_applied:\n raise wsme.exc.ClientSideError(_(\n \"The following patches must be applied before doing \"\n \"the kubernetes upgrade: %s\" % applied_patches))\n\n if available_patches:\n patches_available = patch_api.patch_is_available(\n token=api_token,\n timeout=constants.PATCH_DEFAULT_TIMEOUT_IN_SECS,\n region_name=region_name,\n patches=available_patches\n )\n if not patches_available:\n raise wsme.exc.ClientSideError(_(\n \"The following patches must be available before doing \"\n \"the kubernetes upgrade: %s\" %\n available_patches))",
"def is_supported(self) -> bool:\n\n # TODO logging ?\n # TODO ICMP error if ttl is zero\n return self._version == 4 and self._ihl >= 5 and self._ttl != 0",
"def test_patch_pci_switch(self):\n pass",
"def requirements():\n if fabric.api.sudo(\"grep 'release 7' /etc/redhat-release\",quiet=True).succeeded:\n \tprint blue(\"This is a Centos/RedHat 7 server. Please install AIDE.\")\n \treturn 1\n if not rpm_is_installed('glibc.*i686'):\n print red(\"GlibC i686 is not installed\")\n if not file_exists(\"/usr/local/tripwire/tfs/bin/tripwire\", use_sudo=True):\n print red(\"Tripwire is not installed\")",
"def isSonyMtpAppInstaller(info):\n operations = frozenset([\n SonyMtpAppInstaller.PTP_OC_GetProxyMessageInfo,\n SonyMtpAppInstaller.PTP_OC_GetProxyMessage,\n SonyMtpAppInstaller.PTP_OC_SendProxyMessageInfo,\n SonyMtpAppInstaller.PTP_OC_SendProxyMessage,\n ])\n return info.manufacturer == SONY_MANUFACTURER and 'sony.net/SEN_PRXY_MSG:' in info.vendorExtension and operations <= info.operationsSupported",
"def testSupported(t, env):\n c = env.c1.new_client(env.testname(t))\n sess = c.create_session()\n\n # Do a simple SECINFO_NO_NAME\n res = sess.compound([op.putrootfh(), op.secinfo_no_name(0)])\n check(res)",
"def testSupported2(t, env):\n c = env.c1.new_client(env.testname(t))\n sess = c.create_session()\n\n # GETFH after do a SECINFO_NO_NAME should get error NFS4ERR_NOFILEHANDLE\n res = sess.compound([op.putrootfh(), op.secinfo_no_name(0), op.getfh()])\n print res\n check(res, NFS4ERR_NOFILEHANDLE)",
"def test_patch_hyperflex_ext_iscsi_storage_policy(self):\n pass",
"def isTestCfgSupported(self, asTestCfg):\n\n # Check whether the disk variant is supported by the selected format.\n asVariants = self.getDiskFormatVariantsForTesting(asTestCfg[self.kiDiskFmt], [ asTestCfg[self.kiDiskVar] ]);\n if not asVariants:\n return False;\n\n # For iSCSI check whether we have targets configured.\n if asTestCfg[self.kiDiskFmt] == 'iSCSI' and not self.asIscsiTargets:\n return False;\n\n # Check for virt mode, CPU count and selected VM.\n if asTestCfg[self.kiVirtMode] == 'raw' \\\n and (asTestCfg[self.kiCpuCount] > 1 or asTestCfg[self.kiVmName] == 'tst-storage'):\n return False;\n\n # IDE does not support the no host I/O cache setting\n if asTestCfg[self.kiHostIoCache] == 'no-hostiocache' \\\n and asTestCfg[self.kiStorageCtrl] == 'IDE':\n return False;\n\n return True;",
"def is_vtd_supported(self):\n\t\treturn bool(call_sdk_function('PrlSrvCfg_IsVtdSupported', self.handle))",
"def test_patch_pci_device(self):\n pass",
"def allowed_csi_drivers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AllowedCSIDriverPatchArgs']]]]:\n return pulumi.get(self, \"allowed_csi_drivers\")",
"def checkPatchValidity(val):\n\n tag_list = val.split('-')\n if len(tag_list) < 5:\n return False\n\n if tag_list[0] not in os.environ.get('environment'):\n return False\n\n if tag_list[1] not in os.environ.get('platform'):\n return False\n\n if tag_list[2] not in os.environ.get('role'):\n return False \n\n if tag_list[3] not in os.environ.get('urgency'):\n return False \n\n if tag_list[4] not in os.environ.get('order'):\n return False\n\n return True",
"def test_client_can_do_patch_request(self):\n response = self.httpbin_4.test_requests_patch_method()\n self.assertEqual(response.request.method, 'PATCH')\n self.assertEqual(response.status_code, 200)",
"def OSSupportsIPv6(self) -> bool:",
"def check_platform():\n system = platform.system()\n distro = platform.platform()\n is_raspberry_pi = False\n try:\n info = open(\"/proc/cpuinfo\").read()\n except FileNotFoundError:\n is_raspberry_pi = False\n else:\n # bcm2708: Raspberry Pi 1\n # bcm2709: Raspberry Pi 2\n # bcm2710: Raspberry Pi 3\n is_raspberry_pi = 'BCM27' in info or 'ODROID' in info\n\n return system == \"Linux\" and (\n os.path.isfile('/proc/device-tree/hat/uuid') or\n 'boot2docker' in distro.lower() or\n is_raspberry_pi or\n os.path.isfile('/sys/hypervisor/uuid') or\n os.path.isdir('/var/lib/digitalocean')\n )",
"def test_update_hyperflex_server_firmware_version(self):\n pass",
"def _network_trunk_supported(self):\n if 'trunk' in self.network_extensions:\n return True\n return False",
"def _ens_psec_supported(self):\n pass",
"def check_supported(self, op):\n if op == \"series\":\n return (self.cgi_show_series is not None) or (self.cgi_show_series_wrapper is not None)\n elif op == \"info\":\n return self.cgi_jsoc_info is not None\n elif op == \"query\":\n return self.cgi_jsoc_info is not None\n elif op == \"email\":\n return self.cgi_check_address is not None\n elif op == \"export\":\n return (self.cgi_jsoc_info is not None) and (self.cgi_jsoc_fetch is not None)\n else:\n raise ValueError(f\"Unknown operation: {op!r}\")",
"def detect(self):\n # Get PCI devices\n lines = subprocess.check_output([\"lspci\", \"-n\"]).decode().split(\"\\n\")\n for line in lines:\n if len(line) > 0:\n class_id = \"0x{0}\".format(line.split()[1].rstrip(\":\")[0:2])\n if class_id == self.class_id:\n dev = line.split()[2].split(\":\")\n vendor_id = \"0x{0}\".format(dev[0])\n product_id = \"0x{0}\".format(dev[1])\n if vendor_id == self.vendor_id and product_id in self.devices:\n return True\n return False",
"def check_supported(x, indices, v, y, kernel_name=\"inplace_update\"):\n shape_indices = indices.get(\"shape\")\n shape_v = v.get(\"shape\")\n dtype_v = v.get(\"dtype\").lower()\n reg_v_len = 1\n for i in range(1, len(shape_v)):\n reg_v_len = reg_v_len * shape_v[i]\n\n if dtype_v in (\"float32\", \"int32\"):\n dtype_size = 4\n else:\n dtype_size = 2\n reg_v_size = reg_v_len * dtype_size\n\n try:\n if len(shape_indices) != 1 or (reg_v_size % 32 != 0):\n return False\n\n except RuntimeError:\n return False\n\n return True",
"def supports_operation(self, operation: str) -> bool:\n return True",
"def get_di_change_detection_supported( channel ):\n supported = bool32(0)\n CALL('GetPhysicalChanDIChangeDetectSupported', channel, byref(supported))\n return bool( supported.value )",
"def test_update_hyperflex_capability_info(self):\n pass",
"def get_supported_boot_devices(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).get_supported_boot_devices(task)\n else:\n return super(ipmitool.IPMIManagement,\n self).get_supported_boot_devices(task)",
"def _platform_compatible():\r\n raise NotImplementedError",
"def check_tbe_support(json_desc):\n if \"buffer_stitch\" in json_desc:\n logger.info(\"TBE not supports buffer stitch\")\n return False\n\n if \"parallel_fusion\" in json_desc:\n logger.info(\"TBE not supports parallel fusion\")\n return False\n\n if not json_desc.get(\"input_desc\"):\n logger.info(\"TBE not supports empty inputs\")\n return False\n\n for op in json_desc[\"op_desc\"]:\n op_name = op[\"name\"]\n if not get_op_reg_info(op_name, \"func\", False):\n logger.info(\"TBE op not registered: {}\".format(op_name))\n return False\n return True",
"def legacy_pep_syntax(self):\n returned = False\n if self.get_a_device_id():\n if self.valid_status_code(falcon.get_device_details_v1(DEVICE_ID)):\n returned = True\n return returned",
"def _os_supported(self, plugin):\r\n return sys.platform in plugin.plugin_object.get_supported_os()",
"def _check_image_is_supported(self):\n\t\tSUPPORTED = {}\n\t\tSUPPORTED['RECORD_TYPE'] = 'FIXED_LENGTH',\n\t\tSUPPORTED['SAMPLE_BITS'] = 8, 16\n\t\tSUPPORTED['SAMPLE_TYPE'] = ( 'UNSIGNED_INTEGER',\n\t\t\t\t'MSB_UNSIGNED_INTEGER',\n\t\t\t\t'LSB_INTEGER',\n\t\t\t\t'MSB_INTEGER'\n\t\t\t\t)\n\n\t\timageIsSupported = True\n\n\t\tif not self.labels.has_key('IMAGE'):\n\t\t\tif self.log: self.log.warn(\"No image data found\")\n\t\t\timageIsSupported = False\n\n\t\trecordType = self.labels['RECORD_TYPE']\n\t\timageSampleBits = int(self.labels['IMAGE']['SAMPLE_BITS'])\n\t\timageSampleType = self.labels['IMAGE']['SAMPLE_TYPE']\n\n\t\tif recordType not in SUPPORTED['RECORD_TYPE']:\n\t\t\terrorMessage = (\"RECORD_TYPE '%s' is not supported\") % (recordType)\n\t\t\tif self.raisesImageNotSupportedError:\n\t\t\t\traise ImageNotSupportedError(errorMessage)\n\t\t\timageIsSupported = False\n\t\tif imageSampleBits not in SUPPORTED['SAMPLE_BITS']:\n\t\t\terrorMessage = (\"SAMPLE_BITS '%s' is not supported\") % (imageSampleBits)\n\t\t\tif self.raisesImageNotSupportedError:\n\t\t\t\traise ImageNotSupportedError(errorMessage)\n\t\t\timageIsSupported = False\n\t\tif imageSampleType not in SUPPORTED['SAMPLE_TYPE']:\n\t\t\terrorMessage = (\"SAMPLE_TYPE '%s' is not supported\") % (imageSampleType)\n\t\t\tif self.raisesImageNotSupportedError:\n\t\t\t\traise ImageNotSupportedError(errorMessage)\n\t\t\timageIsSupported = False\n\n\t\treturn imageIsSupported",
"def supported_tifs(self):\n buf = ctypes.c_uint32()\n self._dll.JLINKARM_TIF_GetAvailable(ctypes.byref(buf))\n return buf.value",
"def is_vserver_kernel():\n\n kinfo = commands.getoutput('/bin/uname -a').split()[2]\n return '-vs' in kinfo",
"def distributionRequiresNoTtyPatch():\n\tdistributor = Distribution().distributor.lower()\n\n\treturn bool('redhat' in distributor or 'centos' in distributor)",
"def _check_ops(self):\n required_ops = ['san_ip', 'san_login', 'san_password']\n for attr in required_ops:\n if not getattr(self.configuration, attr, None):\n raise exception.InvalidInput(reason=_('%s is not set.') % attr)\n\n replica = self.configuration.safe_get('replication_device')\n if replica and isinstance(replica, list):\n replica_ops = ['backend_id', 'login', 'password', 'rpo']\n for attr in replica_ops:\n if attr not in replica[0]:\n msg = _('replication_device %s is not set.') % attr\n raise exception.InvalidInput(reason=msg)\n self.replica = Replication(replica[0])",
"def legacy_opid_syntax(self):\n returned = False\n if self.get_a_device_id():\n if self.valid_status_code(falcon.GetDeviceDetailsV1(ids=DEVICE_ID)):\n returned = True\n return returned",
"def isSonyMtpCamera(info):\n operations = frozenset([\n SonyMtpCamera.PTP_OC_SonyDiExtCmd_write,\n SonyMtpCamera.PTP_OC_SonyDiExtCmd_read,\n SonyMtpCamera.PTP_OC_SonyReqReconnect,\n ])\n return info.manufacturer == SONY_MANUFACTURER and info.vendorExtension == '' and operations <= info.operationsSupported",
"def is_supported_interaction(self, td, name):\n\n raise NotImplementedError()",
"def has_intel_os(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def _is_supported_version(cls, sdk: WindowsSDK) -> bool:\n try:\n version_split = sdk.version.split(\".\")\n if not sdk.version.startswith(cls.SDK_VERSION):\n return False\n if int(version_split[2]) < cls.SDK_MIN_VERSION:\n return False\n except (AttributeError, ValueError, IndexError):\n return False\n\n return True",
"def incompatible_device(self) -> bool:\n return pulumi.get(self, \"incompatible_device\")",
"def new_pep_syntax(self):\n returned = False\n if self.get_a_device_id():\n if self.valid_status_code(falcon.get_device_details_v2(ids=DEVICE_ID)):\n returned = True\n return returned",
"def new_opid_syntax(self):\n returned = False\n if self.get_a_device_id():\n if self.valid_status_code(falcon.GetDeviceDetailsV2(ids=DEVICE_ID)):\n returned = True\n return returned",
"def check_subsystem_commands(self):\n self.communications.check_controls()\n self.__check_video()\n self.__check_picture()\n self.__check_ping()\n self.__check_motion()",
"def is_widget_supported(self, major, minor=None):\n assert isinstance(major, int)\n assert isinstance(minor, int) or minor is None\n\n # no restrictions exists\n if 'supported_by' not in self.config:\n return True\n\n if minor is not None:\n version_specific = 'wx%s%s' % (major, minor)\n if version_specific in self.config['supported_by']:\n return True\n\n version_generic = 'wx%s' % major\n if version_generic in self.config['supported_by']:\n return True\n\n return False",
"def verify_host(self):\n super().verify_host()\n if not self.use_docker:\n if self.tools.host_os != \"Linux\":\n raise UnsupportedHostError(self.supported_host_os_reason)",
"def is_io_uring_supported():\n return compare_versions(get_kernel_version(), MIN_KERNEL_VERSION_FOR_IO_URING) >= 0",
"def is_system(self) -> bool:",
"def is_available():",
"def platform_supported(self):\n return platform.system().lower() in self.platforms if self.platforms else False",
"def test_update_pci_switch(self):\n pass",
"def test_patch(self, patch):\n self.clean()\n error = self.apply_patch(patch)\n diff = self.run(['git', 'diff', 'origin/master'])\n self.clean()\n if error != '':\n return False, error\n if diff == '':\n # No error message is returned for empty diff. The patch might be\n # empty or has been exported.\n return False, ''\n return True, ''",
"def is_mocking():\n from . import core\n return len(core.PATCHERS.targets) > 0",
"def has_legacy_image(self):\n pass",
"def has_legacy_image(self):\n pass",
"def isValidrequest(cls, mgr, fid, op, tmpcls, slot, session=None):\n ormop = clsmanager.getConfigOperation(op)\n if session is not None:\n cls.getclsoptions(tmpcls, session)\n if ormop in optionsdict[tmpcls]['OPTIONS']:\n if cls.getClsStageSupported(tmpcls, op, slot) is True:\n inputs = mgr.get(fid, tmpcls, op, slot, session)\n if len(inputs) > 0:\n return True\n return False",
"def _check_kvm():\n if not os.path.exists(os.path.join(os.sep, 'dev', 'kvm')):\n raise CommandError('KVM interface not found - check that /dev/kvm '\n 'exists. Alternatively, you can disable KVM (-n '\n 'option) or download pre-built images (-d option)')",
"def check_os():\n if sys.platform == \"win32\":\n print(\"WARNING:\")\n print(\"This program use Scapy. Scapy is primarily being developed for Unix-like systems and works best on those platforms.\")\n print(\"You should to change your OS, because some Scapy functions may not be available.\")\n time.sleep(5)",
"def test_check_system_python_api(self):\n\n errors, successes = check_system.check_system()\n self.assertTrue(len(errors) + len(successes) >= 4)",
"def test_supported_protocol(self):\n assert self.handler.SUPPORTED_PROTOCOL is None",
"def test_cdhit_supported_version(self):\r\n self.assertTrue(which('cd-hit'),\r\n \"cd-hit not found. This may or may not be a problem depending on \" +\r\n \"which components of QIIME you plan to use.\")\r\n # cd-hit does not have a version print in their program\r",
"def CheckKVM():\n return os.path.exists('/dev/kvm')",
"def supports(self, x):\n return True",
"def required():\n kernel = __salt__['grains.item']('os') # pylint: disable=E0602,E0603\n\n # Disable rebooting for HDP clusters until that works reliably\n hadoop_distro = __salt__['pillar.get']('hadoop.distro') # pylint: disable=E0602,E0603\n if hadoop_distro == 'HDP':\n return False\n\n if kernel['os'] == \"CentOS\" or kernel['os'] == \"RedHat\":\n try:\n current_version = __salt__['cmd.run']('uname -r') # pylint: disable=E0602,E0603\n latest_version = __salt__['cmd.run']('rpm -q --last kernel') # pylint: disable=E0602,E0603\n latest_version = latest_version.split(\" \")\n latest_version = [\n version for version in latest_version if 'kernel' in version]\n latest_version = str(latest_version[0]).strip('kernel-') # pylint: disable=E1310\n if current_version == latest_version:\n return False\n except: # pylint: disable=W0702\n return False\n return True\n\n return __salt__['file.file_exists']('/var/run/reboot-required') # pylint: disable=E0602,E0603",
"def supported_firmware_interfaces(self):\n return [fake.FakeFirmware] + super().supported_firmware_interfaces",
"def bm_and_dvr_supported(self):",
"def supports_reboot(self):\n self.__not_implemented()",
"def is_product_supported(cls, product, role):\n return False",
"def chip_has_panicked(self):\n try:\n program_counter = self.where_is_pc() # a SourceInfo\n except ct.BundleMissingError:\n # The PC is in a downloadable capability which elf is not loaded\n # in ACAT. One is for sure the the PC is not in panic because\n # the panic is implemented in the main elf which is always present.\n return False\n\n if re.search(\"panic_diatribe\", program_counter.module_name) is not None:\n return True\n\n return False",
"def check_unsupported_ops(self, program):\n\n unsupported_ops = set()\n for block in program.blocks:\n for op in block.ops:\n if op.type == \"fetch\":\n continue\n if op.type not in _convert_map:\n unsupported_ops.add(op.type)\n if len(unsupported_ops) > 0:\n msg = \"The following operators are not supported for frontend Paddle: \"\n msg += \", \".join(unsupported_ops)\n raise tvm.error.OpNotImplemented(msg)",
"def on_powerpc():\n return processor() == 'powerpc' or machine().startswith('ppc')",
"def test_patch_hyperflex_auto_support_policy(self):\n pass",
"def SupportsIPv6(self) -> bool:",
"def is_patched(self) -> bool:\n client = Client()\n # Get the relevant service from the cluster\n service = client.get(Service, name=self.service_name, namespace=self._namespace)\n # Construct a list of expected ports, should the patch be applied\n expected_ports = [(p.port, p.targetPort) for p in self.service.spec.ports]\n # Construct a list in the same manner, using the fetched service\n fetched_ports = [(p.port, p.targetPort) for p in service.spec.ports] # type: ignore[attr-defined] # noqa: E501\n return expected_ports == fetched_ports"
] | [
"0.6394003",
"0.6082379",
"0.5752724",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.57124674",
"0.5584858",
"0.55626357",
"0.55101615",
"0.5490769",
"0.547495",
"0.54601276",
"0.54075706",
"0.5383899",
"0.53760725",
"0.53601485",
"0.5349725",
"0.53497034",
"0.53414375",
"0.5331483",
"0.5329133",
"0.5302826",
"0.52971023",
"0.52832663",
"0.52516234",
"0.52330077",
"0.5229102",
"0.52252305",
"0.5202618",
"0.51575327",
"0.51539856",
"0.5124117",
"0.5112868",
"0.5102589",
"0.5098159",
"0.50922394",
"0.5088139",
"0.50869113",
"0.508071",
"0.5079211",
"0.5062785",
"0.5058656",
"0.5048999",
"0.5048865",
"0.5046078",
"0.50446963",
"0.5039021",
"0.50314826",
"0.5019629",
"0.50146234",
"0.5003642",
"0.5001839",
"0.4984248",
"0.49772727",
"0.49715066",
"0.49706155",
"0.49691644",
"0.49630862",
"0.49607417",
"0.495837",
"0.495792",
"0.49509066",
"0.49451312",
"0.49388787",
"0.49223098",
"0.4917466",
"0.4916621",
"0.4916621",
"0.4910897",
"0.4908706",
"0.4907485",
"0.49068412",
"0.48962346",
"0.4891755",
"0.48896587",
"0.4883736",
"0.48827642",
"0.48732552",
"0.48725954",
"0.48665377",
"0.4861869",
"0.48467922",
"0.48459587",
"0.48436308",
"0.48427203",
"0.48406228",
"0.48393777"
] | 0.7114112 | 0 |
Change secure boot settings on the server. | def _change_secure_boot_settings(self, property, value):
system = self._get_host_details()
# find the BIOS URI
if ('links' not in system['Oem']['Hp'] or
'SecureBoot' not in system['Oem']['Hp']['links']):
msg = (' "SecureBoot" resource or feature is not '
'supported on this system')
raise exception.IloCommandNotSupportedError(msg)
secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']
# Change the property required
new_secure_boot_settings = {}
new_secure_boot_settings[property] = value
# perform the patch
status, headers, response = self._rest_patch(
secure_boot_uri, None, new_secure_boot_settings)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg)
# Change the bios setting as a workaround to enable secure boot
# Can be removed when fixed for Gen9 snap2
val = self._get_bios_setting('CustomPostMessage')
val = val.rstrip() if val.endswith(" ") else val+" "
self._change_bios_setting({'CustomPostMessage': val}) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_secure_boot_mode(self, secure_boot_enable):\n sushy_system = self._get_sushy_system()\n try:\n sushy_system.secure_boot.enable_secure_boot(secure_boot_enable)\n except exception.InvalidInputError as e:\n msg = (self._('Invalid input. Error %(error)s')\n % {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to set secure '\n 'boot settings on the server. Error: %(error)s')\n % {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def set_secure_boot_state(self, task, state):\n return irmc_common.set_secure_boot_mode(task.node, state)",
"def set_secure_boot_mode(self, secure_boot_enable):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('SecureBootEnable',\n secure_boot_enable)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def enable_secure_boot(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_secure_boot\")",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def get_secure_boot_mode(self):\n sushy_system = self._get_sushy_system()\n try:\n secure_boot_enabled = GET_SECUREBOOT_CURRENT_BOOT_MAP.get(\n sushy_system.secure_boot.current_boot)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to provide '\n 'information about secure boot on the server. '\n 'Error: %(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexCommandNotSupportedError(msg)\n\n if secure_boot_enabled:\n LOG.debug(self._(\"Secure boot is Enabled\"))\n else:\n LOG.debug(self._(\"Secure boot is Disabled\"))\n return secure_boot_enabled",
"def start_salt():\n with fabric_settings(warn_only=True):\n if env.host == env.master_server.public_ip:\n sudo(\"systemctl start salt-master\")\n time.sleep(3)\n sudo(\"systemctl start salt-minion\")",
"def system_protection_config():\n\n\tprint_section_header(\"GENERAL SYSTEM PROTECTION\", Fore.BLUE)\n\n\t# Enable Gatekeeper\n\tif prompt_yes_no(top_line=\"-> Enable Gatekeeper?\",\n\t bottom_line=\"Defend against malware by enforcing code signing and verifying downloaded applications before letting them to run.\"):\n\t\tprint_confirmation(\"Enabling Gatekeeper...\")\n\t\tsp.run('sudo spctl --master-enable', shell=True, stdout=sp.PIPE)\n\t\tsp.run('sudo spctl --enable --label \"Developer ID\"', shell=True, stdout=sp.PIPE)\n\n\t# Disable automatic software whitelisting\n\tif prompt_yes_no(top_line=\"-> Prevent automatic software whitelisting?\",\n\t bottom_line=\"Both built-in and downloaded software will require user approval for whitelisting.\"):\n\t\tprint_confirmation(\"Preventing automatic whitelisting...\")\n\t\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsigned', 'off'], stdout=sp.PIPE)\n\t\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsignedapp', 'off'], stdout=sp.PIPE)\n\n\t# Captive Portal\n\tif prompt_yes_no(top_line=\"-> Disable Captive Portal Assistant and force login through browser on untrusted networks?\",\n\t bottom_line=\"Captive Portal could be triggered and direct you to a malicious site WITHOUT any user interaction.\"):\n\t\tprint_confirmation(\"Disabling Captive Portal Assistant...\")\n\t\tsp.run(['sudo', 'defaults', 'write', '/Library/Preferences/SystemConfiguration/com.apple.captive.control', 'Active', '-bool', 'false'], stdout=sp.PIPE)",
"def setprivileged(miner: Miner, login, allowsetting):\n commands = get_changeconfigcommands(getminerfilename(miner), 'api-allow', allowsetting)\n sendcommands_and_restart(miner, login, commands)",
"def set_power(sid):\n # Resolve the passed parameters if any\n timer = None\n os = None\n if request.json:\n if timer in request.json:\n timer = request.json.get('timer')\n if os in request.json:\n os = request.json.get('os')\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n state = hosts.get(db, sid)['state']\n \n if state == 'on':\n # The host is on -- turn it off\n # TODO make a unix shell util file\n # TODO make a windows util file\n return\n elif state == 'off':\n # The host is off -- turn it on\n if timer is not None:\n sleep(timer)\n netutil.wake_on_lan(db, sid)\n ret = {'power': {'state': 'on'}}\n return jsonify(ret)\n # TODO find a keyboard driver and implement OS parameter",
"async def _hardcore_setheist(self, ctx):\r\n guild = ctx.guild\r\n config = await self.thief.get_guild_settings(guild)\r\n\r\n if config[\"Hardcore\"]:\r\n config[\"Hardcore\"] = False\r\n msg = \"Hardcore mode now OFF.\"\r\n else:\r\n config[\"Hardcore\"] = True\r\n msg = \"Hardcore mode now ON! **Warning** death will result in credit **and chip wipe**.\"\r\n await self.thief.config.guild(guild).Config.set(config)\r\n await ctx.send(msg)",
"def test_update_bios_boot_mode(self):\n pass",
"def set_password(self, system):\n if system[\"embedded_available\"] and system[\"controller_addresses\"]:\n for url in [\"https://%s:8443/devmgr\" % system[\"controller_addresses\"][0],\n \"https://%s:443/devmgr\" % system[\"controller_addresses\"][0],\n \"http://%s:8080/devmgr\" % system[\"controller_addresses\"][0]]:\n try:\n rc, response = self._request(\"%s/utils/login?uid=admin&xsrf=false&onlycheck=true\" % url, ignore_errors=True, url_username=\"admin\",\n url_password=\"\", validate_certs=False)\n\n if rc == 200: # successful login without password\n system[\"password_set\"] = False\n if system[\"password\"]:\n try:\n rc, storage_system = self._request(\"%s/v2/storage-systems/1/passwords\" % url, method=\"POST\", url_username=\"admin\",\n headers=self.DEFAULT_HEADERS, url_password=\"\", validate_certs=False,\n data=json.dumps({\"currentAdminPassword\": \"\", \"adminPassword\": True,\n \"newPassword\": system[\"password\"]}))\n\n except Exception as error:\n system[\"failed\"] = True\n self.module.warn(\"Failed to set storage system password. Array [%s].\" % system[\"ssid\"])\n break\n\n elif rc == 401: # unauthorized\n system[\"password_set\"] = True\n break\n except Exception as error:\n pass\n else:\n self.module.warn(\"Failed to retrieve array password state. Array [%s].\" % system[\"ssid\"])\n system[\"failed\"] = True",
"def lockdown_procedure():\n\tprint(\"----------\")\n\tprint_section_header(\"LOCKDOWN\", Fore.BLUE)\n\tprint_confirmation(\"Set secure configuration without user interaction.\")\n\n\t# Get sudo priv\n\tsp.run(\"sudo -E -v\", shell=True, stdout=sp.PIPE)\n\n\t####\n\t# FIREWALL\n\t####\n\n\tsp.run(['sudo', 'launchctl', 'load', '/System/Library/LaunchDaemons/com.apple.alf.agent.plist'], stdout=sp.PIPE)\n\tsp.run(['sudo', 'launchctl', 'load', '/System/Library/LaunchAgents/com.apple.alf.useragent.plist'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setglobalstate', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setloggingmode', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setstealthmode', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsigned', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsignedapp', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', 'pkill', '-HUP', 'socketfilterfw'], stdout=sp.PIPE)\n\n\t####\n\t# SYSTEM PROTECTION\n\t####\n\n\tsp.run('sudo spctl --master-enable', shell=True, stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsigned', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsignedapp', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo defaults write /Library/Preferences/SystemConfiguration/com.apple.captive.control Active -bool false'], stdout=sp.PIPE)\n\n\t####\n\t# METADATA STORAGE\n\t####\n\n\tsp.run(['rm', '-rfv', '\"~/Library/LanguageModeling/*\"', '\"~/Library/Spelling/*\"', '\"~/Library/Suggestions/*\"'])\n\tsp.run(['rm', '-rfv', '\"~/Library/Application Support/Quick Look/*\"'], stdout=sp.PIPE)\n\tsp.run([':>~/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2'], shell=True, stdout=sp.PIPE)\n\n\t####\n\t# USER SAFETY\n\t####\n\n\tsp.run(['defaults', 'write', 'com.apple.screensaver', 'askForPassword', '-int', '1'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'com.apple.screensaver', 'askForPasswordDelay', '-int', '0'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'NSGlobalDomain', 'AppleShowAllExtensions', '-bool', 'true'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'NSGlobalDomain', 'NSDocumentSaveNewDocumentsToCloud', '-bool', 'false'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'com.apple.finder', 'AppleShowAllFiles', '-boolean', 'true'], shell=True, stdout=sp.PIPE)\n\tsp.run(['killAll', 'Finder'], stdout=sp.PIPE)\n\n\t####\n\t# RESTART\n\t####\n\n\tfinal_configuration()",
"def get_secure_boot_mode(self):\n system = self._get_host_details()\n\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = ('\"SecureBoot\" resource or feature is not supported'\n ' on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # get the Secure Boot object\n status, headers, secure_boot_settings = self._rest_get(secure_boot_uri)\n\n if status >= 300:\n msg = self._get_extended_error(secure_boot_settings)\n raise exception.IloError(msg)\n\n return secure_boot_settings['SecureBootCurrentState']",
"def safe_boot_disabled(self, safe_boot_disabled):\n\n self._safe_boot_disabled = safe_boot_disabled",
"def boot(self, boot):\n\n self._boot = boot",
"def test_patch_bios_boot_mode(self):\n pass",
"def reboot_fpga(self):\n log.info(\"Booting FPGA from SPI prom\")\n self.set(\"FPGA_CTRL\", \"boot_fpga\", 1);",
"def __secure_boot(efivars_dir):\n enabled = False\n sboot = glob.glob(os.path.join(efivars_dir, \"SecureBoot-*/data\"))\n if len(sboot) == 1:\n # The minion is usually running as a privileged user, but is\n # not the case for the master. Seems that the master can also\n # pick the grains, and this file can only be readed by \"root\"\n try:\n with salt.utils.files.fopen(sboot[0], \"rb\") as fd:\n enabled = fd.read()[-1:] == b\"\\x01\"\n except PermissionError:\n pass\n return enabled",
"def libc_prctl_set_securebits():\n # straight from man capabilities(7):\n # \"An application can use the following call to lock itself, and all of\n # its descendants, into an environment where the only way of gaining\n # capabilities is by executing a program with associated file capabilities\"\n _call_c_style(\n libc,\n \"prctl\",\n PR_SET_SECUREBITS,\n (\n SECBIT_KEEP_CAPS_LOCKED\n | SECBIT_NO_SETUID_FIXUP\n | SECBIT_NO_SETUID_FIXUP_LOCKED\n | SECBIT_NOROOT\n | SECBIT_NOROOT_LOCKED\n ),\n 0,\n 0,\n 0,\n )",
"def init_settings(self):\n self.app.config.setdefault('SIMPLE_DOMAINS', [])\n self.app.config.setdefault('AWS_ACCESS_KEY_ID', environ.get('AWS_ACCESS_KEY_ID'))\n self.app.config.setdefault('AWS_SECRET_ACCESS_KEY', environ.get('AWS_SECRET_ACCESS_KEY'))\n self.app.config.setdefault('AWS_REGION', environ.get('AWS_REGION', self.DEFAULT_REGION))",
"def set_pending_boot_mode(self, boot_mode):\n boot_mode = boot_mode.lower()\n if boot_mode not in ['uefi', 'legacy']:\n msg = 'Invalid Boot mode specified'\n raise exception.IloInvalidInputError(msg)\n\n boot_properties = {'BootMode': boot_mode}\n\n if boot_mode == 'legacy':\n boot_properties['BootMode'] = 'LegacyBios'\n else:\n # If Boot Mode is 'Uefi' set the UEFIOptimizedBoot first.\n boot_properties['UefiOptimizedBoot'] = \"Enabled\"\n\n # Change the Boot Mode\n self._change_bios_setting(boot_properties)",
"def boot_config():\n # quick check to grab a config file from /boot partition.\n # this function helps users who cannot SSH/access the Pi,\n # but can access the microSD card\n if os.path.exists(BOOT_CONFIG_PATH):\n print(\"Configuration loaded from /boot directory.\")\n with open(BOOT_CONFIG_PATH) as boot_file:\n with open(CONFIG_FILE_PATH, 'w+') as config_file:\n for line in boot_file:\n config_file.write(line)",
"def boot(self):\n\n pass",
"def set_bootloader_mode(self, mode):\n self.check_validity()\n\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')",
"def setrestricted(miner: Miner, login, allowsetting):\n commands = get_changeconfigcommands(getminerfilename(miner), 'api-allow', allowsetting)\n sendcommands_and_restart(miner, login, commands)",
"def _editSysconfig():\n dbUrl = \"jdbc:postgresql://\" + getDbHostName() + \":\" + getDbPort() + \"/\" + basedefs.DB_NAME\n if \"DB_SECURE_CONNECTION\" in controller.CONF.keys() and controller.CONF[\"DB_SECURE_CONNECTION\"] == \"yes\":\n dbUrl = dbUrl + \"?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory\"\n\n proxyEnabled = utils.compareStrIgnoreCase(controller.CONF[\"OVERRIDE_HTTPD_CONFIG\"], \"yes\")\n utils.editEngineSysconfig(proxyEnabled=proxyEnabled,\n dbUrl=dbUrl,\n dbUser=utils.getDbUser(),\n fqdn=controller.CONF[\"HOST_FQDN\"],\n http=controller.CONF[\"HTTP_PORT\"],\n https=controller.CONF[\"HTTPS_PORT\"],\n javaHome=controller.CONF[\"JAVA_HOME\"])",
"def __fill_boot_settings_fields(profile, profile_elements):\n result = True\n selenium2lib = ui_lib.get_s2l()\n # Validate the profile in XML file\n __validate_boot_settings_properties_in_xml_file(profile)\n # If XML is fine, go ahead filling Boot Setting UI fields\n result &= ui_lib.wait_for_element_and_click(profile_elements.ID_COMBO_MENU_VIEW)\n result &= ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_LINK_BOOTSETTINGS,\n PerfConstants.DEFAULT_SYNC_TIME)\n ui_lib.wait_for_element_visible(profile_elements.ID_CHKBOX_MANAGE_BOOT)\n if profile.has_property(XML_MANAGE_BOOT_MODE_ATTRIBUTE) and profile.manageBoot == \"false\":\n result &= ui_lib.wait_for_checkbox_and_unselect(profile_elements.ID_CHKBOX_MANAGE_BOOT)\n elif profile.has_property(XML_BOOT_MODE_ATTRIBUTE):\n boot_mode_option = profile.bootMode\n logger._log_to_console_and_log_file(\" --> Selecting Boot Mode..\")\n __select_value_from_a_profile_combo_box(profile_elements.ID_COMBO_PROFILE_BOOT_MODE, profile_elements.ID_COMBO_PROFILE_BOOT_MODE_LIST % boot_mode_option)\n if boot_mode_option == CONSTANT_UEFI or boot_mode_option == CONSTANT_UEFI_OPTIMIZED:\n if profile.has_property(XML_BOOT_POLICY_ATTRIBUTE):\n boot_policy_option = profile.bootPolicy\n result &= __select_value_from_a_profile_combo_box(profile_elements.ID_COMBO_PROFILE_PXE_BOOT_POLICY, profile_elements.ID_COMBO_PROFILE_PXE_BOOT_POLICY_LIST % boot_policy_option)\n result &= ui_lib.wait_for_element_visible(profile_elements.ID_CHKBOX_PROFILE_BOOT_ORDER)\n if profile.has_property(XML_MANAGE_BOOT_ORDER_ATTRIBUTE) and profile.manageBootOrder == \"false\":\n selenium2lib.unselect_checkbox(profile_elements.ID_CHKBOX_PROFILE_BOOT_ORDER)\n else:\n selenium2lib.select_checkbox(profile_elements.ID_CHKBOX_PROFILE_BOOT_ORDER)\n # Set primary boot device\n if profile.has_property(XML_PRIMARY_BOOT_DEVICE):\n primary_boot_device = profile.primaryBootDevice\n result &= __select_value_from_a_profile_combo_box(profile_elements.ID_COMBO_PROFILE_PRIMARY_BOOT_DEVICE, profile_elements.ID_COMBO_PROFILE_PRIMARY_BOOT_DEVICE_LIST % primary_boot_device)\n elif boot_mode_option == CONSTANT_LEGACY_BIOS:\n __fill_boot_order(profile, profile_elements)\n else:\n __fill_boot_order(profile, profile_elements)\n return result",
"def get_secure_boot_state(self, task):\n return irmc_common.get_secure_boot_mode(task.node)",
"def stop_salt():\n with fabric_settings(warn_only=True):\n if env.host == env.master_server.public_ip:\n sudo(\"systemctl stop salt-master\")\n sudo(\"systemctl stop salt-minion\")",
"def set_one_time_boot(self, device, mac=None):\n self._update_persistent_boot([device], persistent=False, mac=mac)",
"def SetBootloaderEnv(script, name, val):\n script.AppendExtra('set_bootloader_env(\"%s\", \"%s\");' % (name, val))",
"def set_settings_devices(self):\n self.set_thermostat, self.set_humidifier, self.set_sprinklers, self.set_ventilation = self.settings[3:]",
"def reconfigure_keystone_to_use_ldap(self):\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"basic_env_for_reconfiguration\")\n cluster_id = self.fuel_web.get_last_created_cluster()\n devops_pr_controller = self.fuel_web.get_nailgun_primary_node(\n self.env.d_env.nodes().slaves[0])\n\n pr_controller = self.fuel_web.get_nailgun_node_by_devops_node(\n devops_pr_controller)\n\n self.show_step(2)\n config = utils.get_config_template('keystone_ldap')\n structured_config = get_structured_config_dict(config)\n self.fuel_web.client.upload_configuration(\n config,\n cluster_id,\n role=\"controller\")\n\n self.show_step(3)\n task = self.fuel_web.client.apply_configuration(cluster_id,\n role=\"controller\")\n\n self.show_step(4)\n self.fuel_web.task_wait(task, timeout=3600, interval=30)\n\n self.show_step(5)\n self.check_config_on_remote([pr_controller], structured_config)\n logger.info(\"New configuration was applied\")\n\n self.env.make_snapshot(\"reconfigure_keystone_to_use_ldap\")",
"def set_boot_device(self, device, persistent=False):\n\n operation = \"set_boot_device\"\n try:\n self.sp_manager.create_boot_policy()\n self.sp_manager.set_boot_device(device)\n\n except UcsException as ex:\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def initial_config(self, server_id):\n\n if server_id not in self.settings:\n self.settings[server_id] = {'inactive': True,\n 'output': [],\n 'cleanup': False,\n 'usercache': [],\n 'multiout': False\n }\n self.save_json()",
"def set_boot_device(self, task, device, persistent=False):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified.\") % device)\n\n uefi_mode = (\n boot_mode_utils.get_boot_mode(task.node) == 'uefi')\n\n # disable 60 secs timer\n timeout_disable = \"0x00 0x08 0x03 0x08\"\n ipmitool.send_raw(task, timeout_disable)\n\n # note(naohirot):\n # Set System Boot Options : ipmi cmd '0x08', bootparam '0x05'\n #\n # $ ipmitool raw 0x00 0x08 0x05 data1 data2 0x00 0x00 0x00\n #\n # data1 : '0xe0' persistent + uefi\n # '0xc0' persistent + bios\n # '0xa0' next only + uefi\n # '0x80' next only + bios\n # data2 : boot device defined in the dict _BOOTPARAM5_DATA2\n\n bootparam5 = '0x00 0x08 0x05 %s %s 0x00 0x00 0x00'\n if persistent:\n data1 = '0xe0' if uefi_mode else '0xc0'\n else:\n data1 = '0xa0' if uefi_mode else '0x80'\n data2 = _BOOTPARAM5_DATA2[device]\n\n cmd8 = bootparam5 % (data1, data2)\n ipmitool.send_raw(task, cmd8)\n else:\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified. \"\n \"Current iRMC firmware condition doesn't support IPMI \"\n \"but Redfish.\") % device)\n super(ipmitool.IPMIManagement, self).set_boot_device(\n task, device, persistent)",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def setup_salt():\n server = [s for s in env.bootmachine_servers if s.public_ip == env.host][0]\n\n if env.host == env.master_server.public_ip:\n run(\"touch /etc/salt/master\")\n append(\"/etc/salt/master\", \"file_roots:\\n base:\\n - {0}\".format(\n settings.REMOTE_STATES_DIR))\n append(\"/etc/salt/master\", \"pillar_roots:\\n base:\\n - {0}\".format(\n settings.REMOTE_PILLARS_DIR))\n run(\"systemctl enable salt-master\")\n run(\"touch /etc/salt/minion\")\n append(\"/etc/salt/minion\", \"master: {0}\".format(env.master_server.private_ip))\n append(\"/etc/salt/minion\", \"id: {0}\".format(server.name))\n append(\"/etc/salt/minion\", \"grains:\\n roles:\")\n for role in server.roles:\n append(\"/etc/salt/minion\", \" - {0}\".format(role))\n run(\"systemctl enable salt-minion\")",
"def set_boot_mode(self, task, mode):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='set_boot_mode')",
"def set_config(self): # called from button_set object \n self.settings['lights_on'] = self.lights_on.get()\n self.settings['lights_off'] = self.lights_off.get()\n self.settings['ambient_min'] = self.ambient_min.get()\n self.settings['soil_1'] = self.smc1.get()\n self.settings['soil_2'] = self.smc2.get()\n self.settings['soil_3'] = self.smc3.get()\n self.settings['soil_4'] = self.smc4.get()\n self.settings['overhead_level'] = self.overhead_level.get()\n\n # Save settings to config file in case of reboot / power-loss\n print \"UPDATING SETTINGS FILE\"\n with open(self.settings_path, 'w') as jsonfile:\n jsonfile.write(json.dumps(self.settings, indent=4))\n self.active_changes = True # (flag) changes are active!",
"def make_BootSettings(order, manageBoot=False):\n return {'manageBoot': manageBoot,\n 'order': order\n }",
"def make_BootModeSetting(manageMode, mode, pxeBootPolicy):\n return {'manageMode': manageMode,\n 'mode': mode,\n 'pxeBootPolicy': pxeBootPolicy\n }",
"def apply_secrets():\n for name, value in Secrets.__dict__.items():\n if name[0] != '_':\n os.environ[name] = value",
"def add_settings_early(self):\n\n # config settings\n config = {\n # some generic settings for every site, to point to location of some stuff\n mconst.DEF_SETTINGNAME_pkgdirimps_sitempacks: [pkgdirimp_sitempacks],\n mconst.DEF_SETTINGNAME_controllerroot: pkgdirimp_controllers,\n mconst.DEF_SETTINGNAME_sitefilepath: misc.calc_modulefiledirpath(__file__),\n # should we also load mewlo site installed setuptools plugins\n mconst.DEF_SETTINGNAME_flag_importsetuptoolspacks: True,\n mconst.DEF_SETTINGNAME_replaceshadowpath: '${sitefilepath}/replaceshadow',\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_config, config)\n\n # config settings\n config = {\n # Name of site\n mconst.DEF_SETTINGNAME_sitename: 'Mewlo',\n # Specify where this site serves from\n # these siteurls should not end in / so if you are serving a site at root just use relative of '' and absolute of 'http://sitename.com'\n mconst.DEF_SETTINGNAME_siteurl_relative: '',\n mconst.DEF_SETTINGNAME_siteurl_absolute: 'http://127.0.0.1:8080',\n #mconst.DEF_SETTINGNAME_siteurl_relative: '/public/publicity',\n #mconst.DEF_SETTINGNAME_siteurl_absolute: 'http://127.0.0.1:8080/public/publicity',\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_config, config)\n\n # config settings\n config = {\n # online status information\n mconst.DEF_SETTINGNAME_isenabled: True,\n mconst.DEF_SETTINGNAME_isonline: True,\n mconst.DEF_SETTINGNAME_offline_mode: 'maintenance',\n mconst.DEF_SETTINGNAME_offline_message: 'We are down for leap-year maintenance; we will be back soon.',\n mconst.DEF_SETTINGNAME_offline_allowadmin: False,\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_config, config)\n\n\n\n # extension pack config -- we need to explicitly enable plugins\n packconfig = {\n 'mouser.mewlotestplug' : {\n 'isenabled': False,\n },\n 'mouser.testpack' : {\n 'isenabled': False,\n },\n 'mewlo.siteaddon.account' : {\n 'isenabled': True,\n },\n 'mewlo.siteaddon.group' : {\n 'isenabled': True,\n },\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_packs, packconfig)\n\n\n # database config\n databaseconfig = {\n 'settings' : {\n 'sqlalchemy_loglevel' : logging.NOTSET,\n #'sqlalchemy_loglevel' : logging.INFO,\n },\n 'default' : {\n 'url' : 'sqlite:///${dbfilepath}/mewlo_testsite1.sqlite',\n #'tablename_prefix': 'mewlo_',\n 'flag_echologging' : False,\n },\n 'mysql_unused' : {\n # Sample configuration for mysql\n 'url' : 'mysql://mewlo_user:mewlo_pass@localhost:3306/mewlo_testsite1',\n 'tablename_prefix': 'mewlo_'\n },\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_database, databaseconfig)\n self.settings.listappend_settings_key(mconst.DEF_SETTINGSEC_make_dirs, '${dbfilepath}')\n\n # email config settings\n mailconfig = {\n # online status information\n 'smtp_host': self.get_configval('mail_smtp_host'),\n 'smtp_login': self.get_configval('mail_smtp_login'),\n 'smtp_port': self.get_configval('mail_smtp_port'),\n 'smtp_mode': self.get_configval('mail_smtp_mode'),\n 'smtp_password': self.get_configval('mail_smtp_password'),\n 'mail_from' : self.get_configval('mail_from'),\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_mail, mailconfig)\n\n\n # account siteaddon settings\n siteaddonconfig = {\n # online status information\n 'registration_mode': 'immediate',\n 'flag_require_email_verified_before_login': False,\n }\n self.settings.merge_settings_key('siteaddon_account', siteaddonconfig)\n\n\n\n # ATTN: UNFINISHED\n # asset mounts config\n if (False):\n assetmountconfig = {\n 'default' : {\n # 
an internal assetmount just needs a url route\n 'type': 'internal',\n 'routeid': 'static_files',\n },\n 'external' : {\n 'type': 'external',\n 'filepath': '${mewlofilepath}/public_assets',\n 'urlpath': 'http://127.0.0.1/mewlo/public_assets',\n },\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_asset_mounts, assetmountconfig)\n\n\n\n\n\n #print \"TESTING CONFIG1:\"\n #self.run_configfunc('sayhello',1,2,3)\n #print \"TESTING CONFIG2:\"\n #self.run_allconfigfuncs('sayhello',1,2,3)",
"def configure_service_password_encryption(device):\n\n try:\n device.configure(\"service password-encryption\")\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure service password encryption\"\n )",
"def reconfiguration_scalability(self):\n\n self.check_run('reconfiguration_scalability')\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"reconfigure_nova_ephemeral_disk\")\n\n self.show_step(2)\n cluster_id = self.fuel_web.get_last_created_cluster()\n config = utils.get_config_template('nova_disk')\n structured_config_nova = get_structured_config_dict(config)\n config = utils.get_config_template('keystone')\n structured_config_keystone = get_structured_config_dict(config)\n self.fuel_web.client.upload_configuration(config,\n cluster_id,\n role='controller')\n controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['controller'])\n\n self.show_step(3)\n task = self.fuel_web.client.apply_configuration(cluster_id,\n role='controller')\n self.fuel_web.assert_task_success(task, timeout=900, interval=5)\n\n self.show_step(4)\n self.check_config_on_remote(controllers, structured_config_keystone)\n\n self.show_step(5)\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n time_expiration = config[\n 'keystone_config']['token/expiration']['value']\n self.check_token_expiration(os_conn, time_expiration)\n\n self.show_step(6)\n bs_nodes = [x for x in self.env.d_env.get_nodes()\n if x.name == 'slave-05' or x.name == 'slave-06']\n self.env.bootstrap_nodes(bs_nodes)\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-05': ['compute', 'cinder']})\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-06': ['controller']})\n\n self.show_step(7)\n self.fuel_web.verify_network(cluster_id)\n\n self.show_step(8)\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.show_step(9)\n self.fuel_web.run_ostf(cluster_id=cluster_id)\n\n self.show_step(10)\n controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['controller'])\n computes = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, ['compute'])\n target_controller = [x for x in controllers\n if 'slave-06' in x['name']]\n target_compute = [x for x in computes\n if 'slave-05' in x['name']]\n self.check_config_on_remote(target_controller,\n structured_config_keystone)\n\n self.show_step(11)\n self.check_config_on_remote(target_compute, structured_config_nova)\n\n self.show_step(12)\n self.show_step(13)\n self.show_step(14)\n self.show_step(15)\n self.show_step(16)\n\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id))\n hypervisor_name = target_compute[0]['fqdn']\n self.check_nova_ephemeral_disk(os_conn, cluster_id,\n hypervisor_name=hypervisor_name)\n\n self.show_step(17)\n self.check_token_expiration(os_conn, time_expiration)\n\n self.env.make_snapshot(\"reconfiguration_scalability\", is_make=True)",
"def reboot(self, client, sec):\r\n result = client.reboot(sec)\r\n return result",
"async def enable_protection(self) -> None:\n await self._request(\n \"dns_config\", method=\"POST\", json_data={\"protection_enabled\": True},\n )",
"def _change_bios_setting(self, properties):\n keys = properties.keys()\n # Check if the BIOS resource/property exists.\n headers, bios_uri, settings = self._check_bios_resource(keys)\n if not self._operation_allowed(headers, 'PATCH'):\n headers, bios_uri, _ = self._get_bios_settings_resource(settings)\n self._validate_if_patch_supported(headers, bios_uri)\n\n request_headers = self._get_bios_hash_password(self.bios_password)\n status, headers, response = self._rest_patch(bios_uri, request_headers,\n properties)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def upgrade_security_controlpanel_settings(context):\n def _get_enable_self_reg():\n app_perms = portal.rolesOfPermission(permission='Add portal member')\n for appperm in app_perms:\n if appperm['name'] == 'Anonymous' and \\\n appperm['selected'] == 'SELECTED':\n return True\n return False\n\n # get the old site properties\n portal_url = getToolByName(context, 'portal_url')\n portal = portal_url.getPortalObject()\n portal_properties = getToolByName(portal, \"portal_properties\")\n site_properties = portal_properties.site_properties\n\n # get the new registry\n registry = getUtility(IRegistry)\n\n # XXX: Somehow this code is executed for old migration steps as well\n # ( < Plone 4 ) and breaks because there is no registry. Looking up the\n # registry interfaces with 'check=False' will not work, because it will\n # return a settings object and then fail when we try to access the\n # attributes.\n try:\n settings = registry.forInterface(\n ISecuritySchema,\n prefix='plone',\n )\n except KeyError:\n settings = False\n if settings:\n settings.enable_self_reg = _get_enable_self_reg()\n validate_email = portal.getProperty('validate_email', True)\n if validate_email:\n settings.enable_user_pwd_choice = False\n else:\n settings.enable_user_pwd_choice = True\n pmembership = getToolByName(portal, 'portal_membership')\n settings.enable_user_folders = pmembership.getMemberareaCreationFlag()\n settings.allow_anon_views_about = site_properties.getProperty(\n 'allowAnonymousViewAbout', False)\n settings.use_email_as_login = site_properties.getProperty(\n 'use_email_as_login', False)\n settings.use_uuid_as_userid = site_properties.getProperty(\n 'use_uuid_as_userid', False)",
"def update_config():\n \n dburl = dbconn.DbURL()\n conn = dbconn.connect(dburl, utility=True)\n \n logger.info('Updating catalog...')\n sql = \"SELECT gp_activate_standby()\"\n dbconn.execSQL(conn, sql)\n\n conn.commit()\n conn.close()\n\n logger.info('Database catalog updated successful')",
"def bootstrap_setting(value):\n return get_bootstrap_setting(value)",
"def _update_persistent_boot(self, device_type=[], persistent=False,\n mac=None):\n tenure = 'Once'\n new_device = device_type[0]\n # If it is a standard device, we need to convert in RIS convention\n if device_type[0].upper() in DEVICE_COMMON_TO_RIS:\n new_device = DEVICE_COMMON_TO_RIS[device_type[0].upper()]\n\n if persistent:\n tenure = 'Continuous'\n\n systems_uri = \"/rest/v1/Systems/1\"\n # Need to set this option first if device is 'UefiTarget'\n if new_device is 'UefiTarget':\n if not mac:\n msg = ('Mac is needed for iscsi uefi boot')\n raise exception.IloInvalidInputError(msg)\n\n headers, bios_uri, bios_settings = self._check_bios_resource()\n # Get the Boot resource and Mappings resource.\n boot_settings = self._get_bios_boot_resource(bios_settings)\n StructuredBootString = None\n\n for boot_setting in boot_settings['BootSources']:\n if(mac.upper() in boot_setting['UEFIDevicePath'] and\n 'iSCSI' in boot_setting['UEFIDevicePath']):\n StructuredBootString = boot_setting['StructuredBootString']\n break\n if not StructuredBootString:\n msg = ('MAC provided is Invalid \"%s\"' % mac)\n raise exception.IloInvalidInputError(msg)\n\n new_boot_settings = {}\n new_boot_settings['Boot'] = {'UefiTargetBootSourceOverride':\n StructuredBootString}\n status, headers, response = self._rest_patch(systems_uri, None,\n new_boot_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n new_boot_settings = {}\n new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': tenure,\n 'BootSourceOverrideTarget': new_device}\n status, headers, response = self._rest_patch(systems_uri, None,\n new_boot_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def config():\n sudo(\n r\"sed -i '/#password=/c\\password=abcdefghijklmnopq' /etc/minv/minv.conf\"\n )\n sudo(\n r\"sed -i '/log_level = INFO/c\\log_level = DEBUG' /etc/minv/minv.conf\"\n )",
"def boot(request, server_ids, server_id):\n try:\n if int(server_id) not in server_ids:\n raise Exception(\"Forbidden: specified Server does not belong to specified Service.\")\n\n server = Server.objects.get(pk=server_id) \n\n pysph = Vsphere(settings.VMWARE[\"address\"], settings.VMWARE[\"username\"], settings.VMWARE[\"password\"], server.sid)\n result = pysph.boot()\n\n if result:\n ActionLogger().log(request.user, \"modified\", \"Booted\", \"vServer %s\" % server.sid)\n return format_ajax_response(True, \"Server booted successfully.\")\n else:\n raise Exception(\"Pysphere's boot() returned False.\")\n except Exception as ex:\n logger.error(\"Failed to boot: %s\" % ex)\n return format_ajax_response(False, \"There was a error booting the server.\")",
"async def pixy_set_servos(self, s0, s1):\n data = [PrivateConstants.PIXY_SET_SERVOS, s0 & 0x7f, (s0 >> 7) & 0x7f,\n s1 & 0x7f, (s1 >> 7) & 0x7f]\n await self._send_sysex(PrivateConstants.PIXY_CONFIG, data)",
"async def pixy_set_servos(self, s0, s1):\n data = [PrivateConstants.PIXY_SET_SERVOS, s0 & 0x7f, (s0 >> 7) & 0x7f, s1 & 0x7f, (s1 >> 7) & 0x7f]\n await self._send_sysex(PrivateConstants.PIXY_CONFIG, data)",
"def set_boot_order(profile_obj):\n status = True\n logger._log_to_console_and_log_file(\"\")\n logger._log_to_console_and_log_file(\"### Testing the 'Boot Settings' session ###\")\n logger._log_to_console_and_log_file(\"- Select the 'Legacy BIOS' mode\")\n createprofile_elements = ProfileContainer(ProfileContainerType.ADD)\n __select_value_from_a_profile_combo_box(createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE, createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE_LIST % \"Legacy BIOS\")\n # Set invalid values\n logger._log_to_console_and_log_file(\"Testing using invalid values\")\n for profile in profile_obj:\n items = [[\"CD\", profile.cd], [\"USB\", profile.usb], [\"HardDisk\", profile.harddisk]]\n for data in items:\n ui_lib.wait_for_element_and_input_text(\"name=%s\" % data[0], data[1])\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_CREATE_SERVER_PROFILE_FORM)\n if data[0] == \"HardDisk\":\n data[0] = \"Hard Disk\"\n if ui_lib.wait_for_element_text(FusionServerProfilesPage.ID_BOOT_ORDER_POSITION % data[0], data[1], timeout=1):\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was not cleared to the default value and persisted as '\" + str(data[1]) + \"'\")\n status = False\n else:\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was correctly cleared to the default value\")\n return status",
"def settings_app_password(self, settings_app_password):\n\n self._settings_app_password = settings_app_password",
"def bootstrap():\n validate_configurator_version()\n\n # put new mkinitcpio.conf in place\n run(\"mv /etc/mkinitcpio.conf.pacnew /etc/mkinitcpio.conf\")\n sed(\"/etc/mkinitcpio.conf\",\n 'MODULES=\"\"',\n 'MODULES=\"xen-blkfront xen-fbfront xen-kbdfront xen-netfront xen-pcifront xenbus_probe_frontend xenfs\"') # nopep8\n sed(\"/etc/mkinitcpio.conf\",\n 'HOOKS=\"base udev autodetect modconf block filesystems keyboard fsck',\n 'HOOKS=\"base udev block filesystems shutdown autodetect\"')\n\n # upgrade pacakges\n run(\"pacman --noconfirm -Syu\")\n\n # put new pacman.conf in place\n run(\"mv /etc/pacman.conf.pacnew /etc/pacman.conf\")\n\n # install essential packages\n run(\"pacman --noconfirm -S base-devel\")\n run(\"pacman --noconfirm -S curl git rsync\")\n\n # create a user, named 'aur', to safely install AUR packages under fakeroot\n # uid and gid values auto increment from 1000\n # to prevent conficts set the 'aur' user's gid and uid to 902\n run(\"groupadd -g 902 aur && useradd -m -u 902 -g 902 -G wheel aur\")\n\n # allow users in the wheel group to sudo without a password\n uncomment(\"/etc/sudoers\", \"wheel.*NOPASSWD\")\n\n # install yaourt and upgrade non-pacman rackspace installed packages\n sudo(\"rm -rf /home/aur/.builds && mkdir /home/aur/.builds/\", user=\"aur\")\n with cd(\"/home/aur/.builds/\"):\n sudo(\"bash <(curl aur.sh) -si --noconfirm package-query yaourt\", user=\"aur\")\n sudo(\"yaourt --noconfirm -S xe-guest-utilities\", user=\"aur\")\n\n # allow fabric to sftp with contrib.files.put\n # http://stackoverflow.com/questions/10221839/cant-use-fabric-put-is-there-any-server-configuration-needed # nopep8\n # change before reboot because then the sshd config will be reloaded\n # sed(\"/etc/ssh/sshd_config\", \"Subsystem sftp /usr/lib/openssh/sftp-server\",\n # \"Subsystem sftp internal-sftp\")\n\n # systemd\n sed(\"/boot/grub/menu.lst\",\n \"kernel /boot/vmlinuz-linux root=/dev/xvda1 ro console=hvc0\",\n \"kernel /boot/vmlinuz-linux root=/dev/xvda1 ro console=hvc0 init=/usr/lib/systemd/systemd\")\n reboot()\n if not contains(\"/proc/1/comm\", \"systemd\"):\n abort(\"systemd is not installed properly\")\n server = [s for s in env.bootmachine_servers if s.public_ip == env.host][0]\n run(\"hostnamectl set-hostname {0}\".format(server.name))\n run(\"mv /etc/locale.gen.pacnew /etc/locale.gen.conf\")\n uncomment(\"/etc/locale.gen\", \"en_US.UTF-8 UTF-8\")\n uncomment(\"/etc/locale.gen\", \"en_US ISO-8859-1\")\n run(\"locale-gen\")\n run(\"localectl set-locale LANG='en_US.utf8'\")\n run(\"timedatectl set-timezone US/Central\")",
"def bootstrap():\n create_virtualenv()\n install_init_script()\n if not files.exists(env.code_root):\n clone_all()\n #deploy_from_local()\n pull_and_checkout_all()\n update_requirements()\n print '\\nNow add your database password to localsettings.py and run syncdb'",
"def sdss_env(request):\n m = request.getfixturevalue(\"monkeypatch\")\n for p in ('PHOTO_CALIB', 'PHOTO_DATA', 'BOSS_PHOTOOBJ', 'PHOTO_REDUX',\n 'PHOTO_RESOLVE', 'PHOTO_SKY', 'PHOTO_SWEEP'):\n m.setenv(p, '/' + p)\n return m",
"def server_activate(self):\n\t\tpass",
"def _configure_ipsec_secrets(self, ipsec_confs):\n secrets_tpl = '../config/tpl/ipsec/ipsec.secrets'\n secret_confs = []\n\n for name, conf in ipsec_confs.items():\n secret_conf = {\n 'right_public_ip': conf['right_public_ip'],\n 'psk': env.get('ipsec_psk_%s' % name),\n }\n secret_confs.append(secret_conf)\n\n # Configure the /etc/ipsec.d/<name>.conf file with passwords\n with hide(*fab_output_hides):\n return upload_template_changed(\n secrets_tpl,\n '/etc/ipsec.secrets',\n context={'confs': secret_confs},\n use_sudo=True,\n mode=0600,\n use_jinja=True\n )",
"def set_enable(self, pwd, type='secret'):\n\n if type == 'secret':\n cmd = 'enable secret %s' %(pwd)\n else:\n cmd = 'enable password %s' %(pwd)\n\n output = self.iosapi.bcp_send_config_command(self.iosapi.netmiko_session, cmd)\n self.iosapi.bcp_log(\"info\", \"(%s) set_enable : Attempting to set enable\" %(__name__))\n return(output)",
"def set_bios_bootmode_uefi(ip, login_account, login_password, system_id):\n result = {}\n login_host = \"https://\" + ip\n try:\n # Connect using the BMC address, account name, and password\n # Create a REDFISH object\n REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,\n password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)\n # Login into the server and create a session\n REDFISH_OBJ.login(auth=utils.g_AUTH)\n except:\n traceback.print_exc()\n result = {'ret': False, 'msg': \"Please check the username, password, IP is correct\"}\n return result\n\n # GET the ComputerSystem resource\n system = utils.get_system_url(\"/redfish/v1\", system_id, REDFISH_OBJ)\n if not system:\n result = {'ret': False, 'msg': \"This system id is not exist or system member is None\"}\n REDFISH_OBJ.logout()\n return result\n for i in range(len(system)):\n system_url = system[i]\n response_system_url = REDFISH_OBJ.get(system_url, None)\n if response_system_url.status != 200:\n error_message = utils.get_extended_error(response_system_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (system_url, response_system_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else:\n # Get the bios resource\n bios_url = response_system_url.dict['Bios']['@odata.id']\n response_bios_url = REDFISH_OBJ.get(bios_url, None)\n if response_bios_url.status != 200:\n error_message = utils.get_extended_error(response_bios_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (bios_url, response_bios_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else: # Get bios success\n # Seek boot mode from bios attributes\n attribute_bootmode = None\n attributes = response_bios_url.dict['Attributes']\n for attribute in attributes:\n if attribute == \"BootMode\" or attribute == \"SystemBootMode\":\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"SystemBootMode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"Boot\" in attribute and \"Mode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n result = {'ret': False, 'msg': \"Can not found BootMode attribute in response of url %s\" %(bios_url)}\n REDFISH_OBJ.logout()\n return result\n\n # Get boot mode setting guide from bios registry\n WarningText = None\n ValueName = None\n bios_registry_url = \"/redfish/v1/Registries/\" + response_bios_url.dict['AttributeRegistry']\n response_bios_registry_url = REDFISH_OBJ.get(bios_registry_url, None)\n if response_bios_registry_url.status == 200:\n locations = response_bios_registry_url.dict['Location']\n bios_regjson_url = None\n for location in locations:\n if 'en' in location['Language']:\n bios_regjson_url = location['Uri']\n break\n if bios_regjson_url:\n response_bios_regjson_url = REDFISH_OBJ.get(bios_regjson_url, None)\n if response_bios_regjson_url.status == 200:\n regattributes = response_bios_regjson_url.dict['RegistryEntries']['Attributes']\n for regattribute in regattributes:\n if regattribute['AttributeName'] == attribute_bootmode:\n if 'WarningText' in regattribute:\n WarningText = regattribute['WarningText']\n for value in regattribute['Value']:\n if 'legacy' in value['ValueName'].lower():\n continue\n if 'uefi' in value['ValueName'].lower():\n ValueName = 
value['ValueName']\n break\n ValueName = value['ValueName']\n break\n \n # Perform patch to set\n if ValueName == None:\n ValueName = \"UEFIMode\"\n pending_url = response_bios_url.dict['@Redfish.Settings']['SettingsObject']['@odata.id']\n parameter = {attribute_bootmode: ValueName}\n attribute = {\"Attributes\": parameter}\n headers = {\"If-Match\": '*'}\n response_pending_url = REDFISH_OBJ.patch(pending_url, body=attribute, headers=headers)\n if response_pending_url.status in [200,204]:\n if WarningText:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful. WarningText: %s'% (WarningText) }\n else:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful'}\n elif response_pending_url.status == 405:\n result = {'ret': False, 'msg': \"Resource not supported\"}\n else:\n error_message = utils.get_extended_error(response_pending_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (\n pending_url, response_pending_url.status, error_message)}\n\n # Logout of the current session\n try:\n REDFISH_OBJ.logout()\n except:\n pass\n return result",
"def boot():\r\n print \"\"\"\r\n ###### ## ## ### ## ## ## ## ######## ########\r\n ## ## ## ## ## ## ### ## ## ## ## ## ##\r\n ## #### ## ## #### ## ## ## ## ## ##\r\n ## ## ## ## ## ## ## ## ## ######## ######\r\n ## ## ######### ## #### ## ## ## ## ##\r\n ## ## ## ## ## ## ### ## ## ## ## ##\r\n ###### ## ## ## ## ## ####### ## ## ########\r\n\r\n Version %s-%s\r\n\r\n Multi Purpose Artificial Inelegance Program\r\n Copyright (c) Alexandre Gauthier 2010-2011\r\n All Rights Reserved\r\n \"\"\" % ( constants.VERSION, constants.TAGNAME )\r\n\r\n # Initialize log\r\n # TODO: The values should be read from config file.\r\n log.init_log('cyanure.log', 'DEBUG')\r\n\r\n logger.info(\"Cyanure system init: Version %s (%s)\" % (\r\n constants.VERSION, constants.TAGNAME ))",
"def flashUboot(self):\n\t\tif self.settings.getKeyValue('flash.uboot?') == 'y':\n\t\t\tloadAddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\t\tcmd = self.settings.getKeyValue('u-boot.load.command')\n\t\t\tcmd = cmd.replace('<u-boot>', 'u-boot.bin.12x.2430')\n\t\t\tself.socket.send(cmd, 5)\n\t\t\t#self.socket.send('protect off 1:0-1\\r', 2)\n\t\t\t#self.socket.send('erase 1:0-1\\r', 2)\n\t\t\t#self.socket.send('cp.b 80000000 %s 2ffff\\r' % loadAddress)\n\t\t\treturn None\n\t\t\t#cmd = cmd.replace('<u-bootloadadress>', self.u-bootloadaddress)",
"def change_settings(settings, methods=['GET', 'POST']):\n message = resolve_settings(settings)\n socketio.emit('settings_update', SETTINGS)\n socketio.emit('log', message)",
"async def disable_protection(self) -> None:\n await self._request(\n \"dns_config\", method=\"POST\", json_data={\"protection_enabled\": False},\n )",
"def update_runtime_variables(self) -> None:\n # Opportunistic, works if SELinux not enforced\n super().update_runtime_variables()\n self.parse_sysconfig_var()",
"def set_password(self, password):\n self.cloudserver.change_password(password)",
"def set_protection_enabled(self, c, state):\n self.enable_protection = state",
"def _update_site_configuration(self):\n self.site.configuration.site_values = {'THIRD_PARTY_AUTH_ONLY_DOMAIN': self.email_domain_name}\n self.site.configuration.save()",
"def set_soak(self, server, to):\n to_exec = \"UPDATE server SET enable_soak = %s WHERE server_id = %s\"\n self.__cursor.execute(to_exec, (to, str(server.id),))\n self.__connection.commit()",
"def set_http_boot_uri(self, url):\n try:\n sushy_system = self._get_sushy_system()\n sushy_system.http_boot_uri.set_http_boot_uri(url)\n except sushy.exceptions.SushyError as e:\n msg = (self._('Unable to set HTTP Boot URI. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def _setup_applications(self):\n if 'host_nfs_path' in self.config['settings'] and 'guest_nfs_path' in self.config['settings']:\n self.settings['nfs'] = NFSSettings(host_vm_nfs_path=self.config['settings']['host_nfs_path'],\n guest_vm_nfs_path=self.config['settings']['guest_nfs_path'])\n\n self._setup_printer()",
"def set_bootloader_mode(self, mode):\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 'B')",
"def set_http_boot_url(self, url):\n if(self._is_boot_mode_uefi() is True):\n self._change_bios_setting({'UefiShellStartupUrl': url})\n else:\n msg = 'set_http_boot_url is not supported in the BIOS boot mode'\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def setConfig(debug):\n #Set debug flag\n if debug:\n app.config['DEBUG'] = True\n\n #Apply config\n if os.getenv(tag+'secret',base_config['secret']) == \"RANDOM\":\n secret = os.urandom(36)\n app.config['SECRET_KEY'] = secret \n else:\n app.config['SECRET_KEY'] = os.getenv(tag+'secret',base_config['secret']) \n\n\n #Flask-mail config\n app.config['MAIL_SERVER'] = os.getenv(tag+'mail_server',base_config['mail_server'])\n app.config['MAIL_PORT'] = int(os.getenv(tag+'mail_port',base_config['mail_port'])) \n app.config['MAIL_USE_SSL'] = True\n app.config['MAIL_USERNAME'] = os.getenv(tag+'email',base_config['email']) \n app.config['MAIL_PASSWORD'] = os.getenv(tag+'password',base_config['password']) \n mail = Mail(app)",
"def SetDebugMode(self, debug):\n config = ConfigParser.ConfigParser()\n config.read(self.app_conf)\n config.set(\"Settings\", \"debug_mode\", debug)\n configfile = open(self.app_conf, \"w\")\n config.write(configfile)\n self.debug_mode = misc.to_bool(debug)\n self.wifi.debug = self.debug_mode\n self.wired.debug = self.debug_mode",
"def patched_settings():\n settings.ENABLE_EMAIL_SUBSCRIPTIONS = False\n settings.BCRYPT_LOG_ROUNDS = 1",
"def apply_startup_params(self):\n config = self._protocol.get_startup_config()\n \n if not isinstance(config, dict):\n raise InstrumentParameterException(\"Incompatible initialization parameters\")\n \n log.trace(\"BARS driver applying config: %s\", config)\n self._protocol.set_readonly_values()\n self.set_resource(config)",
"def enable_server(self, server):\n log.info(\"Enabling %s in netscaler\", server)\n return self.post(\"server?action=enable\", {\"server\": {\"name\": server}}, content_type=self.content_type(\"server\"))",
"def dvs_secure(self):\n self.env.revert_snapshot(\"ready_with_5_slaves\")\n\n self.show_step(1)\n self.show_step(2)\n plugin.install_dvs_plugin(self.ssh_manager.admin_ip)\n\n self.show_step(3)\n cluster_id = self.fuel_web.create_cluster(\n name=self.__class__.__name__,\n mode=DEPLOYMENT_MODE,\n settings={\n \"net_provider\": 'neutron',\n \"net_segment_type\": NEUTRON_SEGMENT_TYPE\n }\n )\n plugin.enable_plugin(cluster_id, self.fuel_web)\n\n self.show_step(4)\n self.fuel_web.update_nodes(cluster_id,\n {'slave-01': ['controller'],\n 'slave-02': ['compute-vmware',\n 'cinder-vmware']})\n\n # Configure VMWare vCenter settings\n target_node_2 = self.node_name('slave-02')\n self.fuel_web.vcenter_configure(cluster_id,\n target_node_2=target_node_2,\n multiclusters=True)\n\n self.show_step(5)\n file_url = VCENTER_CERT_URL\n r = requests.get(file_url)\n cert_data = {'content': r.text, 'name': file_url.split('/')[-1]}\n vmware_attr = self.fuel_web.client.get_cluster_vmware_attributes(\n cluster_id)\n vc_values = vmware_attr['editable']['value']['availability_zones'][0]\n vc_values['vcenter_insecure'] = VCENTER_CERT_BYPASS\n vc_values['vcenter_ca_file'] = cert_data\n self.fuel_web.client.update_cluster_vmware_attributes(cluster_id,\n vmware_attr)\n\n self.show_step(6)\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.show_step(7)\n nodes = self.fuel_web.client.list_cluster_nodes(cluster_id)\n vmware_attr = self.fuel_web.client.get_cluster_vmware_attributes(\n cluster_id)\n az = vmware_attr['editable']['value']['availability_zones'][0]\n nova_computes = az['nova_computes']\n\n data = []\n ctrl_nodes = self.fuel_web.get_nailgun_cluster_nodes_by_roles(\n cluster_id, [\"controller\"])\n for nova in nova_computes:\n target_node = nova['target_node']['current']['id']\n conf_path = '/etc/neutron/plugins/ml2/vmware_dvs-vcenter-{0}' \\\n '.ini'.format(nova['service_name'])\n ca_path = '/etc/neutron/vmware-vcenter-{0}-ca.pem'.format(\n nova['service_name'])\n conf_dict = {\n 'insecure': False,\n 'ca_file': ca_path\n }\n if target_node == 'controllers':\n for node in ctrl_nodes:\n params = (node['hostname'], node['ip'], conf_path,\n conf_dict)\n data.append(params)\n else:\n for node in nodes:\n if node['hostname'] == target_node:\n params = (node['hostname'], node['ip'], conf_path,\n conf_dict)\n data.append(params)\n\n for hostname, ip, conf_path, conf_dict in data:\n logger.info(\"Check dvs agent conf of {0}\".format(hostname))\n self.check_config(ip, conf_path, conf_dict)\n\n self.show_step(8)\n self.fuel_web.run_ostf(cluster_id=cluster_id, test_sets=['smoke'])",
"def setServerip(self):\n\t\tself.serverip = self.settings.getKeyValue('serverip')\n\t\tself.socket.send('setenv serverip ' + self.serverip+'\\r', 1)\n\t\treturn None",
"def _deploy_salt_minion(name, session, vm_):\n # Get bootstrap values\n vm_[\"ssh_host\"] = get_vm_ip(name, session)\n vm_[\"user\"] = vm_.get(\"user\", \"root\")\n vm_[\"password\"] = vm_.get(\"password\", \"p@ssw0rd!\")\n vm_[\"provider\"] = vm_.get(\"provider\", \"xen\")\n log.debug(\"%s has IP of %s\", name, vm_[\"ssh_host\"])\n # Bootstrap Salt minion!\n if vm_[\"ssh_host\"] is not None:\n log.info(\"Installing Salt minion on %s\", name)\n boot_ret = __utils__[\"cloud.bootstrap\"](vm_, __opts__)\n log.debug(\"boot return: %s\", boot_ret)",
"def update_website_configuration():\n put('config/supervisor_website.conf', \n '/etc/supervisor/conf.d/gunicorn.conf', \n use_sudo=True)\n sudo('supervisorctl update')\n sudo('supervisorctl reload')",
"def restart_salt():\n stop_salt()\n start_salt()",
"def wait_boot(self, value: int) -> None:\n self._data[ATTR_WAIT_BOOT] = value",
"def environment_vars_set_wowww():\n os.environ[\"YESSSSMS_LOGIN\"] = \"03211234567\"\n os.environ[\"YESSSSMS_PASSWD\"] = \"MySecr3t\"\n os.environ[\"YESSSSMS_PROVIDER\"] = \"wowww\"\n os.environ[\"YESSSSMS_RECIPIENT\"] = \"066356789780\"",
"def test_settings_doesnt_break(self):\r\n self.settingsDeploy()",
"def setbacklight(self, backlight=True):\n if backlight:\n self._backlight = 0x08\n else:\n self._backlight = 0x00\n\n self.lcd_byte(0x00 ,LCD_CMD)",
"def set_boot_options(self, image_name, **vendor_specifics):\n current_boot = self.show(\"show running-config | inc ^boot system \")\n file_system = vendor_specifics.get(\"file_system\")\n if file_system is None:\n file_system = self._get_file_system()\n\n file_system_files = self.show(f\"dir {file_system}\")\n if re.search(image_name, file_system_files) is None:\n log.error(\"Host %s: File not found error for image %s.\", self.host, image_name)\n raise NTCFileNotFoundError(\n # TODO: Update to use hostname\n hostname=self.host,\n file=image_name,\n directory=file_system,\n )\n\n current_images = current_boot.splitlines()\n commands_to_exec = [f\"no {image}\" for image in current_images]\n commands_to_exec.append(f\"boot system {file_system}/{image_name}\")\n self.config(commands_to_exec)\n\n self.save()\n if self.boot_options[\"sys\"] != image_name:\n log.error(\"Host %s: Setting boot command did not yield expected results\", self.host)\n raise CommandError(\n command=f\"boot system {file_system}/{image_name}\",\n message=\"Setting boot command did not yield expected results\",\n )\n\n log.info(\"Host %s: boot options have been set to %s\", self.host, image_name)",
"def set_bios_settings(self, data=None):\n\n if not data:\n raise exception.SDFlexError(\"Could not apply settings with\"\n \" empty data\")\n sushy_system = self._get_sushy_system()\n\n try:\n for key in data.keys():\n sushy_system.bios.set_attribute(key, data[key])\n except sushy.exceptions.SushyError as e:\n message_extended_info = e.body.get('@Message.ExtendedInfo')\n error_message = message_extended_info[0]['Message']\n\n msg = (self._(\"Setting the value of Bios attribute \"\n \"'%(atrribute)s' is not succesfull. \"\n \"Error: %(error)s\") %\n {'error': str(error_message), 'atrribute': key})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def sleep_for_cloudinit():\n run(\"sleep 15\")"
] | [
"0.7178006",
"0.69563264",
"0.6825512",
"0.68102604",
"0.6628108",
"0.61587363",
"0.59844947",
"0.58227813",
"0.57520616",
"0.5736573",
"0.5724402",
"0.5678746",
"0.56520265",
"0.56394017",
"0.5624491",
"0.5624137",
"0.56189376",
"0.5582487",
"0.55506265",
"0.5549751",
"0.55264896",
"0.55135757",
"0.5503427",
"0.5483059",
"0.5471922",
"0.54675525",
"0.54548615",
"0.5436217",
"0.5388359",
"0.53854364",
"0.53847283",
"0.53741425",
"0.537095",
"0.5362904",
"0.5339592",
"0.5338526",
"0.5314042",
"0.5309868",
"0.5286563",
"0.52351505",
"0.5220978",
"0.51894027",
"0.5168483",
"0.5168072",
"0.51642793",
"0.51620525",
"0.51482487",
"0.5141294",
"0.512392",
"0.5120173",
"0.51111126",
"0.5105363",
"0.510441",
"0.5103108",
"0.50951034",
"0.5094547",
"0.50818795",
"0.5078228",
"0.50773925",
"0.5068767",
"0.5067183",
"0.5055339",
"0.50528306",
"0.50502455",
"0.50465083",
"0.5044519",
"0.5032295",
"0.5022688",
"0.50198334",
"0.5017791",
"0.5004298",
"0.49991882",
"0.49930748",
"0.49922422",
"0.49891642",
"0.49876747",
"0.49754015",
"0.49699262",
"0.49665123",
"0.49619097",
"0.49593726",
"0.49591038",
"0.4952791",
"0.49504727",
"0.49472004",
"0.49302748",
"0.49292225",
"0.49236724",
"0.49188998",
"0.49188974",
"0.4911487",
"0.49081334",
"0.49078068",
"0.489944",
"0.48978823",
"0.48917764",
"0.48902512",
"0.48860565",
"0.4883736",
"0.48733205"
] | 0.72850573 | 0 |
Checks if the system is in UEFI boot mode. | def _is_boot_mode_uefi(self):
    boot_mode = self.get_current_boot_mode()
    # Any mode other than 'UEFI' (e.g. 'LEGACY') is treated as BIOS boot.
    return boot_mode == 'UEFI' | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_bootable(self):\n return self.bootable_flag == 0x80",
"def has_efi():\n return os.path.exists(\"/sys/firmware/efi\")",
"def get_boot_mode():\n boot_mode = 'Legacy'\n try:\n reg_key = winreg.OpenKey(\n winreg.HKEY_LOCAL_MACHINE, r'System\\CurrentControlSet\\Control')\n reg_value = winreg.QueryValueEx(reg_key, 'PEFirmwareType')[0]\n if reg_value == 2:\n boot_mode = 'UEFI'\n except:\n boot_mode = 'Unknown'\n\n return boot_mode",
"def is_booted(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def test_update_bios_boot_mode(self):\n pass",
"def get_boot_mode(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def pilotIsBootValid (self):\n return self.isBootValid()",
"def test_patch_bios_boot_mode(self):\n pass",
"def is_http_boot_requested(node):\n http_boot_requested = (\n str(node.driver_info.get('enable_uefi_httpboot', 'false')).lower())\n return http_boot_requested == 'true'",
"def non_root_available(self):\n return self._adb_available and self._dev_emu",
"def is_in_use(self):\n\t\treturn bool(call_sdk_function('PrlBootDev_IsInUse', self.handle))",
"def system_valid(self):\n return self.udev.devices_exist",
"def test_get_bios_boot_mode_by_moid(self):\n pass",
"def CheckBoot(self, instance):\n try:\n serial_out = self.GetSerialPortOutput(instance=instance, port=1)\n self.CheckBootFailure(serial_out, instance)\n return ((self.BOOT_COMPLETED_MSG in serial_out)\n or (self.BOOT_STARTED_MSG in serial_out))\n except errors.HttpError as e:\n if e.code == 400:\n logger.debug(\"CheckBoot: Instance is not ready yet %s\", str(e))\n return False\n raise",
"def check_fw_mode(self, cat_cpuinfo_out):\n for line in cat_cpuinfo_out.splitlines():\n if \"firmware\" in line:\n if \"OPAL\" in line:\n return True\n else:\n return False\n return False",
"def sstbf_enabled():\n return common.SSTBF_CAP in SYSTEM_CAPS",
"def test_get_bios_boot_mode_list(self):\n pass",
"def safe_boot_disabled(self):\n return self._safe_boot_disabled",
"def is_boot_code_present(self):\n\n\t\treturn struct.unpack('<H', self.boot_sector_data[0 : 2])[0] != 0 and struct.unpack('<H', self.boot_sector_data[510 : 512])[0] == 0xAA55",
"def get_pending_boot_mode(self):\n headers, uri, bios_settings = self._check_bios_resource(['BootMode'])\n _, _, settings = self._get_bios_settings_resource(bios_settings)\n boot_mode = settings.get('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n return boot_mode.upper()",
"def is_system(self) -> bool:",
"def is_allow_select_boot_device(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsAllowSelectBootDevice', self.handle))",
"def available(self):\n return self._adb_available and self._dev_emu and (self._is_root\n or self._is_su)",
"def pilotValidateBoot (self):\n return self.validateBoot()",
"def is_sys(self):\n if self.mountpoint is not None and self.mountpoint in ['/', '/boot']:\n return True\n return False",
"def is_sys(self):\n if self.mountpoint is not None and self.mountpoint in ['/', '/boot']:\n return True\n return False",
"def __verify__(cls):\n\n try:\n UpstartSystem()\n return True\n except Exception as e:\n try:\n UpstartSystem(bus=DirectUpstartBus())\n return True\n except Exception as e:\n return False",
"def CheckBootFailure(self, serial_out, instance):\n pass",
"def is_system(self) -> undefined.UndefinedOr[bool]:",
"def check_kernel_module(params) -> None:\n if os.system(\"lsmod | grep v4l2loopback >/dev/null 2>&1\") == 0:\n print(\"Kernel module is loaded\")\n else:\n print(\"Kernel module is NOT loaded\")",
"def HasSystemd(self):\n _, stderr = self.RunCmdOnDevice(['systemctl'], quiet=True)\n return stderr == ''",
"def HasSystemd(self):\n _, stderr = self.RunCmdOnDevice(['systemctl'], quiet=True)\n return stderr == ''",
"def checkWakeup(self):\n # TODO include check for external wakeup sources\n if self.dbus2vdr.checkVDRstatus():\n\n return self.dbus2vdr.Shutdown.ManualStart()\n else:\n return True",
"def check_platform():\n system = platform.system()\n distro = platform.platform()\n is_raspberry_pi = False\n try:\n info = open(\"/proc/cpuinfo\").read()\n except FileNotFoundError:\n is_raspberry_pi = False\n else:\n # bcm2708: Raspberry Pi 1\n # bcm2709: Raspberry Pi 2\n # bcm2710: Raspberry Pi 3\n is_raspberry_pi = 'BCM27' in info or 'ODROID' in info\n\n return system == \"Linux\" and (\n os.path.isfile('/proc/device-tree/hat/uuid') or\n 'boot2docker' in distro.lower() or\n is_raspberry_pi or\n os.path.isfile('/sys/hypervisor/uuid') or\n os.path.isdir('/var/lib/digitalocean')\n )",
"def get_current_boot_mode(self):\n boot_mode = self._get_bios_setting('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n\n return boot_mode.upper()",
"def _check_vmlinux():\n try:\n for f in glob.glob(os.path.join(os.sep, 'boot', 'vmlinu*')):\n with open(f, 'rb'):\n pass\n except IOError:\n raise CommandError('Make sure that the kernels in /boot are readable. '\n 'This is required for guestfish. Please run the '\n 'following command:\\n\\n'\n 'sudo chmod ugo+r /boot/vmlinu*') from None",
"def is_available() -> bool:\n # This function never throws and returns 0 if driver is missing or can't\n # be initialized\n return device_count() > 0",
"def sstcp_enabled():\n return common.POWER_CAP in SYSTEM_CAPS",
"def get_secure_boot_mode(self):\n sushy_system = self._get_sushy_system()\n try:\n secure_boot_enabled = GET_SECUREBOOT_CURRENT_BOOT_MAP.get(\n sushy_system.secure_boot.current_boot)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to provide '\n 'information about secure boot on the server. '\n 'Error: %(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexCommandNotSupportedError(msg)\n\n if secure_boot_enabled:\n LOG.debug(self._(\"Secure boot is Enabled\"))\n else:\n LOG.debug(self._(\"Secure boot is Disabled\"))\n return secure_boot_enabled",
"def __secure_boot(efivars_dir):\n enabled = False\n sboot = glob.glob(os.path.join(efivars_dir, \"SecureBoot-*/data\"))\n if len(sboot) == 1:\n # The minion is usually running as a privileged user, but is\n # not the case for the master. Seems that the master can also\n # pick the grains, and this file can only be readed by \"root\"\n try:\n with salt.utils.files.fopen(sboot[0], \"rb\") as fd:\n enabled = fd.read()[-1:] == b\"\\x01\"\n except PermissionError:\n pass\n return enabled",
"def is_aux_heat_on(self):\n return self._device.mode == self._device.MODE_HEAT_EMERGENCY",
"def check_device_state(self):",
"def enable_secure_boot(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_secure_boot\")",
"def _verify_boot_up_log(self, start_time):\n parser_result = self.device.event_parser.get_last_event([\"basic.bootup\"])\n self.assertGreater(parser_result.count, 0,\n \"Error: event label 'basic.bootup' not found.\")\n timestamp = parser_result.results_list[0][\"system_timestamp\"]\n self.assertGreater(\n timestamp, start_time,\n \"Expected basic bootup timestamp {} to be > start time {}\".format(\n timestamp, start_time))",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def available_on_system(cls):\n return (cls.reason_to_be_disabled() is None)",
"def supported_boot_interfaces(self):\n return [fake.FakeBoot] + super().supported_boot_interfaces",
"def is_vserver_kernel():\n\n kinfo = commands.getoutput('/bin/uname -a').split()[2]\n return '-vs' in kinfo",
"def isBootValid (self):\n if not self._wasSdIdentified:\n self._log(\"is-boot-valid\").notice(\"secure-digital was not identified, its boot partition is not valid.\")\n return False\n\n if not self.isBootPartitionExist():\n self._log(\"is-boot-valid\").notice(\"the secure-digital boot partition does not exist (not valid).\")\n return False\n\n try:\n self.mountBootPartition()\n except:\n self._log(\"is-boot-valid\").exception(\"failed mounting partition, partition is invalid\")\n return False\n\n stateFile = self._getBootInstallationFilePath()\n isValid = os.path.exists(stateFile)\n if isValid:\n self._log(\"is-boot-valid\").notice(\"secure-digital boot partition's state file %s exists, the boot partitions is valid.\", stateFile)\n else:\n self._log(\"is-boot-valid\").notice(\"secure-digital boot partition's state file %s does not exist, the boot partitions is invalid.\", stateFile)\n\n return isValid",
"def get_boot_mode(self, task):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='get_boot_mode')",
"def set_bios_bootmode_uefi(ip, login_account, login_password, system_id):\n result = {}\n login_host = \"https://\" + ip\n try:\n # Connect using the BMC address, account name, and password\n # Create a REDFISH object\n REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,\n password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)\n # Login into the server and create a session\n REDFISH_OBJ.login(auth=utils.g_AUTH)\n except:\n traceback.print_exc()\n result = {'ret': False, 'msg': \"Please check the username, password, IP is correct\"}\n return result\n\n # GET the ComputerSystem resource\n system = utils.get_system_url(\"/redfish/v1\", system_id, REDFISH_OBJ)\n if not system:\n result = {'ret': False, 'msg': \"This system id is not exist or system member is None\"}\n REDFISH_OBJ.logout()\n return result\n for i in range(len(system)):\n system_url = system[i]\n response_system_url = REDFISH_OBJ.get(system_url, None)\n if response_system_url.status != 200:\n error_message = utils.get_extended_error(response_system_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (system_url, response_system_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else:\n # Get the bios resource\n bios_url = response_system_url.dict['Bios']['@odata.id']\n response_bios_url = REDFISH_OBJ.get(bios_url, None)\n if response_bios_url.status != 200:\n error_message = utils.get_extended_error(response_bios_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (bios_url, response_bios_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else: # Get bios success\n # Seek boot mode from bios attributes\n attribute_bootmode = None\n attributes = response_bios_url.dict['Attributes']\n for attribute in attributes:\n if attribute == \"BootMode\" or attribute == \"SystemBootMode\":\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"SystemBootMode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"Boot\" in attribute and \"Mode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n result = {'ret': False, 'msg': \"Can not found BootMode attribute in response of url %s\" %(bios_url)}\n REDFISH_OBJ.logout()\n return result\n\n # Get boot mode setting guide from bios registry\n WarningText = None\n ValueName = None\n bios_registry_url = \"/redfish/v1/Registries/\" + response_bios_url.dict['AttributeRegistry']\n response_bios_registry_url = REDFISH_OBJ.get(bios_registry_url, None)\n if response_bios_registry_url.status == 200:\n locations = response_bios_registry_url.dict['Location']\n bios_regjson_url = None\n for location in locations:\n if 'en' in location['Language']:\n bios_regjson_url = location['Uri']\n break\n if bios_regjson_url:\n response_bios_regjson_url = REDFISH_OBJ.get(bios_regjson_url, None)\n if response_bios_regjson_url.status == 200:\n regattributes = response_bios_regjson_url.dict['RegistryEntries']['Attributes']\n for regattribute in regattributes:\n if regattribute['AttributeName'] == attribute_bootmode:\n if 'WarningText' in regattribute:\n WarningText = regattribute['WarningText']\n for value in regattribute['Value']:\n if 'legacy' in value['ValueName'].lower():\n continue\n if 'uefi' in value['ValueName'].lower():\n ValueName = 
value['ValueName']\n break\n ValueName = value['ValueName']\n break\n \n # Perform patch to set\n if ValueName == None:\n ValueName = \"UEFIMode\"\n pending_url = response_bios_url.dict['@Redfish.Settings']['SettingsObject']['@odata.id']\n parameter = {attribute_bootmode: ValueName}\n attribute = {\"Attributes\": parameter}\n headers = {\"If-Match\": '*'}\n response_pending_url = REDFISH_OBJ.patch(pending_url, body=attribute, headers=headers)\n if response_pending_url.status in [200,204]:\n if WarningText:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful. WarningText: %s'% (WarningText) }\n else:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful'}\n elif response_pending_url.status == 405:\n result = {'ret': False, 'msg': \"Resource not supported\"}\n else:\n error_message = utils.get_extended_error(response_pending_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (\n pending_url, response_pending_url.status, error_message)}\n\n # Logout of the current session\n try:\n REDFISH_OBJ.logout()\n except:\n pass\n return result",
"def get_secure_boot_mode(self):\n system = self._get_host_details()\n\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = ('\"SecureBoot\" resource or feature is not supported'\n ' on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # get the Secure Boot object\n status, headers, secure_boot_settings = self._rest_get(secure_boot_uri)\n\n if status >= 300:\n msg = self._get_extended_error(secure_boot_settings)\n raise exception.IloError(msg)\n\n return secure_boot_settings['SecureBootCurrentState']",
"def detect(self):\n # Get PCI devices\n lines = subprocess.check_output([\"lspci\", \"-n\"]).decode().split(\"\\n\")\n for line in lines:\n if len(line) > 0:\n class_id = \"0x{0}\".format(line.split()[1].rstrip(\":\")[0:2])\n if class_id == self.class_id:\n dev = line.split()[2].split(\":\")\n vendor_id = \"0x{0}\".format(dev[0])\n product_id = \"0x{0}\".format(dev[1])\n if vendor_id == self.vendor_id and product_id in self.devices:\n return True\n return False",
"def check_reboot():\n return os.path.exist(\"run/reboot-required\")",
"def is_io_uring_supported():\n return compare_versions(get_kernel_version(), MIN_KERNEL_VERSION_FOR_IO_URING) >= 0",
"def detect():\n try:\n s = serial.Serial(port = 0, baudrate = 19200, parity = 'O', timeout=1)\n except Exception, e:\n log = logging.getLogger('root')\n log.exception(e)\n return False\n else:\n return True\n finally:\n s.close()",
"def _is_device_active(self):\n return self.power_mode == STATE_ON",
"def os_is_linux():\n return platform.system() == \"Linux\" and \"raspberrypi\" not in platform.uname()",
"def check_reboot():\n return os.path.exists(\"/run/reboot-required\")",
"def is_openelec():\n if os.path.exists(\"/etc/openelec-release\"):\n return True\n osrelfile=\"/etc/os-release\"\n if os.path.exists(osrelfile) and \"openelec\" in open(osrelfile,'r').read().lower():\n return True\n return False",
"def os_is_pi():\n return \"raspberrypi\" in platform.uname()",
"def is_rooted(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def _is_amd(sysfs_gpu_name):\n with open(sysfs_gpu_name) as src:\n return src.read().strip() == 'amdgpu'",
"def UseExistingBootDisk(disks):\n return any(disk.get('boot', False) for disk in disks)",
"def is_if_up(ifname):\n with open('/sys/class/net/' + ifname + '/carrier', 'r') as f:\n status = f.readline()\n return (status == '1')",
"def flashUboot(self):\n\t\tif self.settings.getKeyValue('flash.uboot?') == 'y':\n\t\t\tloadAddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\t\tcmd = self.settings.getKeyValue('u-boot.load.command')\n\t\t\tcmd = cmd.replace('<u-boot>', 'u-boot.bin.12x.2430')\n\t\t\tself.socket.send(cmd, 5)\n\t\t\t#self.socket.send('protect off 1:0-1\\r', 2)\n\t\t\t#self.socket.send('erase 1:0-1\\r', 2)\n\t\t\t#self.socket.send('cp.b 80000000 %s 2ffff\\r' % loadAddress)\n\t\t\treturn None\n\t\t\t#cmd = cmd.replace('<u-bootloadadress>', self.u-bootloadaddress)",
"def tftGizmoPresent():\n present = True\n try:\n with digitalio.DigitalInOut(board.A3) as backlight_pin:\n backlight_pin.pull = digitalio.Pull.UP\n present = not backlight_pin.value\n except ValueError:\n ### The Gizmo is already initialised, i.e. showing console output\n pass\n\n return present",
"def is_system_ready_for_benchmarking():\n\n # check if scaling_governor is set to 'performance' for all cpu cores\n cpu_governors = glob.glob('/sys/devices/system/cpu/cpu*/cpufreq/scaling_governor')\n if not cpu_governors:\n logger.error('no scaling_governor found. Do you run on a Linux System?')\n return False\n for governor in sorted(cpu_governors):\n with open(governor, 'r') as f:\n line = f.read().splitlines()[0]\n logger.debug('%s is set to \\\"%s\\\"', governor, line)\n if line != 'performance':\n logger.warning('please set all scaling_governor to \\\"performance\\\" (using \"sudo ./ondemand.sh start\")')\n return False\n\n return True",
"def is_available():",
"def CheckKVM():\n return os.path.exists('/dev/kvm')",
"def is_system_openwrt():\n wrt_filename = '/etc/openwrt_release'\n try:\n return exists(wrt_filename)\n except: # pragma: no cover\n SysTools.logger.debug(\"Reading file: '%s' failed\", wrt_filename)\n return False",
"def _check_mounted_system(self):\n res = self.su_cmd('touch /system/.dwarf_check')\n if res == '':\n res = self._do_adb_command('shell ls -la /system')\n if '.dwarf_check' in res:\n res = self.su_cmd('rm /system/.dwarf_check')\n if res == '':\n return True\n elif res == 'Read-only file system':\n return False\n\n return False",
"def is_wasabi_running():\n wasabi_process_id = run('pidof wassabee')\n if wasabi_process_id:\n return True\n else:\n return False",
"def win():\n if platform.system() in WINDOWS:\n return True\n return False",
"def in_runtime(self):\n\n return self.is_valid_platform() and self['ENVIRONMENT']",
"def has_intel_os(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def _is_booted_from_volume(self, instance, disk_mapping=None):\n return not bool(instance.get('image_ref'))",
"def testCheckAvailable(self):\n img = self.img\n img.inspect()\n with converter.RootMounted(img.converter._h,\n '/dev/VolGroup00/LogVol00'):\n c = img.converter\n installer = redhat.LocalInstaller(\n c._h, '/dev/VolGroup00/LogVol00',\n db.DB(['{}/conf/guestconv.db'.format(env.topdir)]),\n log.get_logger_object(test_helper.logger)\n )\n\n kernel = redhat.Package('kernel',\n version='2.6.9', release='89.EL',\n arch='i686')\n self.assertTrue(installer.check_available([kernel]))",
"def is_ctu_capable():\n\n context = package_context.get_context()\n ctu_func_map_cmd = context.ctu_func_map_cmd\n try:\n version = subprocess.check_output([ctu_func_map_cmd, '-version'])\n except (subprocess.CalledProcessError, OSError):\n version = 'ERROR'\n return version != 'ERROR'",
"def unset_iscsi_boot_info(self, mac):\n if(self._is_boot_mode_uefi() is True):\n iscsi_info = {'iSCSIBootEnable': 'Disabled'}\n self._change_iscsi_settings(mac.upper(), iscsi_info)\n else:\n msg = 'iscsi boot is not supported in the BIOS boot mode'\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def is_using_emergency_heat(self) -> bool:\r\n # TODO: Figure out how to implement this.\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"is_using_emergency_heat\"))\r\n return False",
"def _isoff(self):\n return self.dp.state()==PyTango.DevState.OFF",
"def is_system(self):\n\t\treturn self.__is_system",
"def is_unreal():\n\n try:\n import unreal\n except ImportError:\n return False\n\n return True",
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)"
] | [
"0.7057053",
"0.69969964",
"0.6908815",
"0.6746094",
"0.67178434",
"0.6590464",
"0.6447706",
"0.64065146",
"0.6263757",
"0.6129953",
"0.61029017",
"0.6102866",
"0.6059007",
"0.6056331",
"0.6024979",
"0.5940045",
"0.5927244",
"0.590934",
"0.585367",
"0.5818194",
"0.58100754",
"0.57906264",
"0.5783992",
"0.57787377",
"0.5778638",
"0.5778638",
"0.5736396",
"0.56873864",
"0.56853974",
"0.56681496",
"0.56472784",
"0.56472784",
"0.5633005",
"0.5602462",
"0.55923194",
"0.55769116",
"0.5557981",
"0.5549435",
"0.5531474",
"0.55082077",
"0.54919213",
"0.5458724",
"0.54483026",
"0.5446136",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54090863",
"0.54071134",
"0.5402988",
"0.5400702",
"0.5374421",
"0.53626627",
"0.53532964",
"0.5347555",
"0.5334663",
"0.5334024",
"0.5333179",
"0.5332985",
"0.53249836",
"0.53183615",
"0.5315106",
"0.5300965",
"0.53004074",
"0.5285855",
"0.5281893",
"0.5275175",
"0.52739006",
"0.526038",
"0.5251993",
"0.52355754",
"0.5206347",
"0.52057683",
"0.5192321",
"0.51885223",
"0.5179027",
"0.51772875",
"0.51752126",
"0.5171419",
"0.51682436",
"0.5166098",
"0.5161461",
"0.51594144",
"0.51527137",
"0.5152364",
"0.51474303",
"0.5140771",
"0.5133769"
] | 0.8531207 | 0 |
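The record above answers its UEFI query with a plain string comparison against the controller-reported boot mode, while several of its negatives (has_efi, get_boot_mode) probe the host operating system instead. A minimal, self-contained sketch of that OS-level variant follows, assuming only the two conventional probes (the /sys/firmware/efi directory on Linux and the PEFirmwareType registry value on Windows); the function name and structure are illustrative, not taken from any entry above.

import os
import platform

def is_uefi_boot() -> bool:
    """Best-effort local UEFI detection (illustrative sketch)."""
    system = platform.system()
    if system == 'Linux':
        # Linux exposes EFI variables only when booted through UEFI firmware.
        return os.path.exists('/sys/firmware/efi')
    if system == 'Windows':
        import winreg  # standard library on Windows builds of CPython
        try:
            key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                                 r'System\CurrentControlSet\Control')
            # PEFirmwareType is 2 for UEFI and 1 for legacy BIOS.
            return winreg.QueryValueEx(key, 'PEFirmwareType')[0] == 2
        except OSError:
            return False
    return False

On any other platform the sketch conservatively reports False rather than guessing.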
Gets the product name of the server. | def get_product_name(self):
system = self._get_host_details()
return system['Model'] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def product_name(self):\n return self._stub.List(self._message).product_name",
"def product_name(self):\n return self._product_name",
"def product_name(self) -> Optional[str]:\n return pulumi.get(self, \"product_name\")",
"def server_name(self) -> str:\n return pulumi.get(self, \"server_name\")",
"def product(self):\n return self.__values['product_name']",
"def product_name(self):\n buf = (ctypes.c_char * self.MAX_BUF_SIZE)()\n self._dll.JLINKARM_EMU_GetProductName(buf, self.MAX_BUF_SIZE)\n return ctypes.string_at(buf).decode()",
"def get_product_name(self):\n sushy_system = self._get_sushy_system()\n return sushy_system.model",
"def server_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"server_name\")",
"def server_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"server_name\")",
"def get_product_name(self):\n\n try:\n product_name = self.trees.get_element_by_id(\"productTitle\").text\n except:\n pass\n if product_name is None:\n product_name = \"Not available\"\n product_name = product_name.replace(\"\\n\", \"\")\n return product_name",
"def _get_servername(self):\n #recuperation objet bdd tango\n db = PyTango.Database()\n #recuperation de la liste des servers dans la bdd\n server_list = db.get_server_list()\n server_name = ''\n #pour chaque servers de la liste\n for server in server_list:\n #recuperation de la liste des noms des devices\n lst_devices_address = db.get_device_class_list(server).value_string\n #mise de la liste en lower case\n lst_devices_address_lower = [ i.lower() for i in lst_devices_address]\n #si le nom du device est dans la liste, alors on retourne le nom du serveur\n if self.device_name.lower() in lst_devices_address_lower:\n server_name = server\n return server_name",
"def product(self) -> str:\n return pulumi.get(self, \"product\")",
"def product(self):\n return self.appName",
"def fetch_name(self, product_id):\n product_url = urljoin(self.endpoint, str(product_id)) + \"?excludes={}\".format(self.excludes) + \"&key={}\".format(self.key)\n\n result = requests.get(product_url)\n\n if result.status_code != requests.codes[\"ok\"]:\n raise ProductNotFoundError(\"could not find product name for ID {}\".format(product_id))\n\n data = result.json()\n\n try:\n name = data[\"product\"][\"item\"][\"product_description\"][\"title\"]\n except KeyError:\n name = None\n\n return name",
"def get_prod_name(product):\n prod_name = prod_dict[product][\"prod_name\"]\n thredds_product = prod_dict[product][\"dataset_name\"]\n\n return prod_name,thredds_product",
"def get_raw_server_name():\n from google.appengine.api import app_identity\n return '%s.%s.appspot.com' % (os.environ[\n 'CURRENT_VERSION_ID'].split('.')[0], app_identity.get_application_id())",
"def primary_name_server(self) -> str:\n return pulumi.get(self, \"primary_name_server\")",
"def get_server_name(self):\n configured_value = self.charm_config[\"server-name\"]\n if configured_value:\n return configured_value\n else:\n fqdn = socket.getfqdn()\n return fqdn",
"def get_product_name(self, package_name):\n return package_name",
"def getName(self):\n return _libsbml.GeneProduct_getName(self)",
"def get_product_name(self, url):\n self.driver.get(url)\n try:\n product_name = self.driver.find_element_by_id(\"productTitle\").text\n except:\n pass\n\n if product_name is None:\n product_name = \"Not available\"\n return product_name",
"def server_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"server_name\")",
"def server_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"server_name\")",
"def get_sys_name(self):\n\t\treturn call_sdk_function('PrlSrvCfgHddPart_GetSysName', self.handle)",
"def get_db_server_name(self):\n if self.db_config_file.key_exists(\"server_name\"):\n return self.db_config_file_value(\"server_name\").strip('\"')\n return self.get_system_id()",
"def server_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"server_name\")",
"def server_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"server_name\")",
"def get_name(self):\n\t\treturn call_sdk_function('PrlSrvCfgDev_GetName', self.handle)",
"def ServerHostName(self):\n if self.force_auto_sync:\n self.get('ServerHostName')\n return self._ServerHostName",
"def get_host_name(self):\n\t\treturn call_sdk_function('PrlSrvInfo_GetHostName', self.handle)",
"def get_systemname(self) -> str:\n\n return self.send(self.cmd.GET_SYSTEMNAME)",
"async def get_hostname(self):\n\n # Display info message\n log.info(\"get_hostname\")\n\n # Get hostname\n output = await self.send_command(self.cmd_get_hostname)\n\n # Display info message\n log.info(f\"get_hostname: output: '{output}'\")\n\n # Remove the useless information in the returned string\n output = output.split(\"System Name: \")[1].strip()\n\n # Display info message\n log.info(f\"get_hostname: hostname found: '{output}'\")\n\n # Return the name of the device\n return output",
"def get_name(self):\n\t\treturn call_sdk_function('PrlSrvCfgHddPart_GetName', self.handle)",
"def get_hostname(self):\n return self.name",
"def name(self) -> Dict[str, str]:\n self.__logger.debug('Eva.name called')\n return self.__http_client.name()",
"def get_dev_name(self):\n\t\treturn call_sdk_function('PrlSrvCfgHdd_GetDevName', self.handle)",
"def name(self):\n return \"{} {}\".format(self._clientname, self._name)",
"async def get_hostname(self):\n\n # Display info message\n log.info(\"get_hostname\")\n\n # Get hostname\n output = await self.send_command(self.cmd_get_hostname)\n\n # Display info message\n log.info(f\"get_hostname: output: '{output}'\")\n\n # Remove the useless information in the returned string\n output = output.split()[0]\n\n # Display info message\n log.info(f\"get_hostname: hostname found: '{output}'\")\n\n # Return the name of the device\n return output",
"def computer_name(self) -> str:\n return pulumi.get(self, \"computer_name\")",
"def product_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"product_id\")",
"def product(self):\n if self._data:\n return self._get_info(\"PRODUCT\")\n\n\n product_id = int(self.tracking_number[2:4])\n\n if product_id in range(10, 68):\n return \"Business-Parcel\"\n elif product_id == 71:\n return \"Cash-Service (+DAC)\"\n elif product_id == 72:\n return \"Cash-Service+Exchange-Service\"\n elif product_id == 74:\n return \"DeliveryAtWork-Service\"\n elif product_id == 75:\n return \"Guaranteed 24-Service\"\n elif product_id == 76:\n return \"ShopReturn-Service\"\n elif product_id == 78:\n return \"Intercompany-Service\"\n elif product_id == 85:\n return \"Express-Parcel\"\n elif product_id == 87:\n return \"Exchange-Service Hintransport\"\n elif product_id == 89:\n return \"Pick&Return/Ship\"\n else:\n # Not explicitly mentiond in the docs, but apparently just a regular parcel\n return \"Business-Parcel\"",
"def name(self):\n return f\"{self._client.friendly_name} {CLIENT_SUFFIX}\"",
"def get_host_name(self):\n return self.get_command_output(\"hostname\").strip(\"\\n\")",
"def name(self):\n return self.device.name()",
"def name(self):\n _LOGGER.debug(self._shelly_cloud_device_name + ' >>> ' +\n self._shelly_cloud_entity_name + ' >>> name() >>> ' +\n self._shelly_cloud_device_name)\n return self._shelly_cloud_device_name",
"def GetHostName(self):\n try:\n return self.server.GetHostName()\n except dbus.DBusException:\n return None",
"def name(self):\n return self._device.name",
"def name(self):\n return self._device.name",
"def name(self):\n return self._device.name",
"def GetOSName():\n return Config.osName_",
"def get_product(conn, product_id: int) -> str:\n with conn.cursor() as cursor:\n cursor.execute(\"\"\"select name from products\n where id = {0}\"\"\".format(product_id))\n try:\n return cursor.fetchone()[0]\n except TypeError:\n raise errors.StoreError",
"def get_sys_name(self):\n\t\treturn call_sdk_function('PrlVmDevHdPart_GetSysName', self.handle)",
"def name(self):\n if ( self._typeSensor == _production):\n name = \"myEnedis.%s.production\" %(self._myDataSensorEnedis.get_PDL_ID())\n else:\n name = \"myEnedis.%s\" %(self._myDataSensorEnedis.get_PDL_ID())\n return name",
"def server_agent_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"server_agent_name\")",
"def getName(self):\n return _libsbml.GeneProductRef_getName(self)",
"def read_product_identifier(self):\n self.execute(SdpI2cCmdPrepareProductIdentifier())\n return self.execute(SdpI2cCmdReadProductIdentifier())",
"def server_group_name(self) -> str:\n return pulumi.get(self, \"server_group_name\")",
"def service_name(self) -> str:\n return pulumi.get(self, \"service_name\")",
"def service_name(self) -> str:\n return pulumi.get(self, \"service_name\")",
"def device_provisioning_host_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"device_provisioning_host_name\")",
"def version_name(self) -> str:\n return pulumi.get(self, \"version_name\")",
"def get_hostname(self):\n module = 'hostname'\n method = 'GET'\n response = self.axapi_call(module, method)\n hostname = response.json()['hostname']['value']\n print(self.device + ' Device hostname is: ' + hostname)",
"def name(self):\n return self._device.device_data[self._uuid]['name']",
"def get_system_name(self):\n\n\t\treturn self.__system_name",
"def service_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"service_name\")",
"def service_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"service_name\")",
"def service_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"service_name\")",
"def name(self):\n return \"myhomeserver1_\" + self._light_id",
"def product_version(self):\n return self.get_version(self.ProdVerMS) + \".\" + self.get_version(self.ProdVerLS)",
"def get_sys_name(self):\n\t\treturn call_sdk_function('PrlVmDev_GetSysName', self.handle)",
"def name(self):\n return self.device.device_data[self.device_id]['name']",
"def get_server_uuid(self):\n\t\treturn call_sdk_function('PrlSrvInfo_GetServerUuid', self.handle)",
"def _getName(self):\n return self.id().split('.')[-2]",
"def name(self):\n return '{} {}'.format(self._device,\n self._endpoint)",
"def get_package_name(self):\n return self.name + '-' + self.version + '-' + self.release",
"def GetServerHost():\n return GetHostName(True)",
"def getElementName(self):\n return _libsbml.ListOfGeneProducts_getElementName(self)",
"def get_package_name(self):\n return self.name + '-' + self.version",
"def smb_server_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"smb_server_name\")",
"def server_hostname(self):\n result = c_char_p(self.lib.iperf_get_test_server_hostname(self._test)).value\n if result:\n self._server_hostname = result.decode('utf-8')\n else:\n self._server_hostname = None\n return self._server_hostname",
"def _build_product_name(self, username: str, target_title: str) -> Tuple[str, str]:\n assert username\n assert target_title\n # AS Products are named using the user and the session\n # (there's a 1:1 mapping to DM Projects)\n\n # The Product name characters are not restricted\n identifier: str = f'{username}::{target_title}'\n name: str = f'{_SQ2_NAME_PREFIX} {self.__CFG_SQUONK2_SLUG} {identifier}'\n return name[:_SQ2_MAX_NAME_LENGTH], name",
"def machine_name(self) -> str:\n return pulumi.get(self, \"machine_name\")",
"def machine_name(self) -> str:\n return pulumi.get(self, \"machine_name\")",
"def svn_client_ctx_t_client_name_get(svn_client_ctx_t_self): # real signature unknown; restored from __doc__\n return \"\"",
"def get_name(self):\n \n return 'TCP/IP Server'",
"def get_os_name(cls):\n return cls.get_os_type().name",
"def server_type_name(self):\n ...",
"def os_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"os_name\")",
"def server_id(self) -> str:\n return pulumi.get(self, \"server_id\")",
"def server_id(self) -> str:\n return pulumi.get(self, \"server_id\")",
"def get_name() -> str:\n pass",
"def name(self):\n return f\"{get_device_name(self._data, 0, self._name)}\"",
"def get_hostname(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetHostname', self.handle)",
"def name(self):\n return f\"{self.client_name} {self._name}\"",
"def name(self):\n return f\"{self.client_name} {self._name}\"",
"def server_site_name(self):\n return dsdb._samdb_server_site_name(self)",
"def get_host_name(self, wwpn):\n cmd = \"svcinfo lsfabric -wwpn=%s -delim :\" % (wwpn)\n output = self._svc_command(cmd)[0]\n\n if len(output) < 2:\n return None\n\n header = output[0].split(':')\n values = output[1].split(':')\n index = header.index(SVC_KEY_HOST_NAME)\n name = values[index]\n return name",
"def getName(self):\n return self._get_name( )",
"def device_name(self) -> Optional[str]:\n return pulumi.get(self, \"device_name\")",
"def device_name(self) -> Optional[str]:\n return pulumi.get(self, \"device_name\")"
] | [
"0.8168809",
"0.7893617",
"0.7663046",
"0.7437491",
"0.7431132",
"0.7421063",
"0.73451436",
"0.73016155",
"0.73016155",
"0.71450585",
"0.6981237",
"0.6889687",
"0.6856621",
"0.67716855",
"0.6767265",
"0.6752493",
"0.66959023",
"0.66839856",
"0.6644297",
"0.6551729",
"0.65425736",
"0.65107495",
"0.65107495",
"0.6476917",
"0.6455544",
"0.64409745",
"0.64409745",
"0.6405943",
"0.64051366",
"0.6371516",
"0.6361048",
"0.63590354",
"0.6327409",
"0.6315273",
"0.6277749",
"0.62732315",
"0.62677497",
"0.62588114",
"0.6252249",
"0.62517565",
"0.6232174",
"0.6229165",
"0.62251806",
"0.62066376",
"0.61729586",
"0.6171021",
"0.6168502",
"0.6168502",
"0.6168502",
"0.61672395",
"0.61398447",
"0.6125307",
"0.61213595",
"0.6119855",
"0.61177796",
"0.60971236",
"0.6090893",
"0.6077397",
"0.6077397",
"0.60757625",
"0.6074925",
"0.60722786",
"0.6065471",
"0.6049433",
"0.6047375",
"0.6047375",
"0.6047375",
"0.60467416",
"0.604647",
"0.6036838",
"0.603394",
"0.60190326",
"0.6006052",
"0.6005555",
"0.60041493",
"0.59984106",
"0.597612",
"0.5975078",
"0.5956936",
"0.59325653",
"0.5931689",
"0.5926879",
"0.5926879",
"0.59244215",
"0.5923517",
"0.59204197",
"0.591397",
"0.5912673",
"0.5909109",
"0.5909109",
"0.59057707",
"0.590111",
"0.58830816",
"0.5880047",
"0.5880047",
"0.58727247",
"0.5871888",
"0.5869566",
"0.58687913",
"0.58687913"
] | 0.7758751 | 2 |
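The positive document above pulls the product name out of a host-details payload the client already holds. As a point of comparison, here is a short hedged sketch that performs the same lookup directly against a standard Redfish endpoint, assuming the common /redfish/v1/Systems/1 resource path; the host, credentials, and disabled TLS verification are placeholders, not values from the dataset.

import requests

def get_product_name(host: str, user: str, password: str) -> str:
    """Fetch the server model (product name) from a Redfish-conformant BMC."""
    url = f"https://{host}/redfish/v1/Systems/1"
    # verify=False stands in for lab BMCs with self-signed certificates.
    resp = requests.get(url, auth=(user, password), verify=False, timeout=30)
    resp.raise_for_status()
    # Redfish ComputerSystem resources expose the product name as 'Model'.
    return resp.json()['Model']

Calling get_product_name('10.0.0.5', 'ADMIN', 'secret') would return the same string the record's document reads from system['Model']; the address and credentials are invented for the example.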
Get the status of secure boot. | def get_secure_boot_mode(self):
system = self._get_host_details()
if ('links' not in system['Oem']['Hp'] or
'SecureBoot' not in system['Oem']['Hp']['links']):
msg = ('"SecureBoot" resource or feature is not supported'
' on this system')
raise exception.IloCommandNotSupportedError(msg)
secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']
# get the Secure Boot object
status, headers, secure_boot_settings = self._rest_get(secure_boot_uri)
if status >= 300:
msg = self._get_extended_error(secure_boot_settings)
raise exception.IloError(msg)
return secure_boot_settings['SecureBootCurrentState'] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_secure_boot_mode(self):\n sushy_system = self._get_sushy_system()\n try:\n secure_boot_enabled = GET_SECUREBOOT_CURRENT_BOOT_MAP.get(\n sushy_system.secure_boot.current_boot)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to provide '\n 'information about secure boot on the server. '\n 'Error: %(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexCommandNotSupportedError(msg)\n\n if secure_boot_enabled:\n LOG.debug(self._(\"Secure boot is Enabled\"))\n else:\n LOG.debug(self._(\"Secure boot is Disabled\"))\n return secure_boot_enabled",
"def get_secure_boot_state(self, task):\n return irmc_common.get_secure_boot_mode(task.node)",
"def enable_secure_boot(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_secure_boot\")",
"def status(self):\n ret = self.dev.ctrl_transfer(0xc0, 0x01, 0x0081, 0x0000, 0x0001)\n if ret[0] == 0xa0:\n return self.POWER_ON\n return self.POWER_OFF",
"def get_status(self):\n return self.o.read_register(self.dev_id, STATUS)",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def get_status(self):\n return self.read_register(259, 0, 3)",
"def wait_boot(self) -> int:\n return self._data[ATTR_WAIT_BOOT]",
"def safe_boot_disabled(self):\n return self._safe_boot_disabled",
"def _check_status(self):\n self.system_status_lock.acquire()\n info = self.system_status_proxy._getvalue()\n self.system_status_lock.release()\n return info",
"def set_secure_boot_state(self, task, state):\n return irmc_common.set_secure_boot_mode(task.node, state)",
"def get_pending_boot_mode(self):\n headers, uri, bios_settings = self._check_bios_resource(['BootMode'])\n _, _, settings = self._get_bios_settings_resource(bios_settings)\n boot_mode = settings.get('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n return boot_mode.upper()",
"async def get_status(self) -> str:\n return await self.hw_device.status()",
"def status(self):\n return self.microblaze.state",
"def __secure_boot(efivars_dir):\n enabled = False\n sboot = glob.glob(os.path.join(efivars_dir, \"SecureBoot-*/data\"))\n if len(sboot) == 1:\n # The minion is usually running as a privileged user, but is\n # not the case for the master. Seems that the master can also\n # pick the grains, and this file can only be readed by \"root\"\n try:\n with salt.utils.files.fopen(sboot[0], \"rb\") as fd:\n enabled = fd.read()[-1:] == b\"\\x01\"\n except PermissionError:\n pass\n return enabled",
"def get_status(self):\n if self.vm.get_cloud_status() != \"ACTIVE\":\n return \"stopped\"\n #wait for the vm to be ready and SSH-able\n self.vm.wait_ready()\n status = self.vm.run_command(\"ctool status\", indent=0, prefix='')\n return status.strip()",
"def is_bootable(self):\n return self.bootable_flag == 0x80",
"def get_boot_mode():\n boot_mode = 'Legacy'\n try:\n reg_key = winreg.OpenKey(\n winreg.HKEY_LOCAL_MACHINE, r'System\\CurrentControlSet\\Control')\n reg_value = winreg.QueryValueEx(reg_key, 'PEFirmwareType')[0]\n if reg_value == 2:\n boot_mode = 'UEFI'\n except:\n boot_mode = 'Unknown'\n\n return boot_mode",
"def _get_SS_State(self):\r\n try :\r\n state = win32gui.SystemParametersInfo(win32con.SPI_GETSCREENSAVEACTIVE)\r\n return state\r\n except:\r\n self.__error = True\r\n return False",
"def get_status(self):\n\n return self._system",
"def check_reboot_in_progress(con):\n k, v = con.kv.get(\"service/rebootmgr/reboot_in_progress\")\n if v and \"Value\" in v.keys() and v[\"Value\"]:\n return v[\"Value\"].decode()\n return False",
"def get_status():\n return ('off', 'off')",
"def status(self):\n self.scion_sh('status')",
"def status():\n\n\treturn libcrypto.RAND_status()",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def get_host_power_status(self):\n sushy_system = self._get_sushy_system()\n return GET_POWER_STATE_MAP.get(sushy_system.power_state)",
"def status(self):\n if self.qemu.is_running():\n status = 0\n self.log.info(\"vm-status\", result=\"online\")\n for device in list(self.qemu.block_info().values()):\n self.log.info(\n \"disk-throttle\",\n device=device[\"device\"],\n iops=device[\"inserted\"][\"iops\"],\n )\n else:\n status = 1\n self.log.info(\"vm-status\", result=\"offline\")\n for volume in self.ceph.volumes:\n locker = volume.lock_status()\n self.log.info(\"rbd-status\", volume=volume.fullname, locker=locker)\n consul = locate_live_service(self.consul, \"qemu-\" + self.name)\n if consul:\n self.log.info(\n \"consul\", service=consul[\"Service\"], address=consul[\"Address\"]\n )\n else:\n self.log.info(\"consul\", service=\"<not registered>\")\n return status",
"def CheckBoot(self, instance):\n try:\n serial_out = self.GetSerialPortOutput(instance=instance, port=1)\n self.CheckBootFailure(serial_out, instance)\n return ((self.BOOT_COMPLETED_MSG in serial_out)\n or (self.BOOT_STARTED_MSG in serial_out))\n except errors.HttpError as e:\n if e.code == 400:\n logger.debug(\"CheckBoot: Instance is not ready yet %s\", str(e))\n return False\n raise",
"def __call__(self):\n status = self.os.popen('circusctl status monitor').read().strip()\n\n if status == 'active':\n return True\n elif status == 'stopped':\n return False",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def get_ready_status():\n statusObj = get_server_status(fields=['ready'])\n return statusObj['ready']",
"def get_boot_mode(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def get_state(self):\n ret = self.send(\"?S\", recv=True)\n assert ret in \"WDR\"\n return ret",
"def getSystemAwake(self):\n print 'start of getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n try:\n self.db = shelve.open(os.path.join(self.xlocal, 'Launch Manager Utils\\\\launch.data'))\n if self.db['system_awake'] == False:\n print 'start of if true - getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n self.system_awake = self.db['system_awake']\n self.db.close()\n else:\n self.system_awake = True\n self.db['system_awake'] = self.system_awake\n self.db.close()\n \n print 'End of getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n \n except Exception, e:\n self.log_file.logEntry('{0}\\nUnable to load previous system_awake value, setting value to True'.format(e))\n self.system_awake = True",
"def get_provisioning_state(self):\n url = \"/api/v1/machine/{}\".format(self.machine_id)\n return self.urlhandler.get(url)",
"def __call__(self):\n status = self.os.popen('circusctl status validator').read().strip()\n\n if status == 'active':\n return True\n elif status == 'stopped':\n return False",
"def _get_system_status(self):\n sysinfo_strings = self._command(self.commands[\"SYSTEM_STATUS\"])\n sysinfo_dict = {\"name\": sysinfo_strings[0]}\n for line in sysinfo_strings:\n if \":\" in line:\n key, value = line.split(\":\", 1)\n sysinfo_dict[key.lower()] = value.strip()\n\n return sysinfo_dict",
"def system_state(self):\n byte = ord(self._serial_io(b'\\x49', 1))\n self._serial_ack = bool(byte & (1 << 4))\n self._serial_chk = bool(byte & (1 << 6))\n return byte",
"def status():\n xd = display.XKCDDisplayService()\n if xd.is_running():\n click.echo(click.style(\"xkcd service is running.\", fg=\"green\"))\n else:\n click.echo(click.style(\"xkcd service is stopped.\", fg=\"red\"))",
"def status():\n with spinner():\n is_enabled = is_witness_enabled()\n signing_key = current_signing_key()\n misses = total_missed()\n\n t = PrettyTable([\"Enabled\", \"Misses\", \"Key\"])\n t.align = \"l\"\n t.add_row([is_enabled, misses, signing_key])\n\n output(t, 'Status')\n output(get_config(), 'Configuration')",
"def get_host_power_status(self):\n\n data = self._get_host_details()\n return data['Power'].upper()",
"def get_device_state(self):\n\t\treturn call_sdk_function('PrlSrvCfgDev_GetDeviceState', self.handle)",
"def getStatus(self):\r\n return self.controller.getStatus()",
"def Get_BootStatusOnINT1_Enabled(self):\r\n return self.__readFromRegisterWithDictionaryMatch(self.__REG_RW_CTRL_REG3, self.__MASK_CTRL_REG3_I1_BOOT, self.__EnabledDict)",
"def is_host_on(self):\n status = False\n cmd = \"/usr/local/bin/wedge_power.sh status\"\n data = run_shell_cmd(cmd)\n Logger.info(\"[FSCD Testing] Executing cmd= [{}]\".format(cmd))\n Logger.info(\"[FSCD Testing] Received data= [{}]\".format(data))\n if \"on\" in data:\n status = True\n Logger.info(\"[FSCD Testing] userver power status {}\".format(status))\n return status",
"def cert_status(self) -> str:\n return pulumi.get(self, \"cert_status\")",
"def assess_status(self):\n if not self.configuration_complete():\n hookenv.status_set('blocked',\n 'Kerberos configuration incomplete')\n elif os_utils.is_unit_upgrading_set():\n hookenv.status_set('blocked',\n 'Ready for do-release-upgrade and reboot. '\n 'Set complete when finished.')\n else:\n hookenv.status_set('active',\n 'Unit is ready')",
"def get_current_boot_mode(self):\n boot_mode = self._get_bios_setting('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n\n return boot_mode.upper()",
"def test_update_bios_boot_mode(self):\n pass",
"def get_power_state(self):\n\n doc = self.client.enumerate(uris.CIM_ComputerSystem)\n\n enabled_state = doc.find(\n './/s:Body/wsen:EnumerateResponse/wsman:Items/wsinst:CIM_HostComputerSystem/wsinst:EnabledState', wsman.NS_MAP_COMPUTER_SYSTEM)\n return constants._get_enabled_state(enabled_state.text)",
"def get_status(self):\n\t\treturn call_sdk_function('PrlLic_GetStatus', self.handle)",
"def get_status(self):\n if self.status:\n print(f\"Server '{self.server_name}' is online\")\n else:\n print(f\"Server '{self.server_name}' is offline\")",
"def get_running_ver(self):\n module = 'version/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n runningver = response.json()['version']['oper']['sw-version']\n currentpart = response.json()['version']['oper']['boot-from']\n print(self.device + ' The current running version is: ' + runningver)\n print(self.device + ' The device is currently booted from: ' + currentpart)\n return runningver",
"def protection_status(self) -> 'outputs.ProtectionStatusDetailsResponse':\n return pulumi.get(self, \"protection_status\")",
"def get_state(self):\n _, state, _, _, _, _, _ = win32serviceutil.QueryServiceStatus(self.name)\n return state",
"def readback_status(self):\n status = ctypes.c_int()\n\n result = self._lib.NRFJPROG_readback_status(ctypes.byref(status))\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)\n\n return ReadbackProtection(status.value).name",
"def firmware_version(self):\n return self._get_system_status()[\"firmware\"]",
"def get_system_state(self):\n byte = self.system_state\n return {\n 'chksum': bool(byte & (1 << 6)),\n 'ack': bool(byte & (1 << 4)),\n 'FPGAboot': bool(byte & (1 << 2)),\n 'FPGArun': bool(byte & (1 << 1)),\n 'FPGAcom': bool(byte & (1 << 0)),\n }",
"def state(self):\n return self.device.status(station=self.station_number)",
"def status(self):\n return self.readvar('\\x5F\\x95',0)",
"def get_firmware_update_status(self):\n\n response = self.execute_command(CMD_GET_FIRMWARE_UPDATE_STATUS)[0]\n inprogress = (response & 0x80) == 0x80\n return {\n \"inprogress\": inprogress,\n \"error\": response & 0x7f,\n }",
"def pilotIsBootValid (self):\n return self.isBootValid()",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version\"], None)",
"def get_state(self, state):\n status = [u'noState', u'poweredOn', u'blocked', u'suspended', \n u'poweredOff', u'poweredOff', u'crashed']\n return status[int(state)]",
"def get_boot_dev_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetBootDevCount', self.handle)",
"def status(self):\n self._refresh_state()\n return self._data.get('status')",
"def gateway_slb_status(self) -> str:\n return pulumi.get(self, \"gateway_slb_status\")",
"def get_service_status(self):\n return self.service.status()",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"cat /proc/uptime\", \"hostname\", \"show version\"], None, 'text')",
"def get_wearout_ssd_status():\n\t\tdisks = Disk.get_all_disk()\n\t\tssd_status = {}\n\t\tfor i in disks:\n\t\t\ttmp = i.get_wearout_status()\n\t\t\t# tmp[0] is dev_name, tmp[1] is wearout %\n\t\t\tif tmp is not None:\n\t\t\t\tssd_status[tmp[0]] = tmp[1]\n\t\tif len(ssd_status) == 0:\n\t\t\treturn None\n\t\treturn ssd_status",
"def getStatus():",
"def status(self):\n if self._vsanobj.id is None:\n raise VsanNotPresent(\"Vsan \" + str(self._vsanobj._id) + \" is not present on the switch.\")\n out = self.__show_zone_status()\n return out.get(get_key(zonekeys.STATUS, self._SW_VER), None)",
"def check_device_state(self):",
"def checkstatus(self):\n # define cross-platform /dev/null\n devnull = open(os.devnull, 'w')\n\n # if the OS is windows\n if os.name == 'nt':\n ping = ['ping', '-n', '1', self.device]\n\n # if the OS is posix\n else:\n ping = ['ping', '-c', '1', self.device]\n\n print(self.device + ' Checking for device availability', end='', flush=True)\n time.sleep(5)\n count = 0\n while count < 2:\n print('.', end='', flush=True)\n ping_call = subprocess.Popen(ping, stdout=devnull)\n returncode = ping_call.wait()\n if returncode == 0:\n break\n time.sleep(1)\n count = count + 1\n\n print('')\n if count == 2:\n print(self.device + ' Device is not up')\n print(self.device + ' Exiting...')\n return 'FAIL'\n else:\n print(self.device + ' Device is Online')\n print(self.device + ' Please wait for script initialization')\n time.sleep(5)",
"def getstatus(self):\n return self.__status",
"def status(self):\n\n client = self.connect()\n if not client.sys.is_initialized():\n print(\"Vault is not initialized\")\n return\n else:\n print(\"Vault is initialized\")\n\n if client.sys.is_sealed():\n print(\"Vault is sealed\")\n print(client.seal_status)\n return\n else:\n print(\"Vault is unsealed\")\n\n # read in the Vault access token\n client = self.connect(VAULT_TOKEN)\n print()\n print(\"Key Status\")\n print(json.dumps(client.key_status))\n\n print()\n print(\"HA Status\")\n print(json.dumps(client.ha_status))\n\n print()\n print(\"Secret Backends\")\n print(json.dumps(client.sys.list_mounted_secrets_engines(), indent=True))\n\n print()\n print(\"Policies\")\n print(json.dumps(client.sys.list_policies()))\n\n print()\n print(\"Audit Backends\")\n print(json.dumps(client.sys.list_enabled_audit_devices(), indent=True))\n\n print()\n print(\"Auth Backends\")\n print(json.dumps(client.sys.list_auth_methods(), indent=True))",
"def status(self,c,ADDR):\r\n if self.device_detected == True:\r\n resp = yield subprocess.check_output(\"cacli STS \" + str(ADDR))\r\n #print resp\r\n else:\r\n resp = \"Device not connected.\"\r\n print \"Device not connected. \"\r\n returnValue(resp)",
"async def async_get_pss_state(self, capability: str) -> bool | None:\n response: dict[str, Any] = await self.aiolivisi.async_get_device_state(\n capability[1:]\n )\n if response is None:\n return None\n on_state = response[\"onState\"]\n return on_state[\"value\"]",
"def get_config_sync_status(self):\n \n try:\n device_group = self.connection.Management.DeviceGroup.get_list()\n print self.connection.Management.DeviceGroup.get_sync_status([device_group])\n \n except:\n raise Exception(\"Target system has pending configuration, please sync beforehand.\")",
"def get_status(self):\n return super(Cabling, self).get_status()",
"def test_get_bios_boot_mode_list(self):\n pass",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show system uptime|display json\",\n \"show version\"], None, 'mixed')",
"def test_get_bios_boot_mode_by_moid(self):\n pass",
"def read_status(ctl):\n\tr = ctl.bus_read_struct_coherent(tm.status_addr, 'BBBBI')\n\treturn r",
"def _do_get_status(self):\n logging.info(__name__ + ' : Get status of the device.')\n result = self._execute('X')\n usage = {\n 0: \"Channel not in use\",\n 1: \"Channel used for Nitrogen level\",\n 2: \"Channel used for Helium Level (Normal pulsed operation)\",\n 3: \"Channel used for Helium Level (Continuous measurement)\",\n 9: \"Error on channel (Usually means probe unplugged)\"\n }\n # current_flowing = {\n # 0 : \"Curent not flowing in Helium Probe Wire\",\n # 1 : \"Curent not flowing in Helium Probe Wire\"\n # }\n # auto_fill_status = {\n # 00 : \"End Fill (Level > FULL)\",\n # 01 : \"Not Filling (Level < FULL, Level > FILL)\",\n # 10 : \"Filling (Level < FULL, Level > FILL)\",\n # 11 : \"Start Filling (Level < FILL)\"\n # }\n return usage.get(int(result[1]), \"Unknown\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")"
] | [
"0.7545377",
"0.73338354",
"0.68596",
"0.6580167",
"0.6284584",
"0.627462",
"0.62408715",
"0.6210762",
"0.6183254",
"0.61830187",
"0.61508465",
"0.61405003",
"0.61379737",
"0.6085388",
"0.6051852",
"0.6020663",
"0.6020096",
"0.6004745",
"0.59862554",
"0.5939429",
"0.58935285",
"0.5873183",
"0.5869869",
"0.58692807",
"0.58469284",
"0.58386815",
"0.58363783",
"0.58091223",
"0.58007264",
"0.57963437",
"0.57942253",
"0.57795537",
"0.5758852",
"0.572227",
"0.5690995",
"0.5681317",
"0.567362",
"0.5658561",
"0.56572187",
"0.5634567",
"0.56263095",
"0.5607501",
"0.56041217",
"0.55982625",
"0.5593835",
"0.5592564",
"0.5584376",
"0.5584316",
"0.55793417",
"0.55730146",
"0.55700475",
"0.5554793",
"0.5544095",
"0.5536457",
"0.55323344",
"0.55311906",
"0.5527211",
"0.5518555",
"0.55101085",
"0.55082077",
"0.5506974",
"0.5496191",
"0.5490976",
"0.548832",
"0.5478018",
"0.5468756",
"0.54685926",
"0.5462292",
"0.54587346",
"0.5458276",
"0.54444414",
"0.5434923",
"0.54320747",
"0.54319286",
"0.54263264",
"0.5417582",
"0.5414458",
"0.5408113",
"0.540731",
"0.54058164",
"0.5399947",
"0.53955275",
"0.5389502",
"0.53853476",
"0.53833354",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434",
"0.53721434"
] | 0.7864809 | 0 |
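Each row above closes with the positive document's own score and its rank among the candidates, and the two bracketed lists pair one mined negative with one similarity score, position by position. A minimal consumption sketch for the "query"/"document"/"negatives" triplet objective declared in the metadata; the parsed-record dict and its key names are assumptions about the loader, not fields shown verbatim in this dump:

def build_triplets(record, margin=0.05):
    # 'record' is assumed to be one parsed row holding parallel
    # 'negatives' and 'negative_scores' lists plus the positive's
    # 'document_score'; these key names are illustrative only.
    for negative, score in zip(record["negatives"],
                               record["negative_scores"]):
        # Skip negatives scored too close to the positive, so near
        # duplicates of the answer are not trained on as negatives.
        if score < record["document_score"] - margin:
            yield record["query"], record["document"], negative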
Enable/Disable secure boot on the server. | def set_secure_boot_mode(self, secure_boot_enable):
if self._is_boot_mode_uefi():
self._change_secure_boot_settings('SecureBootEnable',
secure_boot_enable)
else:
msg = ('System is not in UEFI boot mode. "SecureBoot" related '
'resources cannot be changed.')
raise exception.IloCommandNotSupportedInBiosError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def enable_secure_boot(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_secure_boot\")",
"def set_secure_boot_mode(self, secure_boot_enable):\n sushy_system = self._get_sushy_system()\n try:\n sushy_system.secure_boot.enable_secure_boot(secure_boot_enable)\n except exception.InvalidInputError as e:\n msg = (self._('Invalid input. Error %(error)s')\n % {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to set secure '\n 'boot settings on the server. Error: %(error)s')\n % {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def get_secure_boot_mode(self):\n sushy_system = self._get_sushy_system()\n try:\n secure_boot_enabled = GET_SECUREBOOT_CURRENT_BOOT_MAP.get(\n sushy_system.secure_boot.current_boot)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to provide '\n 'information about secure boot on the server. '\n 'Error: %(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexCommandNotSupportedError(msg)\n\n if secure_boot_enabled:\n LOG.debug(self._(\"Secure boot is Enabled\"))\n else:\n LOG.debug(self._(\"Secure boot is Disabled\"))\n return secure_boot_enabled",
"def set_secure_boot_state(self, task, state):\n return irmc_common.set_secure_boot_mode(task.node, state)",
"def __secure_boot(efivars_dir):\n enabled = False\n sboot = glob.glob(os.path.join(efivars_dir, \"SecureBoot-*/data\"))\n if len(sboot) == 1:\n # The minion is usually running as a privileged user, but is\n # not the case for the master. Seems that the master can also\n # pick the grains, and this file can only be readed by \"root\"\n try:\n with salt.utils.files.fopen(sboot[0], \"rb\") as fd:\n enabled = fd.read()[-1:] == b\"\\x01\"\n except PermissionError:\n pass\n return enabled",
"def safe_boot_disabled(self, safe_boot_disabled):\n\n self._safe_boot_disabled = safe_boot_disabled",
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def start_salt():\n with fabric_settings(warn_only=True):\n if env.host == env.master_server.public_ip:\n sudo(\"systemctl start salt-master\")\n time.sleep(3)\n sudo(\"systemctl start salt-minion\")",
"async def enable_protection(self) -> None:\n await self._request(\n \"dns_config\", method=\"POST\", json_data={\"protection_enabled\": True},\n )",
"def _change_secure_boot_settings(self, property, value):\n system = self._get_host_details()\n # find the BIOS URI\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = (' \"SecureBoot\" resource or feature is not '\n 'supported on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # Change the property required\n new_secure_boot_settings = {}\n new_secure_boot_settings[property] = value\n\n # perform the patch\n status, headers, response = self._rest_patch(\n secure_boot_uri, None, new_secure_boot_settings)\n\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n # Change the bios setting as a workaround to enable secure boot\n # Can be removed when fixed for Gen9 snap2\n val = self._get_bios_setting('CustomPostMessage')\n val = val.rstrip() if val.endswith(\" \") else val+\" \"\n self._change_bios_setting({'CustomPostMessage': val})",
"def lockdown_procedure():\n\tprint(\"----------\")\n\tprint_section_header(\"LOCKDOWN\", Fore.BLUE)\n\tprint_confirmation(\"Set secure configuration without user interaction.\")\n\n\t# Get sudo priv\n\tsp.run(\"sudo -E -v\", shell=True, stdout=sp.PIPE)\n\n\t####\n\t# FIREWALL\n\t####\n\n\tsp.run(['sudo', 'launchctl', 'load', '/System/Library/LaunchDaemons/com.apple.alf.agent.plist'], stdout=sp.PIPE)\n\tsp.run(['sudo', 'launchctl', 'load', '/System/Library/LaunchAgents/com.apple.alf.useragent.plist'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setglobalstate', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setloggingmode', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setstealthmode', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsigned', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsignedapp', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', 'pkill', '-HUP', 'socketfilterfw'], stdout=sp.PIPE)\n\n\t####\n\t# SYSTEM PROTECTION\n\t####\n\n\tsp.run('sudo spctl --master-enable', shell=True, stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsigned', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsignedapp', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo defaults write /Library/Preferences/SystemConfiguration/com.apple.captive.control Active -bool false'], stdout=sp.PIPE)\n\n\t####\n\t# METADATA STORAGE\n\t####\n\n\tsp.run(['rm', '-rfv', '\"~/Library/LanguageModeling/*\"', '\"~/Library/Spelling/*\"', '\"~/Library/Suggestions/*\"'])\n\tsp.run(['rm', '-rfv', '\"~/Library/Application Support/Quick Look/*\"'], stdout=sp.PIPE)\n\tsp.run([':>~/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2'], shell=True, stdout=sp.PIPE)\n\n\t####\n\t# USER SAFETY\n\t####\n\n\tsp.run(['defaults', 'write', 'com.apple.screensaver', 'askForPassword', '-int', '1'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'com.apple.screensaver', 'askForPasswordDelay', '-int', '0'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'NSGlobalDomain', 'AppleShowAllExtensions', '-bool', 'true'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'NSGlobalDomain', 'NSDocumentSaveNewDocumentsToCloud', '-bool', 'false'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'com.apple.finder', 'AppleShowAllFiles', '-boolean', 'true'], shell=True, stdout=sp.PIPE)\n\tsp.run(['killAll', 'Finder'], stdout=sp.PIPE)\n\n\t####\n\t# RESTART\n\t####\n\n\tfinal_configuration()",
"def safe_boot_disabled(self):\n return self._safe_boot_disabled",
"def enable_server(self, server):\n log.info(\"Enabling %s in netscaler\", server)\n return self.post(\"server?action=enable\", {\"server\": {\"name\": server}}, content_type=self.content_type(\"server\"))",
"def set_enable(self, pwd, type='secret'):\n\n if type == 'secret':\n cmd = 'enable secret %s' %(pwd)\n else:\n cmd = 'enable password %s' %(pwd)\n\n output = self.iosapi.bcp_send_config_command(self.iosapi.netmiko_session, cmd)\n self.iosapi.bcp_log(\"info\", \"(%s) set_enable : Attempting to set enable\" %(__name__))\n return(output)",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def get_secure_boot_mode(self):\n system = self._get_host_details()\n\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = ('\"SecureBoot\" resource or feature is not supported'\n ' on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # get the Secure Boot object\n status, headers, secure_boot_settings = self._rest_get(secure_boot_uri)\n\n if status >= 300:\n msg = self._get_extended_error(secure_boot_settings)\n raise exception.IloError(msg)\n\n return secure_boot_settings['SecureBootCurrentState']",
"def system_protection_config():\n\n\tprint_section_header(\"GENERAL SYSTEM PROTECTION\", Fore.BLUE)\n\n\t# Enable Gatekeeper\n\tif prompt_yes_no(top_line=\"-> Enable Gatekeeper?\",\n\t bottom_line=\"Defend against malware by enforcing code signing and verifying downloaded applications before letting them to run.\"):\n\t\tprint_confirmation(\"Enabling Gatekeeper...\")\n\t\tsp.run('sudo spctl --master-enable', shell=True, stdout=sp.PIPE)\n\t\tsp.run('sudo spctl --enable --label \"Developer ID\"', shell=True, stdout=sp.PIPE)\n\n\t# Disable automatic software whitelisting\n\tif prompt_yes_no(top_line=\"-> Prevent automatic software whitelisting?\",\n\t bottom_line=\"Both built-in and downloaded software will require user approval for whitelisting.\"):\n\t\tprint_confirmation(\"Preventing automatic whitelisting...\")\n\t\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsigned', 'off'], stdout=sp.PIPE)\n\t\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsignedapp', 'off'], stdout=sp.PIPE)\n\n\t# Captive Portal\n\tif prompt_yes_no(top_line=\"-> Disable Captive Portal Assistant and force login through browser on untrusted networks?\",\n\t bottom_line=\"Captive Portal could be triggered and direct you to a malicious site WITHOUT any user interaction.\"):\n\t\tprint_confirmation(\"Disabling Captive Portal Assistant...\")\n\t\tsp.run(['sudo', 'defaults', 'write', '/Library/Preferences/SystemConfiguration/com.apple.captive.control', 'Active', '-bool', 'false'], stdout=sp.PIPE)",
"def enable(self) -> Awaitable[Dict]:\n return self.client.send(\"Security.enable\", {})",
"async def disable_protection(self) -> None:\n await self._request(\n \"dns_config\", method=\"POST\", json_data={\"protection_enabled\": False},\n )",
"def boot(self):\n\n pass",
"def set_spow_enable(enable: bool):\n\n global _SPOW_ENABLED # noqa: PLW0603\n\n _SPOW_ENABLED = enable",
"def server_activate(self):\n\t\tpass",
"def enable(ctx):\n\n config_db = ConfigDBConnector()\n config_db.connect()\n config_db.mod_entry(\"NAT_GLOBAL\", \"Values\", {\"admin_mode\": \"enabled\"})",
"def enable_secure_simple_pairing(self):\n logging.info(\"Cert: Sending WRITE_SIMPLE_PAIRING_MODE [True]\")\n self._enqueue_hci_command(hci_packets.WriteSimplePairingModeBuilder(hci_packets.Enable.ENABLED), True)\n logging.info(\"Cert: Waiting for controller response\")\n assertThat(self._hci_event_stream).emits(lambda msg: b'\\x0e\\x04\\x01\\x56\\x0c' in msg.event)",
"def stop_salt():\n with fabric_settings(warn_only=True):\n if env.host == env.master_server.public_ip:\n sudo(\"systemctl stop salt-master\")\n sudo(\"systemctl stop salt-minion\")",
"def reboot_fpga(self):\n log.info(\"Booting FPGA from SPI prom\")\n self.set(\"FPGA_CTRL\", \"boot_fpga\", 1);",
"def libc_prctl_set_securebits():\n # straight from man capabilities(7):\n # \"An application can use the following call to lock itself, and all of\n # its descendants, into an environment where the only way of gaining\n # capabilities is by executing a program with associated file capabilities\"\n _call_c_style(\n libc,\n \"prctl\",\n PR_SET_SECUREBITS,\n (\n SECBIT_KEEP_CAPS_LOCKED\n | SECBIT_NO_SETUID_FIXUP\n | SECBIT_NO_SETUID_FIXUP_LOCKED\n | SECBIT_NOROOT\n | SECBIT_NOROOT_LOCKED\n ),\n 0,\n 0,\n 0,\n )",
"def disable_server(self, server):\n log.info(\"Disabling %s in netscaler\", server)\n return self.post(\"server?action=disable\", {\"server\": {\"name\": server}}, content_type=self.content_type(\"server\"))",
"def do_security_setup(run_as_user, branch, base_path, dist_path, enable=True):\n \n if not enable:\n #disable security setup if enabled\n runcmd(\"apt-get -y remove unattended-upgrades fail2ban psad rkhunter chkrootkit logwatch apparmor auditd iwatch\")\n return\n \n #modify host.conf\n modify_config(r'^nospoof on$', 'nospoof on', '/etc/host.conf')\n \n #enable automatic security updates\n runcmd(\"apt-get -y install unattended-upgrades\")\n runcmd('''bash -c \"echo -e 'APT::Periodic::Update-Package-Lists \"1\";\\nAPT::Periodic::Unattended-Upgrade \"1\";' > /etc/apt/apt.conf.d/20auto-upgrades\" ''')\n runcmd(\"dpkg-reconfigure -fnoninteractive -plow unattended-upgrades\")\n \n #sysctl\n runcmd(\"install -m 0644 -o root -g root -D %s/linux/other/sysctl_rules.conf /etc/sysctl.d/60-tweaks.conf\" % dist_path)\n\n #set up fail2ban\n runcmd(\"apt-get -y install fail2ban\")\n runcmd(\"install -m 0644 -o root -g root -D %s/linux/other/fail2ban.jail.conf /etc/fail2ban/jail.d/counterblock.conf\" % dist_path)\n runcmd(\"service fail2ban restart\")\n \n #set up psad\n runcmd(\"apt-get -y install psad\")\n modify_config(r'^ENABLE_AUTO_IDS\\s+?N;$', 'ENABLE_AUTO_IDS\\tY;', '/etc/psad/psad.conf')\n modify_config(r'^ENABLE_AUTO_IDS_EMAILS\\s+?Y;$', 'ENABLE_AUTO_IDS_EMAILS\\tN;', '/etc/psad/psad.conf')\n for f in ['/etc/ufw/before.rules', '/etc/ufw/before6.rules']:\n modify_config(r'^# End required lines.*?# allow all on loopback$',\n '# End required lines\\n\\n#CUSTOM: for psad\\n-A INPUT -j LOG\\n-A FORWARD -j LOG\\n\\n# allow all on loopback',\n f, dotall=True)\n runcmd(\"psad -R && psad --sig-update\")\n runcmd(\"service ufw restart\")\n runcmd(\"service psad restart\")\n \n #set up chkrootkit, rkhunter\n runcmd(\"apt-get -y install rkhunter chkrootkit\")\n runcmd('bash -c \"rkhunter --update; exit 0\"')\n runcmd(\"rkhunter --propupd\")\n runcmd('bash -c \"rkhunter --check --sk; exit 0\"')\n runcmd(\"rkhunter --propupd\")\n \n #logwatch\n runcmd(\"apt-get -y install logwatch libdate-manip-perl\")\n \n #apparmor\n runcmd(\"apt-get -y install apparmor apparmor-profiles\")\n \n #auditd\n #note that auditd will need a reboot to fully apply the rules, due to it operating in \"immutable mode\" by default\n runcmd(\"apt-get -y install auditd audispd-plugins\")\n runcmd(\"install -m 0640 -o root -g root -D %s/linux/other/audit.rules /etc/audit/rules.d/counterblock.rules\" % dist_path)\n modify_config(r'^USE_AUGENRULES=.*?$', 'USE_AUGENRULES=\"yes\"', '/etc/default/auditd')\n runcmd(\"service auditd restart\")\n\n #iwatch\n runcmd(\"apt-get -y install iwatch\")\n modify_config(r'^START_DAEMON=.*?$', 'START_DAEMON=true', '/etc/default/iwatch')\n runcmd(\"install -m 0644 -o root -g root -D %s/linux/other/iwatch.xml /etc/iwatch/iwatch.xml\" % dist_path)\n modify_config(r'guard email=\"root@localhost\"', 'guard email=\"noreply@%s\"' % socket.gethostname(), '/etc/iwatch/iwatch.xml')\n runcmd(\"service iwatch restart\")",
"def _DisableRootFsVerification(self):\n # 2 and 4 are the kernel partitions.\n for partition in [2, 4]:\n self.RunCmdOnDevice(['/usr/share/vboot/bin/make_dev_ssd.sh',\n '--partitions', str(partition),\n '--remove_rootfs_verification', '--force'])\n\n # Restart, wait a bit, and re-establish the SSH master connection.\n # We need to close the connection gracefully, then run the shutdown command\n # without using a master connection. port_forward=True bypasses the master\n # connection.\n self.CloseConnection()\n self.RunCmdOnDevice(['reboot'], port_forward=True)\n time.sleep(30)\n self.OpenConnection()",
"def set_power(sid):\n # Resolve the passed parameters if any\n timer = None\n os = None\n if request.json:\n if timer in request.json:\n timer = request.json.get('timer')\n if os in request.json:\n os = request.json.get('os')\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n state = hosts.get(db, sid)['state']\n \n if state == 'on':\n # The host is on -- turn it off\n # TODO make a unix shell util file\n # TODO make a windows util file\n return\n elif state == 'off':\n # The host is off -- turn it on\n if timer is not None:\n sleep(timer)\n netutil.wake_on_lan(db, sid)\n ret = {'power': {'state': 'on'}}\n return jsonify(ret)\n # TODO find a keyboard driver and implement OS parameter",
"def autodisable_cloud(cloud):\n log.warning(\"Autodisabling %s\", cloud)\n cloud.ctl.disable()\n title = \"Cloud %s has been automatically disabled\" % cloud.name\n message = \"%s after multiple failures to connect to it.\" % title\n notify_user(cloud.owner, title=title, message=message, email_notify=True)",
"def check_secure():\n return get_config_handler().check_secure()",
"def __init__(__self__, *,\n enable_integrity_monitoring: Optional[pulumi.Input[bool]] = None,\n enable_secure_boot: Optional[pulumi.Input[bool]] = None):\n if enable_integrity_monitoring is not None:\n pulumi.set(__self__, \"enable_integrity_monitoring\", enable_integrity_monitoring)\n if enable_secure_boot is not None:\n pulumi.set(__self__, \"enable_secure_boot\", enable_secure_boot)",
"async def _hardcore_setheist(self, ctx):\r\n guild = ctx.guild\r\n config = await self.thief.get_guild_settings(guild)\r\n\r\n if config[\"Hardcore\"]:\r\n config[\"Hardcore\"] = False\r\n msg = \"Hardcore mode now OFF.\"\r\n else:\r\n config[\"Hardcore\"] = True\r\n msg = \"Hardcore mode now ON! **Warning** death will result in credit **and chip wipe**.\"\r\n await self.thief.config.guild(guild).Config.set(config)\r\n await ctx.send(msg)",
"def _turn_on_dev_mode(self):\n if self._device is not None:\n self._char_write(self._BLE_SERVICE_ANTI_DOS,\n [ord(c) for c in self._ANTI_DOS_MESSAGE])\n self._char_write(self._BLE_SERVICE_TX_POWER,\n [self._TX_POWER_VALUE])\n # Sending 0x01 to the wake service wakes the sphero.\n self._char_write(self._BLE_SERVICE_WAKE, [0x01])",
"def activate(self):\n if not self._env.enable_registration:\n return\n legacy_key = '{}:{}'.format(self._env.flask_host, self._env.flask_port)\n self._key = self._env.get('my_ident', legacy_key, 'microservice')\n LoopingCall(self.ping).start(5, now=False)",
"def get_secure_boot_state(self, task):\n return irmc_common.get_secure_boot_mode(task.node)",
"def enable(self):\n # Netmiko reports enable and config mode as being enabled\n if not self.native.check_enable_mode():\n self.native.enable()\n # Ensure device is not in config mode\n if self.native.check_config_mode():\n self.native.exit_config_mode()\n\n log.debug(\"Host %s: Device enabled.\", self.host)",
"def enabled(config):\n enable(config)\n reload_service('apache2')",
"def disable(ctx):\n config_db = ConfigDBConnector()\n config_db.connect()\n config_db.mod_entry(\"NAT_GLOBAL\", \"Values\", {\"admin_mode\": \"disabled\"})",
"def bootloader() -> NoReturn:",
"def sstcp_enabled():\n return common.POWER_CAP in SYSTEM_CAPS",
"def set_protection_enabled(self, c, state):\n self.enable_protection = state",
"def firewallOff():\n pass",
"def force_switch_on(self):\n self.turn_on_modem()",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def shutdown(self):\n self.disable_modulation()\n self.disable()\n super().shutdown()",
"def set_management_https(enabled=True, deploy=False):\n\n if enabled is True:\n value = \"no\"\n elif enabled is False:\n value = \"yes\"\n else:\n raise CommandExecutionError(\n \"Invalid option provided for service enabled option.\"\n )\n\n ret = {}\n\n query = {\n \"type\": \"config\",\n \"action\": \"set\",\n \"xpath\": \"/config/devices/entry[@name='localhost.localdomain']/deviceconfig/system/service\",\n \"element\": \"<disable-https>{}</disable-https>\".format(value),\n }\n\n ret.update(__proxy__[\"panos.call\"](query))\n\n if deploy is True:\n ret.update(commit())\n\n return ret",
"def enable(self, sid):\n return",
"def scp_disable(task):\n cmd = \"no ip scp server enable\"\n task.run(task=netmiko_send_config, config_commands=cmd)\n task.run(task=netmiko_save_config)\n c_print(f\"*** {task.host}: SCP has been disabled ***\")",
"def boot(request, server_ids, server_id):\n try:\n if int(server_id) not in server_ids:\n raise Exception(\"Forbidden: specified Server does not belong to specified Service.\")\n\n server = Server.objects.get(pk=server_id) \n\n pysph = Vsphere(settings.VMWARE[\"address\"], settings.VMWARE[\"username\"], settings.VMWARE[\"password\"], server.sid)\n result = pysph.boot()\n\n if result:\n ActionLogger().log(request.user, \"modified\", \"Booted\", \"vServer %s\" % server.sid)\n return format_ajax_response(True, \"Server booted successfully.\")\n else:\n raise Exception(\"Pysphere's boot() returned False.\")\n except Exception as ex:\n logger.error(\"Failed to boot: %s\" % ex)\n return format_ajax_response(False, \"There was a error booting the server.\")",
"def boot():\r\n print \"\"\"\r\n ###### ## ## ### ## ## ## ## ######## ########\r\n ## ## ## ## ## ## ### ## ## ## ## ## ##\r\n ## #### ## ## #### ## ## ## ## ## ##\r\n ## ## ## ## ## ## ## ## ## ######## ######\r\n ## ## ######### ## #### ## ## ## ## ##\r\n ## ## ## ## ## ## ### ## ## ## ## ##\r\n ###### ## ## ## ## ## ####### ## ## ########\r\n\r\n Version %s-%s\r\n\r\n Multi Purpose Artificial Inelegance Program\r\n Copyright (c) Alexandre Gauthier 2010-2011\r\n All Rights Reserved\r\n \"\"\" % ( constants.VERSION, constants.TAGNAME )\r\n\r\n # Initialize log\r\n # TODO: The values should be read from config file.\r\n log.init_log('cyanure.log', 'DEBUG')\r\n\r\n logger.info(\"Cyanure system init: Version %s (%s)\" % (\r\n constants.VERSION, constants.TAGNAME ))",
"def run():\r\n autostartup()\r\n\r\n if settings.FEATURES.get('USE_CUSTOM_THEME', False):\r\n enable_theme()\r\n\r\n if settings.FEATURES.get('USE_MICROSITES', False):\r\n enable_microsites()\r\n\r\n if settings.FEATURES.get('ENABLE_THIRD_PARTY_AUTH', False):\r\n enable_third_party_auth()",
"def scp_enable(task):\n cmd = \"ip scp server enable\"\n task.run(task=netmiko_send_config, config_commands=cmd)\n c_print(f\"*** {task.host}: SCP has been enabled ***\")",
"def enable_snat(self) -> bool:\n return pulumi.get(self, \"enable_snat\")",
"def maintainance(self, on_off, instance_type):\n print((\"enabling\" if on_off else \"disabling\") + \" Maintainer mode\")\n tries = 60\n while True:\n reply = self.send_request(\n instance_type,\n requests.put,\n \"/_admin/cluster/maintenance\",\n '\"on\"' if on_off else '\"off\"',\n )\n if len(reply) > 0:\n print(\"Reply: \" + str(reply[0].text))\n if reply[0].status_code == 200:\n return\n print(f\"Reply status code is {reply[0].status_code}. Sleeping for 3 s.\")\n time.sleep(3)\n tries -= 1\n else:\n print(\"Reply is empty. Sleeping for 3 s.\")\n time.sleep(3)\n tries -= 1\n if tries <= 0:\n action = \"enable\" if on_off else \"disable\"\n raise Exception(f\"Couldn't {action} maintainance mode!\")",
"def use_spi():\n _LIB.oled_click_use_spi()",
"def disable_pki(client, mount_point=\"pki\"):\n client.sys.disable_secrets_engine(mount_point)",
"def boot_config():\n # quick check to grab a config file from /boot partition.\n # this function helps users who cannot SSH/access the Pi,\n # but can access the microSD card\n if os.path.exists(BOOT_CONFIG_PATH):\n print(\"Configuration loaded from /boot directory.\")\n with open(BOOT_CONFIG_PATH) as boot_file:\n with open(CONFIG_FILE_PATH, 'w+') as config_file:\n for line in boot_file:\n config_file.write(line)",
"def start_secure_mqtt_server(run_event):\n print('START SECURE MQTT SERVER')\n cmd = ['mosquitto', '-v', '-c', '/etc/mosquitto/mosquitto-ssl.conf']\n process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=False)\n while run_event.is_set():\n time.sleep(1)\n process.terminate()\n process.wait()",
"def enable_ultra_ssd(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_ultra_ssd\")",
"def unconfigure_enable_password(device,secret=True,privilege=None):\n cmd=\"no enable\"\n if secret :\n cmd+=\" secret\"\n else :\n cmd+=\" password\"\n if privilege :\n cmd+=f\" level {privilege}\"\n\n try:\n device.configure(cmd)\n except SubCommandFailure as e:\n raise SubCommandFailure(\n f'Could not unconfigure enable password or secret:\\n{e}'\n )",
"def setprivileged(miner: Miner, login, allowsetting):\n commands = get_changeconfigcommands(getminerfilename(miner), 'api-allow', allowsetting)\n sendcommands_and_restart(miner, login, commands)",
"def do(self):\n call_command('activate-ssl')",
"def disabled(config):\n disable(config)\n reload_service('apache2')",
"def setLoopback(self, enable): \n if enable == True:\n DPxEnableDoutDinLoopback()\n else:\n DPxDisableDoutDinLoopback()",
"def enable():\n if not _status_apf():\n return __apf_cmd(\"-s\")",
"def configure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Disables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Enable global stackwise-virtual dual-active recovery-reload')\n return output",
"def changeBluetoothService(enable=True):\n \n #blueServiceStatus = os.popen('systemctl status bluetooth.service').read()\n ServStatStdout = execCommand('systemctl status bluetooth.service')\n \n if enable:\n if not 'active (running)' in ServStatStdout:\n checkRoot()\n #blueServiceStatus = os.popen('sudo systemctl start bluetooth.service').read()\n blueServStartStdout = execCommand('sudo systemctl start bluetooth.service')\n return\n \n if not enable:\n if not 'inactive (dead)' in ServStatStdout:\n checkRoot()\n #blueServiceStatus = os.popen('sudo systemctl stop bluetooth.service').read()\n blueServStopStdout = execCommand('sudo systemctl stop bluetooth.service')\n return",
"def _doEnableRegulation(self):\n self._cmdRegulOn()",
"def setup_salt():\n server = [s for s in env.bootmachine_servers if s.public_ip == env.host][0]\n\n if env.host == env.master_server.public_ip:\n run(\"touch /etc/salt/master\")\n append(\"/etc/salt/master\", \"file_roots:\\n base:\\n - {0}\".format(\n settings.REMOTE_STATES_DIR))\n append(\"/etc/salt/master\", \"pillar_roots:\\n base:\\n - {0}\".format(\n settings.REMOTE_PILLARS_DIR))\n run(\"systemctl enable salt-master\")\n run(\"touch /etc/salt/minion\")\n append(\"/etc/salt/minion\", \"master: {0}\".format(env.master_server.private_ip))\n append(\"/etc/salt/minion\", \"id: {0}\".format(server.name))\n append(\"/etc/salt/minion\", \"grains:\\n roles:\")\n for role in server.roles:\n append(\"/etc/salt/minion\", \" - {0}\".format(role))\n run(\"systemctl enable salt-minion\")",
"def is_bootable(self):\n return self.bootable_flag == 0x80",
"def _toggle_server(self):\r\n\t\t_logger.debug(\"Toggle server button is pressed.\")\r\n\r\n\t\tif not comm_server.is_running():\r\n\t\t\tserver_ip = self.children[\"entry_IP\"].get()\r\n\t\t\tserver_port = int(self.children[\"entry_port\"].get())\r\n\t\t\tif not comm_server.start_server(server_ip, server_port):\r\n\t\t\t\treturn\r\n\t\t\tself._save_server_config(server_ip, server_port)\r\n\r\n\t\t\tself.children[\"btn_toggle_server\"].config(text = \"關閉伺服器\")\r\n\t\t\tself._update_connection_num(\"\")\r\n\t\telse:\r\n\t\t\tcomm_server.stop_server()\r\n\t\t\tself.children[\"btn_toggle_server\"].config(text = \"啟動伺服器\")\r\n\t\t\tself.children[\"label_connections\"].config(text = \"連接數: -/-\")",
"def set_password(self, system):\n if system[\"embedded_available\"] and system[\"controller_addresses\"]:\n for url in [\"https://%s:8443/devmgr\" % system[\"controller_addresses\"][0],\n \"https://%s:443/devmgr\" % system[\"controller_addresses\"][0],\n \"http://%s:8080/devmgr\" % system[\"controller_addresses\"][0]]:\n try:\n rc, response = self._request(\"%s/utils/login?uid=admin&xsrf=false&onlycheck=true\" % url, ignore_errors=True, url_username=\"admin\",\n url_password=\"\", validate_certs=False)\n\n if rc == 200: # successful login without password\n system[\"password_set\"] = False\n if system[\"password\"]:\n try:\n rc, storage_system = self._request(\"%s/v2/storage-systems/1/passwords\" % url, method=\"POST\", url_username=\"admin\",\n headers=self.DEFAULT_HEADERS, url_password=\"\", validate_certs=False,\n data=json.dumps({\"currentAdminPassword\": \"\", \"adminPassword\": True,\n \"newPassword\": system[\"password\"]}))\n\n except Exception as error:\n system[\"failed\"] = True\n self.module.warn(\"Failed to set storage system password. Array [%s].\" % system[\"ssid\"])\n break\n\n elif rc == 401: # unauthorized\n system[\"password_set\"] = True\n break\n except Exception as error:\n pass\n else:\n self.module.warn(\"Failed to retrieve array password state. Array [%s].\" % system[\"ssid\"])\n system[\"failed\"] = True",
"def turn_on_syn_cookies(self):\n f = open(\"/etc/sysctl.conf\", \"r\")\n sysyctl = f.readlines()\n syn_cookies_on = False\n for line in sysyctl: # check if syn cookies are already turned on\n if re.search(r'^net.ipv4.tcp_syncookies = 1', line):\n syn_cookies_on = True\n else:\n syn_cookies_on = False\n f.close()\n if not syn_cookies_on:\n f = open(\"/etc/sysctl.conf\", \"a\")\n f.write(\"net.ipv4.tcp_syncookies = 1\")\n f.close()\n subprocess.call(['sysctl', '-p'])\n print(\"SYN cookies have been turn on\")",
"def is_http_boot_requested(node):\n http_boot_requested = (\n str(node.driver_info.get('enable_uefi_httpboot', 'false')).lower())\n return http_boot_requested == 'true'",
"def toggle_server(self):\n name = request.params.get('name', g.DEFAULT_SERVER)\n log.debug('toggle_server(%s)' % name)\n servers = model.Session.query(model.Server)\n server = servers.filter(model.Server.name == name).one()\n server.server_on = not server.server_on\n model.Session.update(server)\n model.Session.commit()\n redirect_to('/admin/dashboard')",
"def turn_on(self):\n self._remote.power(1)",
"def test_update_bios_boot_mode(self):\n pass",
"def secure(self) -> bool:\n return self.get_state(self.args[CONF_OVERALL_SECURITY_STATUS]) == \"Secure\"",
"def wait_for_linux(self):\n super(RPI, self).wait_for_linux()\n\n self.sendline('cat /etc/issue')\n if 0 == self.expect(['OpenEmbedded'] + self.prompt):\n self.routing = False\n self.wan_iface = \"eth0\"\n self.lan_iface = None\n self.expect(self.prompt)\n\n self.sendline(\n 'dmcli eRT getv Device.DeviceInfo.X_RDKCENTRAL-COM_CaptivePortalEnable'\n )\n if self.expect([\n ' type: bool, value: false',\n 'dmcli: not found'\n ] + self.prompt) > 1:\n self.sendline(\n 'dmcli eRT setv Device.DeviceInfo.X_RDKCENTRAL-COM_CaptivePortalEnable bool false'\n )\n self.expect(self.prompt)\n self.sendline('reboot')\n super(RPI, self).wait_for_linux()",
"def modsToggle(self, connect=False, verbose=True):\n try:\n if self._mods_available:\n if connect: \n if verbose: self.logger.info('Enabling mods...')\n self.syslogger.info('Enabling mods...')\n self.sh('echo {} > {}'.format(CONFIG.DEVICE.MODS_ON_OFF['on'], CONFIG.DEVICE.MODS_PATH))\n self._mods_enabled = False\n else:\n if verbose: self.logger.info('Disabling mods...')\n self.syslogger.info('Disabling mods...')\n self.sh('echo {} > {}'.format(CONFIG.DEVICE.MODS_ON_OFF['off'], CONFIG.DEVICE.MODS_PATH))\n self._mods_enabled = False\n except Exception as e:\n self.logger.error('Mods connot be enabled/disabled: {}'.format(e), self.syslogger)",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def set_bootloader_mode(self, mode):\n self.check_validity()\n\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')",
"def enable(self):\n\t\tresponse = self.client.post(self._endpoint + \"/enable\")\n\t\treturn bool(response.json[\"success\"])",
"def start_server(self):\n if self.esp_mgr.ap:\n self.server_socket = adafruit_esp32spi_socket.socket()\n self.esp_mgr.esp.start_server(23, self.server_socket.socknum)",
"def boot(self, boot):\n\n self._boot = boot",
"def config_mode(self, config_command='sudo su'):\n return self.enable(cmd=config_command)",
"def SPIwriteenable(self):\n data=[0x06];\n self.SPItrans(data);",
"def _startup():\n from octoprint_dashboard.model import User, Config\n if Config.query.scalar() is None:\n print(\"No config, add config via command 'python -m flask config'\")\n shutdown_server()\n if User.query.filter_by(superadmin=True).count() == 0:\n print(\"No superadmin, add superadmin via command 'python -m flask add_superadmin <username>'\")\n shutdown_server()\n\n scheduler.start() # starts background task scheduler\n zeroconf_browser.start() # starts MDNS service discovery",
"def turn_on(self) -> None:\n self._monoprice.set_power(self._zone_id, True)",
"def startup(self):\n started = False\n self.start_driver_ctrl()\n return started",
"def turnLightingSystemOn():\n dislin.light('ON')",
"def turn_on(self, **kwargs):\n self.smartplug.turn_on()",
"def reboot(self, client, sec):\r\n result = client.reboot(sec)\r\n return result",
"def test_patch_bios_boot_mode(self):\n pass",
"def bootstrap():\n validate_configurator_version()\n\n # put new mkinitcpio.conf in place\n run(\"mv /etc/mkinitcpio.conf.pacnew /etc/mkinitcpio.conf\")\n sed(\"/etc/mkinitcpio.conf\",\n 'MODULES=\"\"',\n 'MODULES=\"xen-blkfront xen-fbfront xen-kbdfront xen-netfront xen-pcifront xenbus_probe_frontend xenfs\"') # nopep8\n sed(\"/etc/mkinitcpio.conf\",\n 'HOOKS=\"base udev autodetect modconf block filesystems keyboard fsck',\n 'HOOKS=\"base udev block filesystems shutdown autodetect\"')\n\n # upgrade pacakges\n run(\"pacman --noconfirm -Syu\")\n\n # put new pacman.conf in place\n run(\"mv /etc/pacman.conf.pacnew /etc/pacman.conf\")\n\n # install essential packages\n run(\"pacman --noconfirm -S base-devel\")\n run(\"pacman --noconfirm -S curl git rsync\")\n\n # create a user, named 'aur', to safely install AUR packages under fakeroot\n # uid and gid values auto increment from 1000\n # to prevent conficts set the 'aur' user's gid and uid to 902\n run(\"groupadd -g 902 aur && useradd -m -u 902 -g 902 -G wheel aur\")\n\n # allow users in the wheel group to sudo without a password\n uncomment(\"/etc/sudoers\", \"wheel.*NOPASSWD\")\n\n # install yaourt and upgrade non-pacman rackspace installed packages\n sudo(\"rm -rf /home/aur/.builds && mkdir /home/aur/.builds/\", user=\"aur\")\n with cd(\"/home/aur/.builds/\"):\n sudo(\"bash <(curl aur.sh) -si --noconfirm package-query yaourt\", user=\"aur\")\n sudo(\"yaourt --noconfirm -S xe-guest-utilities\", user=\"aur\")\n\n # allow fabric to sftp with contrib.files.put\n # http://stackoverflow.com/questions/10221839/cant-use-fabric-put-is-there-any-server-configuration-needed # nopep8\n # change before reboot because then the sshd config will be reloaded\n # sed(\"/etc/ssh/sshd_config\", \"Subsystem sftp /usr/lib/openssh/sftp-server\",\n # \"Subsystem sftp internal-sftp\")\n\n # systemd\n sed(\"/boot/grub/menu.lst\",\n \"kernel /boot/vmlinuz-linux root=/dev/xvda1 ro console=hvc0\",\n \"kernel /boot/vmlinuz-linux root=/dev/xvda1 ro console=hvc0 init=/usr/lib/systemd/systemd\")\n reboot()\n if not contains(\"/proc/1/comm\", \"systemd\"):\n abort(\"systemd is not installed properly\")\n server = [s for s in env.bootmachine_servers if s.public_ip == env.host][0]\n run(\"hostnamectl set-hostname {0}\".format(server.name))\n run(\"mv /etc/locale.gen.pacnew /etc/locale.gen.conf\")\n uncomment(\"/etc/locale.gen\", \"en_US.UTF-8 UTF-8\")\n uncomment(\"/etc/locale.gen\", \"en_US ISO-8859-1\")\n run(\"locale-gen\")\n run(\"localectl set-locale LANG='en_US.utf8'\")\n run(\"timedatectl set-timezone US/Central\")",
"def enter_sleep_mode(self):\n self.execute(SdpI2cCmdEnterSleepMode())",
"async def toggle(self, ctx):\r\n serverid = ctx.message.server.id\r\n if self.adkillr[serverid]['toggle'] is True:\r\n self.adkillr[serverid]['toggle'] = False\r\n e = discord.Embed(description='**AntiAdv is now disabled.**')\r\n await self.bot.say(embed=e)\r\n elif self.adkillr[serverid]['toggle'] is False:\r\n self.adkillr[serverid]['toggle'] = True\r\n e = discord.Embed(description='**AntiAdv is now enabled.**')\r\n await self.bot.say(embed=e)\r\n dataIO.save_json(\"data/adkillr/adkillr.json\", self.adkillr)"
] | [
"0.76249284",
"0.7167012",
"0.6691933",
"0.6566104",
"0.6534776",
"0.6505082",
"0.64091456",
"0.6071081",
"0.6065844",
"0.6018659",
"0.5990192",
"0.5968288",
"0.5868448",
"0.5822843",
"0.58050734",
"0.5786538",
"0.5715072",
"0.56919736",
"0.5675299",
"0.565694",
"0.5639137",
"0.5637952",
"0.5617222",
"0.5605255",
"0.55727106",
"0.5567131",
"0.5553382",
"0.55493206",
"0.55031747",
"0.5494628",
"0.54925984",
"0.54922867",
"0.5483548",
"0.5469377",
"0.54531354",
"0.5449086",
"0.5434936",
"0.54346967",
"0.54345125",
"0.5430613",
"0.54196435",
"0.5400082",
"0.5395885",
"0.53893214",
"0.5379679",
"0.53721493",
"0.53664833",
"0.5347434",
"0.53457654",
"0.53444827",
"0.5326186",
"0.532214",
"0.53127635",
"0.5294887",
"0.52829015",
"0.5258414",
"0.52418286",
"0.523708",
"0.5233778",
"0.52137977",
"0.5203505",
"0.52002156",
"0.51995313",
"0.5178224",
"0.5177106",
"0.51738906",
"0.51585054",
"0.5154257",
"0.5143415",
"0.5133309",
"0.5132584",
"0.513222",
"0.5128935",
"0.5126967",
"0.51260734",
"0.51212776",
"0.5116894",
"0.5116784",
"0.5113224",
"0.51099074",
"0.5109173",
"0.5105455",
"0.51029706",
"0.51021296",
"0.50978655",
"0.50961727",
"0.50893605",
"0.5088583",
"0.50824094",
"0.50798696",
"0.5079478",
"0.50710964",
"0.50609684",
"0.50556874",
"0.50493294",
"0.5042113",
"0.50362706",
"0.503495",
"0.5029932",
"0.5024777"
] | 0.6860936 | 2 |
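The set_secure_boot_mode document above only patches the SecureBoot BIOS resource when the server already boots in UEFI mode, so callers must handle the Legacy-BIOS error path. A hedged calling sketch follows; the client object and the proliantutils import are assumptions, since the row shows only the method itself:

def try_set_secure_boot(client, enable=True):
    # 'client' is assumed to expose the method documented above, e.g.
    # an HPE iLO client; IloCommandNotSupportedInBiosError is assumed
    # to live in proliantutils.exception, matching the raise site.
    from proliantutils import exception
    try:
        client.set_secure_boot_mode(enable)
        return True
    except exception.IloCommandNotSupportedInBiosError:
        # Raised when the machine is in Legacy BIOS mode, where the
        # SecureBoot resources cannot be changed.
        return False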
Reset secure boot keys to manufacturing defaults. | def reset_secure_boot_keys(self):
if self._is_boot_mode_uefi():
self._change_secure_boot_settings('ResetToDefaultKeys', True)
else:
msg = ('System is not in UEFI boot mode. "SecureBoot" related '
'resources cannot be changed.')
raise exception.IloCommandNotSupportedInBiosError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def reset(ctx):\n\n controller = ctx.obj['controller']\n click.echo('Resetting OATH data...')\n old_id = controller.id\n controller.reset()\n\n settings = ctx.obj['settings']\n keys = settings.setdefault('keys', {})\n if old_id in keys:\n del keys[old_id]\n settings.write()\n\n click.echo(\n 'Success! All OATH credentials have been cleared from your YubiKey.')",
"def reset(ctx, force):\n\n force or click.confirm(\n \"WARNING! This will delete all stored OATH accounts and restore factory \"\n \"settings. Proceed?\",\n abort=True,\n err=True,\n )\n\n session = ctx.obj[\"session\"]\n click.echo(\"Resetting OATH data...\")\n old_id = session.device_id\n session.reset()\n\n keys = ctx.obj[\"oath_keys\"]\n if old_id in keys:\n del keys[old_id]\n keys.write()\n logger.info(\"Deleted remembered access key\")\n\n click.echo(\"Success! All OATH accounts have been deleted from the YubiKey.\")",
"def reset_pki():\n with open(f'{pki_dir}/serial', 'w') as serial_file:\n serial_file.write('00000000')\n serial_file.close()\n os.remove(f'{pki_dir}/*')",
"def reset_keys(self):\n self.UP_KEY, self.DOWN_KEY, self.START_KEY, self.BACK_KEY = False, False, False, False",
"def reset_keys(self):\n self.UP_KEY, self.DOWN_KEY, self.START_KEY, self.BACK_KEY = False, False, False, False",
"def reboot_fpga(self):\n log.info(\"Booting FPGA from SPI prom\")\n self.set(\"FPGA_CTRL\", \"boot_fpga\", 1);",
"def set_secure_boot_state(self, task, state):\n return irmc_common.set_secure_boot_mode(task.node, state)",
"def set_secure_boot_mode(self, secure_boot_enable):\n sushy_system = self._get_sushy_system()\n try:\n sushy_system.secure_boot.enable_secure_boot(secure_boot_enable)\n except exception.InvalidInputError as e:\n msg = (self._('Invalid input. Error %(error)s')\n % {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to set secure '\n 'boot settings on the server. Error: %(error)s')\n % {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def shutdown(self):\n auth.debug(\"DICEKey shutdown called\")\n super().shutdown()\n AuthenticatorCryptoProvider.shutdown_providers()",
"def restore_config(self):\n self._clear_previous_windows_assigment()\n self._restart_i3_config()",
"def hotkeys_resetAll():\n _set = validate_hotkeySet(False)\n log.warning(\"All hotkeys on '{0}' set reset to maya defaults\".format(_set))\n mc.hotkey(fs = True )",
"def reset_options(self, keylist):\r\n return self.sendAndRecv(\"RESETCONF %s\\r\\n\"%(\" \".join(keylist)))",
"def set_secure_boot_mode(self, secure_boot_enable):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('SecureBootEnable',\n secure_boot_enable)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def reset_user(self):\n\n if self.resin.auth.is_logged_in():\n self.wipe_application()\n self.resin.models.key.base_request.request(\n 'user__has__public_key', 'DELETE',\n endpoint=self.resin.settings.get('pine_endpoint'), login=True\n )",
"def resetDevice(self):\n reset_pkt = [START_BYTE_1, START_BYTE_2, RESET_MTYPE, 0x00, HEADER_SIZE + RESET_DATA_SIZE]\n reset_pkt.extend(RESET_KEY_LE)\n\n crc = crc8(reset_pkt)\n reset_pkt.append(crc)\n\n self.write(bytearray(reset_pkt))",
"def restart_salt():\n stop_salt()\n start_salt()",
"def reset(self,bootloader=False):\n self.send_packet('\\xff' if bootloader else '\\xfe')",
"def hard_reset(self) -> None:\n os.system('rm -fr \"$HOME/.daf/\"')",
"def svc_reset_system_mode(self) -> None:\n self._call_client_api(self._device.reset_mode)",
"def soft_reset():",
"def resetDeviceStates(self):",
"def __mode_reset(self):\n\t\tfor key,val in self.ms_all.iteritems():\n\t\t\tval.reset_restart()",
"def unconfigure_service_password_encryption(device):\n\n try:\n device.configure(\"no service password-encryption\")\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not unconfigure service password encryption\"\n )",
"def reset(self):\n self.logger.debug(\"Resetting %s\", self.key)\n self.driver.reset(self.key)",
"def _reset(cls):\r\n cls._CONFIGURED = False\r\n cls._ENABLED = {}",
"def reset(self):\n\n ## Turn off controller to bring to a known state\n try:\n self.logger.info(\"Turning off sta3800 controller (sta3800_off).\")\n ccdsetup.sta3800_off()\n except Exception:\n self.logger.exception(\"Unable to turn off controller! State may be unknown.\")\n raise\n else:\n self.logger.info(\"Controller turned off successfully.\")\n\n ## Initialize controller\n try:\n self.logger.info(\"Turning on sta3800 controller (sta3800_setup).\")\n ccdsetup.sta3800_setup()\n except Exception:\n self.logger.exception(\"Unable to turn on sta3800 controller!\")\n raise\n else:\n self.logger.info(\"Controller turned on successfully.\")",
"def _DisableRootFsVerification(self):\n # 2 and 4 are the kernel partitions.\n for partition in [2, 4]:\n self.RunCmdOnDevice(['/usr/share/vboot/bin/make_dev_ssd.sh',\n '--partitions', str(partition),\n '--remove_rootfs_verification', '--force'])\n\n # Restart, wait a bit, and re-establish the SSH master connection.\n # We need to close the connection gracefully, then run the shutdown command\n # without using a master connection. port_forward=True bypasses the master\n # connection.\n self.CloseConnection()\n self.RunCmdOnDevice(['reboot'], port_forward=True)\n time.sleep(30)\n self.OpenConnection()",
"def _soft_reset(self):\n self._reset_specific_envs(self.episodes_done)\n self._update_other_info()",
"def reset(self):\n command = \"export STLINK_DEVICE=\" + self.stlink.port + \"; st-flash reset\"\n subprocess.run(command, shell=True)\n time.sleep(1)",
"def safe_boot_disabled(self, safe_boot_disabled):\n\n self._safe_boot_disabled = safe_boot_disabled",
"def resetSecret(self):\n self.secret = str(uuid())\n self.put()",
"def reset_factory(self):\n self.set_vcp_value_by_name('Restore Factory Defaults', 1)",
"def test_rekey_defaults(self, settings):\n old_key = b'0' * 32\n new_key = b'1' * 32\n\n settings.CHITON_ENCRYPTION_KEY = new_key\n settings.CHITON_PREVIOUS_ENCRYPTION_KEY = old_key\n\n encrypted = encrypt('message', key=old_key)\n rekeyed = rekey(encrypted)\n\n assert decrypt(rekeyed) == 'message'",
"def _change_secure_boot_settings(self, property, value):\n system = self._get_host_details()\n # find the BIOS URI\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = (' \"SecureBoot\" resource or feature is not '\n 'supported on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # Change the property required\n new_secure_boot_settings = {}\n new_secure_boot_settings[property] = value\n\n # perform the patch\n status, headers, response = self._rest_patch(\n secure_boot_uri, None, new_secure_boot_settings)\n\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n # Change the bios setting as a workaround to enable secure boot\n # Can be removed when fixed for Gen9 snap2\n val = self._get_bios_setting('CustomPostMessage')\n val = val.rstrip() if val.endswith(\" \") else val+\" \"\n self._change_bios_setting({'CustomPostMessage': val})",
"def resetToMainSection(self):\n wValue = 0\n wIndex = 0\n wLength = 0\n try:\n self.__bootCommand(op.BootloaderCommands.Reset,1,[0,0,0],[])\n except:\n #This will always throw an exception because it disconnects the device and re-enumerates as a normal Power Monitor\n print(\"Resetting to Main Section.\")",
"def reset(self):\n self.reset_dev_via_serial(self.forced_reset_timeout)",
"def restore_default_uac():\n if global_vars['OS']['Version'] == '10':\n write_registry_settings(UAC_DEFAULTS_WIN10, all_users=True)\n else:\n # Haven't checked Win8 settings, only applying minimum set\n write_registry_settings(UAC_DEFAULTS_WIN7, all_users=True)",
"def setdefaults(self):\n res = __library__.MSK_XX_setdefaults(self.__nativep)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)",
"def lockdown_procedure():\n\tprint(\"----------\")\n\tprint_section_header(\"LOCKDOWN\", Fore.BLUE)\n\tprint_confirmation(\"Set secure configuration without user interaction.\")\n\n\t# Get sudo priv\n\tsp.run(\"sudo -E -v\", shell=True, stdout=sp.PIPE)\n\n\t####\n\t# FIREWALL\n\t####\n\n\tsp.run(['sudo', 'launchctl', 'load', '/System/Library/LaunchDaemons/com.apple.alf.agent.plist'], stdout=sp.PIPE)\n\tsp.run(['sudo', 'launchctl', 'load', '/System/Library/LaunchAgents/com.apple.alf.useragent.plist'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setglobalstate', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setloggingmode', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setstealthmode', 'on'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsigned', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsignedapp', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', 'pkill', '-HUP', 'socketfilterfw'], stdout=sp.PIPE)\n\n\t####\n\t# SYSTEM PROTECTION\n\t####\n\n\tsp.run('sudo spctl --master-enable', shell=True, stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsigned', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo', '/usr/libexec/ApplicationFirewall/socketfilterfw', '--setallowsignedapp', 'off'], stdout=sp.PIPE)\n\tsp.run(['sudo defaults write /Library/Preferences/SystemConfiguration/com.apple.captive.control Active -bool false'], stdout=sp.PIPE)\n\n\t####\n\t# METADATA STORAGE\n\t####\n\n\tsp.run(['rm', '-rfv', '\"~/Library/LanguageModeling/*\"', '\"~/Library/Spelling/*\"', '\"~/Library/Suggestions/*\"'])\n\tsp.run(['rm', '-rfv', '\"~/Library/Application Support/Quick Look/*\"'], stdout=sp.PIPE)\n\tsp.run([':>~/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2'], shell=True, stdout=sp.PIPE)\n\n\t####\n\t# USER SAFETY\n\t####\n\n\tsp.run(['defaults', 'write', 'com.apple.screensaver', 'askForPassword', '-int', '1'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'com.apple.screensaver', 'askForPasswordDelay', '-int', '0'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'NSGlobalDomain', 'AppleShowAllExtensions', '-bool', 'true'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'NSGlobalDomain', 'NSDocumentSaveNewDocumentsToCloud', '-bool', 'false'], stdout=sp.PIPE)\n\tsp.run(['defaults', 'write', 'com.apple.finder', 'AppleShowAllFiles', '-boolean', 'true'], shell=True, stdout=sp.PIPE)\n\tsp.run(['killAll', 'Finder'], stdout=sp.PIPE)\n\n\t####\n\t# RESTART\n\t####\n\n\tfinal_configuration()",
"def test_patch_bios_boot_mode(self):\n pass",
"def device_reset(self):\n\t\tlogger.info('Device Reset')\n\t\tself.spi.writebytes([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff])\n\t\tprint(DELIMITER*'-')",
"def resetStoredDefaults( self ):\n keys= list( self._defDict.keys() )\n data= [ self._defDict[ aKey ] for aKey in keys ]\n \n self.prefObj.save( group= self.prefGroup, name= keys, data= data )\n self.resetSelfWithDefaults()",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def reset_defaults(self):\n self.domain_list = [{\"domain\": \"mywebsite%s.com\" % uuid.uuid1()}]\n self.origin_list = [{\"origin\": \"mywebsite1.com\",\n \"port\": 443,\n \"ssl\": False}]\n self.caching_list = [{\"name\": \"default\", \"ttl\": 3600},\n {\"name\": \"home\",\n \"ttl\": 1200,\n \"rules\": [{\"name\": \"index\",\n \"request_url\": \"/index.htm\"}]}]\n self.service_name = str(uuid.uuid1())\n self.flavor_id = self.test_config.default_flavor",
"def reset_to_factory(self):\n self._log_msg_start(\"Reset to factory settings\")\n # Order of execution is clear, save, load. This will copy the factory default\n # settings from ROM to flash, load from flash, and activate.\n device_mask_dict = dict(\n deviceDevBbr=1, # devSpiFlash device battery backed RAM\n deviceDevFlash=1, # device Flash\n deviceDevEeprom=1, # device EEPROM\n deviceDeviceSpiFlash=1, # device SPI Flash\n )\n # self._ubx.send(\n # \"CFG-CFG\",\n # clearMask=0xFFFF,\n # saveMask=0xFFFF,\n # loadMask=0xFFFF,\n # deviceMask=device_mask_dict,\n # )\n self._ubx.send(\n \"CFG-CFG\",\n clearMask=0xFFFF,\n saveMask=0x0000,\n loadMask=0xFFFF,\n deviceMask=device_mask_dict,\n )\n self._ubx.send(\n \"CFG-CFG\",\n clearMask=0x0000,\n saveMask=dict(\n msgConf=1,\n ),\n loadMask=dict(),\n deviceMask=device_mask_dict,\n )",
"def reset():\n Vessel.reset_instances()",
"def restore_defaults(self):\n if messagebox.askyesno(\n message='Are you sure? '\n 'ALL SETTINGS will be reset to game defaults.\\n'\n 'You may need to re-install graphics afterwards.',\n title='Reset all settings to Defaults?', icon='question'):\n self.lnp.restore_defaults()\n messagebox.showinfo(\n self.root.title(),\n 'All settings reset to defaults!')",
"def reset_api_key(name):\r\n user = User.query.filter_by(name=name).first()\r\n if not user:\r\n return abort(404)\r\n if current_user.name != user.name:\r\n return abort(403)\r\n\r\n title = (\"User: %s · Settings\"\r\n \"- Reset API KEY\") % current_user.fullname\r\n user = db.session.query(model.user.User).get(current_user.id)\r\n user.api_key = model.make_uuid()\r\n db.session.commit()\r\n cached_users.delete_user_summary(user.name)\r\n msg = gettext('New API-KEY generated')\r\n flash(msg, 'success')\r\n return redirect(url_for('account.profile', name=name))",
"def reset_use_case(self, save: bool=None):\n self.pm.reset_use_case()\n self.pm_persist(save)",
"def reset(self):\n self.enc_len = None\n self.precomputed_enc_h = None\n self.mask = None",
"async def reboot(self) -> None:\n await self._api.call('system', 'reboot')",
"def test_update_bios_boot_mode(self):\n pass",
"def reset(self):\n self._faux._default_setup()\n self._faux._update()",
"def invalidateKrusty (self):\n if self.isKrustyValid():\n self.mountMainPartition()\n\n installFilePath = self._getKrustyInstallationFilePath()\n if os.path.exists(installFilePath):\n os.remove(installFilePath)\n #self._runCommandRaiseIfFail(\"rm -rf %s\" % (self._getKrustyInstallationFilePath()))\n\n self._log(\"invalidate-krusty\").notice(\"secure digital software (krusty) is invalidated\")\n else:\n self._log(\"invalidate-krusty\").notice(\"secure digital software (krusty) is already invalid\")",
"def reset():\n if os.name == \"posix\": #In linux\n os.system(\"clear\")\n elif os.name == (\"ce\", \"nt\", \"dos\"): #In windows\n os.system(\"cls\")",
"async def reset(self, ctx):\n await self.config.clear_all_guilds()\n await ctx.send(\"Reset all settings to default values.\")",
"def hardreset(self, no_sleep=False):\n self.reset_pin.value = False\n time.sleep(0.2)\n self.reset_pin.value = True\n # wait for MicroPyton prompt\n if not no_sleep:\n self.__read_until(b'information.\\r\\n>>>', timeout=10)",
"def _reset(self):\n self._interface.set('fw_wp_en', 'off')",
"def reset( self ):\n self.conf = self.defaults",
"def reboot(self, *args, **kwargs):\n log_tag = self.get_log_tag()\n self.logger.info(\"{} Attempting to reset the Treerunner board\"\n \"\".format(log_tag))\n cmd = \"shutdown > /dev/null 2>&1\"\n self.exec_command_ssh(cmd, background=True)\n self.logger.info(\"{} Waiting for the Treerunner board to come\"\n \" back online\".format(log_tag))\n time.sleep(30)\n # Start the sshd server daemon\n self.start_sshd_server()",
"def reset(self):\n\n\t\tself._send_message(\"RESET\", \"\\x00\")",
"def reset(self):\n self._write(0x16, 1, 3, 0x08)",
"def reset(serial):\n if click.confirm(\n \"Warning: Your credentials will be lost!!! Do you wish to continue?\"\n ):\n print(\"Press the button to confirm -- again, your credentials will be lost!!!\")\n solo.client.find(serial).reset()\n click.echo(\"....aaaand they're gone\")",
"def reset(serial):\n if click.confirm(\n \"Warning: Your credentials will be lost!!! Do you wish to continue?\"\n ):\n print(\"Press the button to confirm -- again, your credentials will be lost!!!\")\n solo.client.find(serial).reset()\n click.echo(\"....aaaand they're gone\")",
"def reset(self):\n # The camera will give no response to this command\n self._serial_io('\\x55\\x99\\x66\\x11', None)\n while True:\n try:\n self.system_state = 0x11\n if self.system_state == 0x11:\n break\n except CygnetExc:\n time.sleep(.2)\n while True:\n self.system_state = 0x12\n time.sleep(.2)\n if self.system_state == 0x16:\n break",
"def reset_defaults(cls, deco_classname):\n # v0.3.0b24 -- use new classmethods\n orig_defaults = cls._classname2SettingsDataOrigDefaults_dict[deco_classname]\n settings_map = cls._classname2SettingsData_dict[deco_classname]\n for name in settings_map:\n settings_map[name].default = orig_defaults[name]",
"def reset_eligibilities(self):\n for key in self.eligibilities.keys():\n self.eligibilities[key] = 0",
"def _restore_keyboard(self):\n if hasattr(self, \"original_kbd_settings\"):\n fd = sys.stdin.fileno()\n termios.tcsetattr(fd, termios.TCSADRAIN, self.original_kbd_settings)",
"def libc_prctl_set_securebits():\n # straight from man capabilities(7):\n # \"An application can use the following call to lock itself, and all of\n # its descendants, into an environment where the only way of gaining\n # capabilities is by executing a program with associated file capabilities\"\n _call_c_style(\n libc,\n \"prctl\",\n PR_SET_SECUREBITS,\n (\n SECBIT_KEEP_CAPS_LOCKED\n | SECBIT_NO_SETUID_FIXUP\n | SECBIT_NO_SETUID_FIXUP_LOCKED\n | SECBIT_NOROOT\n | SECBIT_NOROOT_LOCKED\n ),\n 0,\n 0,\n 0,\n )",
"def enable_secure_boot(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_secure_boot\")",
"def reset_bios_to_default(self):\n # Check if the BIOS resource if exists.\n headers_bios, bios_uri, bios_settings = self._check_bios_resource()\n # Get the BaseConfig resource.\n try:\n base_config_uri = bios_settings['links']['BaseConfigs']['href']\n except KeyError:\n msg = (\"BaseConfigs resource not found. Couldn't apply the BIOS \"\n \"Settings.\")\n raise exception.IloCommandNotSupportedError(msg)\n\n # Check if BIOS resource supports patch, else get the settings\n if not self._operation_allowed(headers_bios, 'PATCH'):\n headers, bios_uri, _ = self._get_bios_settings_resource(\n bios_settings)\n self._validate_if_patch_supported(headers, bios_uri)\n\n status, headers, config = self._rest_get(base_config_uri)\n if status != 200:\n msg = self._get_extended_error(config)\n raise exception.IloError(msg)\n\n new_bios_settings = {}\n for cfg in config['BaseConfigs']:\n default_settings = cfg.get('default', None)\n if default_settings is not None:\n new_bios_settings = default_settings\n break\n else:\n msg = (\"Default Settings not found in 'BaseConfigs' resource.\")\n raise exception.IloCommandNotSupportedError(msg)\n request_headers = self._get_bios_hash_password(self.bios_password)\n status, headers, response = self._rest_patch(bios_uri, request_headers,\n new_bios_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def reset(self):\n self._keyCode = \"\"\n self._keyCodeCount = 0\n self._keyCodeTime = 0.0",
"def reset(self):\n self.keyToFile=dict()",
"def disable_pki(client, mount_point=\"pki\"):\n client.sys.disable_secrets_engine(mount_point)",
"def reset_config():\r\n # TODO implement configuration reset\r\n pass",
"def reset_config():\n return _set_config(_gen_config())",
"def full_reset(self):\n self.at_cmd('CFUN=1')",
"def reset():\n for cpu_id in POSSIBLE_CPUS:\n set_cpu(cpu_id, True)",
"def stop_salt():\n with fabric_settings(warn_only=True):\n if env.host == env.master_server.public_ip:\n sudo(\"systemctl stop salt-master\")\n sudo(\"systemctl stop salt-minion\")",
"def reset(self):\n self.manager.delete_all()\n for name, val in DEFAULT_SETTINGS.items():\n val['name'] = name\n val['default_value'] = val['value']\n self.manager.from_dict(val)",
"async def send_reset(self):\n try:\n await self._send_command([PrivateConstants.SYSTEM_RESET])\n except RuntimeError:\n exit(0)",
"def reset_secret(self, save=False):\n client = cas.get_client()\n client.revoke_application_tokens(self.client_id, self.client_secret)\n self.client_secret = generate_client_secret()\n\n if save:\n self.save()\n return True",
"def __TweakMasterKeyBits(key_bytes: bytes) -> bytes:\n key_bytes = bytearray(key_bytes)\n # Clear the lowest 3 bits of the first byte of kL\n key_bytes[0] = BitUtils.ResetBits(key_bytes[0], 0x07)\n # Clear the highest 3 bits of the last byte of kL (standard kholaw only clears the highest one)\n key_bytes[31] = BitUtils.ResetBits(key_bytes[31], 0xE0)\n # Set the second-highest bit of the last byte of kL\n key_bytes[31] = BitUtils.SetBits(key_bytes[31], 0x40)\n\n return bytes(key_bytes)",
"def reset(self):\n self.enc_len = None\n self.precomputed_enc_h = None\n self.mask = None\n self.prev_attn = None",
"async def _reset_settings(self, ctx):\n data = await self.get_data(ctx)\n await data.Settings.clear()\n msg = (\"{0.name} ({0.id}) reset all \"\n \"casino settings.\").format(ctx.author)\n await ctx.send(msg)",
"def reboot(self):\n raise NotImplementedError",
"def reset(self):\n self.settings = None\n self.sublime_settings = None\n self.settings_base = \"Javatar.sublime-settings\"\n self.sublime_base = \"Preferences.sublime-settings\"",
"def reset(self):\r\n err = self._cfuncs['ka_reset'](self._core._get_ka())\r\n self._core._handle_error(err)",
"def soft_reset() -> None:\n ...",
"def reset(self):\r\n # TODO: have reset flag such that it forces all the bottom changes\r\n self.pwm_freq = self._default[\"pwm_freq\"]\r\n self.gate_logic = self._default[\"gate_logic\"]\r\n self.max_pwm = self._default[\"max_pwm\"]\r\n self.lase_on_power_up = self._default[\"lase_on_power_up\"]\r\n\r\n self.mode = self._default[\"mode\"]\r\n self.lase = self._default[\"lase\"]\r\n self.percent = self._default[\"percent\"] # in percent\r",
"async def admin_reset(self, ctx: commands.Context):\n await self.config.clear_all()\n await self.initialize_internals()\n await ctx.send('Global team management factory reset complete.')",
"def soft_reset(self):\n self.ser.write(\"\\030\")\n self._handle_reset()",
"def disable(self):\n logging.debug(\"Restoring sudoers configuration...\")\n command = (\"sed -i -e '/{mark}/,+1d' \" \"{filename}\").format(\n mark=self.MARK, filename=self.SUDOERS\n )\n Command(command, verbose=False).run()",
"def UnsetWiredDefault(self):\n config = ConfigParser.ConfigParser()\n config.read(self.wired_conf)\n profileList = config.sections()\n for profile in profileList:\n if config.has_option(profile, \"default\"):\n if misc.to_bool(config.get(profile, \"default\")):\n config.set(profile, \"default\", False)\n config.write(open(self.wired_conf, \"w\"))\n self.SaveWiredNetworkProfile(profile)",
"async def send_reset(self):\n try:\n await self._send_command([PrivateConstants.SYSTEM_RESET])\n except RuntimeError:\n exit(0) #keep this??",
"def sudo_restart ( self, ):\r\n pass\r\n \"sudo reboot\"",
"def storage_reset(self):\n daos_srv_bin = os.path.join(self.daosbinpath, \"daos_server\")\n cmd = \"{} storage prepare -n --reset -f\".format(daos_srv_bin)\n result = pcmd(self._hosts, cmd)\n if len(result) > 1 or 0 not in result:\n raise ServerFailed(\"Error resetting NVMe storage\")",
"def unset_iscsi_boot_info(self, mac):\n if(self._is_boot_mode_uefi() is True):\n iscsi_info = {'iSCSIBootEnable': 'Disabled'}\n self._change_iscsi_settings(mac.upper(), iscsi_info)\n else:\n msg = 'iscsi boot is not supported in the BIOS boot mode'\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def reset(self):\n self.write_to_serial('*RST')",
"def reset():\n _runtime.reset()"
] | [
"0.64598894",
"0.61968",
"0.61686015",
"0.6030497",
"0.6030497",
"0.58504564",
"0.5713005",
"0.5710499",
"0.5703248",
"0.56868505",
"0.5620701",
"0.5609751",
"0.5601407",
"0.5587413",
"0.5550478",
"0.55427927",
"0.55306214",
"0.55234766",
"0.55176026",
"0.5505577",
"0.5487669",
"0.54727244",
"0.547004",
"0.54634017",
"0.5460822",
"0.54337865",
"0.5423768",
"0.5415206",
"0.5410141",
"0.5405178",
"0.5396446",
"0.53943455",
"0.5393105",
"0.5377948",
"0.5348732",
"0.53467894",
"0.5346271",
"0.53433204",
"0.5341661",
"0.53387904",
"0.53383267",
"0.53187424",
"0.531786",
"0.5313199",
"0.5306339",
"0.5298985",
"0.5286753",
"0.5278667",
"0.5271214",
"0.5267156",
"0.5265404",
"0.52640337",
"0.526325",
"0.5254141",
"0.52538645",
"0.5247179",
"0.5246943",
"0.523326",
"0.5230235",
"0.5222209",
"0.5197784",
"0.5197418",
"0.5184673",
"0.5184673",
"0.51789796",
"0.5178688",
"0.517743",
"0.51679033",
"0.5166602",
"0.51656",
"0.51634383",
"0.515605",
"0.5151435",
"0.5146058",
"0.51447856",
"0.51376814",
"0.51375204",
"0.5135873",
"0.51329577",
"0.51315564",
"0.5125245",
"0.5124221",
"0.51227987",
"0.51185733",
"0.51006913",
"0.51002496",
"0.5100095",
"0.5097365",
"0.5095563",
"0.5083247",
"0.50817484",
"0.5080061",
"0.50764626",
"0.5075152",
"0.50747186",
"0.5072999",
"0.50633854",
"0.5055805",
"0.5053302",
"0.5047857"
] | 0.84218895 | 0 |
Request the power state of the server. | def get_host_power_status(self):
data = self._get_host_details()
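        # Normalize the reported 'Power' value (e.g. "On"/"Off") to upper case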
return data['Power'].upper() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
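A minimal standalone sketch of the pattern behind the document snippet above: fetch the system resource over REST and normalize the reported power state. The `requests` usage, endpoint path, and credentials here are illustrative assumptions; the original relies on its class's `_get_host_details()` helper instead.

```python
import requests

def get_host_power_status(host: str, session: requests.Session) -> str:
    # Hypothetical iLO-style system resource path, mirroring the snippet's client
    resp = session.get(f"https://{host}/rest/v1/Systems/1", verify=False)
    resp.raise_for_status()
    # Normalize e.g. "On"/"Off" to "ON"/"OFF", as the original does
    return resp.json()["Power"].upper()

# Usage (assumed credentials):
# s = requests.Session(); s.auth = ("admin", "password")
# print(get_host_power_status("10.0.0.5", s))
```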
"def _doPowerState(self, state=False):\n if state:\n self._cmdPowerOn()\n else:\n self._cmdPowerOff()",
"async def power_on(self):\n ...",
"def power():\n request_command(tv_command=TVCommand.power)",
"def test_api_ucs_power(self):\n # first power off all servers\n self.set_all_server_power_state(\"off\")\n # verify power state is down\n self.check_all_server_power_state(\"down\")\n # now power on the servers\n self.set_all_server_power_state(\"on\")\n # verify power state is up\n self.check_all_server_power_state(\"up\")",
"def power_state(self) -> const.PowerState:\n return self.relay(\"power_state\")",
"def power_on(self):\n pass",
"def get_power_state(self, node):",
"def set_power_state(self, context, server, state):\n\n fsm = utils.get_state_machine(start_state=server.status)\n\n @utils.synchronized(server.uuid)\n def do_set_power_state():\n LOG.debug('Power %(state)s called for server %(server)s',\n {'state': state,\n 'server': server})\n self.driver.set_power_state(context, server, state)\n\n try:\n do_set_power_state()\n server.power_state = self.driver.get_power_state(context,\n server.uuid)\n except Exception as e:\n with excutils.save_and_reraise_exception():\n LOG.exception(\"Set server power state to %(state)s failed, \"\n \"the reason: %(reason)s\",\n {\"state\": state, \"reason\": six.text_type(e)})\n server.power_state = self.driver.get_power_state(context,\n server.uuid)\n if state in ['reboot', 'soft_reboot'] \\\n and server.power_state != states.POWER_ON:\n utils.process_event(fsm, server, event='error')\n else:\n utils.process_event(fsm, server, event='fail')\n\n action = POWER_NOTIFICATION_MAP[state]\n notifications.notify_about_server_action(\n context, server, self.host,\n action=action,\n phase=fields.NotificationPhase.ERROR,\n exception=e)\n\n utils.process_event(fsm, server, event='done')\n LOG.info('Successfully set node power state: %s',\n state, server=server)",
"def power_status(self):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")",
"def set_power(sid):\n # Resolve the passed parameters if any\n timer = None\n os = None\n if request.json:\n if timer in request.json:\n timer = request.json.get('timer')\n if os in request.json:\n os = request.json.get('os')\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n state = hosts.get(db, sid)['state']\n \n if state == 'on':\n # The host is on -- turn it off\n # TODO make a unix shell util file\n # TODO make a windows util file\n return\n elif state == 'off':\n # The host is off -- turn it on\n if timer is not None:\n sleep(timer)\n netutil.wake_on_lan(db, sid)\n ret = {'power': {'state': 'on'}}\n return jsonify(ret)\n # TODO find a keyboard driver and implement OS parameter",
"async def power(self, turn_on):\n\n op = DHumOp.ON if turn_on else DHumOp.OFF\n keys = self._get_cmd_keys(CMD_STATE_OPERATION)\n op_value = self.model_info.enum_value(keys[2], op.value)\n if self._should_poll:\n # different power command for ThinQ1 devices\n cmd = \"Start\" if turn_on else \"Stop\"\n await self.set(keys[0], keys[2], key=None, value=cmd)\n self._status.update_status(keys[2], op_value)\n return\n await self.set(keys[0], keys[1], key=keys[2], value=op_value)",
"def vm_power(self, vm_name, state):\n states = [\"on\", \"off\"]\n if state not in states:\n raise OpenStackConnectorException(f\"Incorrect action was provided for the vm {vm_name} power state change\")\n \n vm_id = self._get_vm_id_by_name(vm_name)\n\n if not vm_id:\n return False\n \n try:\n if state == \"on\":\n self.connection.compute.start_server(vm_id)\n else:\n self.connection.compute.stop_server(vm_id)\n except ConflictException: # This exception block handles the situation when the VM is already in the required power state\n pass\n \n return True",
"def state(self):\n return self._power",
"def get_setPower(self):\n self.read(\":POW?\")",
"def set_power_state(self, node, power_state):",
"def _set_power_state(node, target_state):\n amt_common.awake_amt_interface(node)\n client = amt_common.get_wsman_client(node)\n\n method = 'RequestPowerStateChange'\n options = pywsman.ClientOptions()\n options.add_selector('Name', 'Intel(r) AMT Power Management Service')\n\n doc = _generate_power_action_input(AMT_POWER_MAP[target_state])\n try:\n client.wsman_invoke(options, resource_uris.CIM_PowerManagementService,\n method, doc)\n except (exception.AMTFailure, exception.AMTConnectFailure) as e:\n with excutils.save_and_reraise_exception():\n LOG.exception(_LE(\"Failed to set power state %(state)s for \"\n \"node %(node_id)s with error: %(error)s.\"),\n {'state': target_state, 'node_id': node.uuid,\n 'error': e})\n else:\n LOG.info(_LI(\"Power state set to %(state)s for node %(node_id)s\"),\n {'state': target_state, 'node_id': node.uuid})",
"def power(ip, state='toggle') -> dict:\n states = ['on', 'off', 'toggle']\n\n if state.lower() not in states:\n raise Exception(\"Invalid power state [{}]. Must be in {}.\".format(state, str(states)))\n\n bulb = get_bulb(ip=ip)\n\n try:\n if state == states[0]: # on\n bulb.turn_on()\n elif state == states[1]: # off\n bulb.turn_off()\n else: # toggle\n bulb.toggle()\n properties = bulb.get_properties()\n return properties\n except Exception as e:\n raise Exception(str(e))",
"def reqSetPower(self, ID_list, s_l):\n while self.status != Modem.Status.IDLE :\n sleep(0.1)\n if self.status != Modem.Status.IDLE:\n raise ValueError(\"Modem setPower unexpected status: \\\n \" + str(self.status))\n self.status = Modem.Status.BUSY2REQ\n self.send(self.interpreter.buildSetPower(ID_list, s_l))\n while self.status != Modem.Status.IDLE and self.status != Modem.Status.KILL:\n sleep(self.m_to)\n # self.recvCommand()\n if self.status == Modem.Status.KILL:\n return self.close()\n return self.errorCheck()",
"def turn_on(self):\n self._lms.query(self._id, 'power', '1')\n self.update_ha_state()",
"def is_on(self):\n return self._client.get_power()",
"def get_power_state(cls, client_object):\n vm_mor = client_object.get_api()\n return vm_mor.runtime.powerState",
"def power_state(self) -> Optional[pulumi.Input['PowerStateArgs']]:\n return pulumi.get(self, \"power_state\")",
"async def get_power(self):\n if not self._current_power_supported:\n return 0\n\n try:\n value = await self._get_config(STATE_POWER_V1)\n return value[STATE_POWER_V1]\n except (ValueError, InvalidRequestError):\n # Device does not support whole unit instant power usage\n self._current_power_supported = False\n return 0",
"def update_power_state(intent):\n card_title = \"Power\"\n\n power_state = intent.get('slots',{}).get('PowerState',{}).get('value')\n if power_state and (power_state.upper() == 'ON' or \\\n power_state.upper() == 'OFF'):\n speech_output = \"OK.\"\n new_value_dict = {\"power_state\":power_state.upper()}\n shadow_connection.update_shadow(new_value_dict)\n else:\n speech_output = \"I did not understand that. Please repeat your request.\"\n\n response = response_builders.build_response(session_attributes,\n response_builders.build_speechlet_response(card_title,\n speech_output, reprompt_text, should_end_session))\n return response",
"def enter_low_power(self):\n line = self.connection.low_power()\n\n if 'OK' in lines:\n return 0\n\n return -1",
"def fusion_api_get_power_device_power_state(self, uri=None, api=None, headers=None):\n return self.pd.get(uri=uri, api=api, headers=headers, param='/powerState')",
"def get_power_state(self):\n\n doc = self.client.enumerate(uris.CIM_ComputerSystem)\n\n enabled_state = doc.find(\n './/s:Body/wsen:EnumerateResponse/wsman:Items/wsinst:CIM_HostComputerSystem/wsinst:EnabledState', wsman.NS_MAP_COMPUTER_SYSTEM)\n return constants._get_enabled_state(enabled_state.text)",
"def post():\n parser = reqparse.RequestParser()\n parser.add_argument('ip', type=str, required=True,\n help=APIStatus.IP_REQUIRED.value.get('message'))\n parser.add_argument('state', type=str, required=False,\n help=APIMessage.REQUIRED_ARG.value.get('message')\n .format('state', 'on, off, toggle'))\n\n args = parser.parse_args()\n\n args.state = args.state if args.state else 'toggle'\n\n try:\n status = Bulbs.power(ip=args.ip, state=args.state)\n return Handler.success(response=status)\n except Exception as e:\n return Handler.exception(\n status=APIStatus.ERROR,\n params=args,\n traceback=traceback.format_exc(),\n exception=e\n )",
"def set_state(self, state: bool) -> None:\n payload = self._cfg.state_power_on if state else self._cfg.state_power_off\n command = f\"{COMMAND_POWER}{self._cfg.idx+1}\"\n self._mqtt_client.publish(\n self._cfg.command_topic + command,\n payload,\n )",
"def powerDispatch(self):\n\n if self.ui.powerDevice.currentText().startswith('INDI'):\n self.app.power.name = self.ui.powerDeviceName.currentText()\n self.app.message.emit('Power enabled', 0)\n self.deviceStat['power'] = False\n else:\n self.app.power.name = ''\n self.app.message.emit('Power disabled', 0)\n self.deviceStat['power'] = None\n\n return True",
"def poweron(self):\n raise NotImplementedError()",
"def _perform_power_op(self, oper):\n\n power_settings = {\"Action\": \"Reset\",\n \"ResetType\": oper}\n systems_uri = \"/rest/v1/Systems/1\"\n\n status, headers, response = self._rest_post(systems_uri, None,\n power_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def turn_on(self):\n self._remote.power(1)",
"def get_power_management() -> int:",
"def power_on(self, default=False):\n if default:\n return self.exec_command('SupplyPowerDefault = 1')\n return self.exec_command('SupplyPower = 1')",
"def is_power_limited(self):\n status = self.get_status_response()\n return ((status[1] & 0x10) == 0x10)\n #end is_power_limited()",
"def is_powering_on(self):\n return self._get_state() == ServerState.POWERING_ON",
"def power_on(self):\n raise NotImplementedError",
"def get_power(self):\r\n return self._api.get_power()",
"def fusion_api_edit_server_hardware_power_state(self, body, uri, api=None, headers=None):\n return self.sh.update(body, uri, api, headers, param='/powerState')",
"async def get_power_state(self,\n reset_inactivity_timeout=True,\n response_timeout_in_seconds=None):\n command = _create_get_power_state_command(sequence_number=self._get_and_increment_command_sequence_number(),\n wait_for_response=True,\n reset_inactivity_timeout=reset_inactivity_timeout)\n\n response_packet = await self._send_command(command,\n response_timeout_in_seconds=response_timeout_in_seconds)\n\n return _parse_power_state(response_packet.data)",
"def req_plandb(self):\n # Check if target system is currently connected\n try:\n # This function resolves the map of connected nodes\n node = self.resolve_node_id(self.target)\n\n # Request the PlanDB state\n logging.debug(\"Requesting PlanDB state from target.\")\n db_req = pyimc.PlanDB()\n\n # Enumerations are exposed as a subclass of the message\n db_req.type = pyimc.PlanDB.TypeEnum.REQUEST\n db_req.op = pyimc.PlanDB.OperationEnum.GET_STATE # Note: DSTATE does not seem to work as intended\n db_req.request_id = self.db_reqid\n self.db_reqid += 1\n\n # Send the IMC message to the node\n self.send(node, db_req)\n\n except KeyError as e:\n # Target system is not connected\n logging.debug('Target system is not connected.')",
"def test_on_reboot_on(self):\n self.openstack('baremetal node power on {0}'.format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power on', show_prop['power_state'])\n\n self.openstack('baremetal node reboot {0}'.format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power on', show_prop['power_state'])",
"def get_host_power_status(self):\n sushy_system = self._get_sushy_system()\n return GET_POWER_STATE_MAP.get(sushy_system.power_state)",
"def get_power_state(self, task):\n return _power_status(task.node)",
"def set_power_state(self, task, pstate):\n _set_and_wait(task, pstate)",
"def fusion_api_set_power_device_power_state(self, body, uri, api=None, headers=None):\n return self.pd.update(body=body, uri=uri, api=api, headers=headers, param='/powerState')",
"def status(self):\n ret = self.dev.ctrl_transfer(0xc0, 0x01, 0x0081, 0x0000, 0x0001)\n if ret[0] == 0xa0:\n return self.POWER_ON\n return self.POWER_OFF",
"def _power_status(node):\n amt_common.awake_amt_interface(node)\n client = amt_common.get_wsman_client(node)\n namespace = resource_uris.CIM_AssociatedPowerManagementService\n try:\n doc = client.wsman_get(namespace)\n except (exception.AMTFailure, exception.AMTConnectFailure) as e:\n with excutils.save_and_reraise_exception():\n LOG.exception(_LE(\"Failed to get power state for node %(node_id)s \"\n \"with error: %(error)s.\"),\n {'node_id': node.uuid, 'error': e})\n\n item = \"PowerState\"\n power_state = amt_common.xml_find(doc, namespace, item).text\n for state in AMT_POWER_MAP:\n if power_state == AMT_POWER_MAP[state]:\n return state\n return states.ERROR",
"async def async_turn_on(self):\n data_cmd = _command(COMMAND_POWER_ON)\n await self._async_send_command(data_cmd)",
"def get_state(self):\n ret = self.send(\"?S\", recv=True)\n assert ret in \"WDR\"\n return ret",
"async def async_turn_on(self, **kwargs: Any) -> None:\n await self.coordinator.roku.remote(\"poweron\")\n await self.coordinator.async_request_refresh()",
"def d_requestState(self, stateId):\n self.sendUpdate('requestState', [stateId])",
"def set_host_power(self, power):\n power = power.upper()\n if (power is not None) and (power not in POWER_STATE):\n msg = (\"Invalid input '%(pow)s'. \"\n \"The expected input is ON or OFF.\" %\n {'pow': power})\n raise exception.IloInvalidInputError(msg)\n\n # Check current power status, do not act if it's in requested state.\n cur_status = self.get_host_power_status()\n\n if cur_status == power:\n LOG.debug(self._(\"Node is already in '%(power)s' power state.\"),\n {'power': power})\n return\n\n self._perform_power_op(POWER_STATE[power])",
"def request_current_state(self):\n if self._connected:\n payload = {\n \"msg\": \"REQUEST-CURRENT-STATE\",\n \"time\": time.strftime(\"%Y-%m-%dT%H:%M:%SZ\", time.gmtime())\n }\n self._mqtt.publish(self.command_topic, json.dumps(payload))\n else:\n _LOGGER.warning(\n \"Unable to send commands because device %s is not connected\",\n self.serial)",
"def set_power_state(self, msg):\n last_pct = self._pct\n last_plugged_in = self._plugged_in\n\n self._pct = msg.lifePercent\n self._plugged_in = msg.powerSupplyPresent\n\n if (last_pct != self._pct or last_plugged_in != self._plugged_in):\n drain_str = \"not charging\"\n if (self._plugged_in):\n drain_str = \"charging\"\n self.setToolTip(\"Battery: %.2f%% (%s)\" % (self._pct, drain_str))\n\n self.set_charging( self._plugged_in )\n\n self.update_perc(msg.lifePercent)",
"def on(self):\n\t\trb0 = [0x00]\n\t\trb1 = [0x00, 0x00]\n\t\tattempts = 0\n\n\t\twhile self.state != ON and attempts < MAX_RETRIES:\n\t\t\tself.spi.transfer([0x03], rb0, 1)\t\t## Send the command byte; response will be written to rb0\n\t\t\ttime.sleep(9e-3) \t\t\t\t\t\t## Sleep for 9 ms\n\t\t\tself.spi.transfer([0x00, 0x01], rb1, 2)\t## Send the following 2 bytes; response will be written to rb1\n\t\t\ttime.sleep(0.1)\n\n\t\t\tif rb0[0] < 0: \t\t\t\t\t\t## Account for implicit unsigned-to-signed \n\t\t\t\trb0[0] += 256\t\t\t\t\t## conversion from the transfer operation\n\n\t\t\tattempts += 1\n\t\t\tprint(f\"[{self.__class__.__name__}::on]\", end=' ')\n\t\t\tif rb0[0] == 0xF3 and rb1[0] == 0x03: \t## Ensure response values are as expected\n\t\t\t\tself.state = ON \n\t\t\t\tprint(\"SUCCESS -- device powered on.\")\n\t\t\telse:\n\t\t\t\tif attempts != MAX_RETRIES:\n\t\t\t\t\tprint(f\"Attempt #{attempts} failed -- retrying after delay ...\")\n\t\t\t\t\ttime.sleep(RETRY_DELAY)\n\t\t\t\telse:\n\t\t\t\t\tprint(\"ERROR -- command failed.\")\n\n\t\treturn self.state == ON",
"async def _update_power_sensor(self, state) -> bool:\n if (\n state is None\n or state.state == STATE_UNKNOWN\n or state.state == STATE_UNAVAILABLE\n ):\n self._power = None\n self.async_write_ha_state()\n return False\n\n if state.state in OFF_STATES:\n self._power = self._standby_usage or 0\n if self._multiply_factor and self._multiply_factor_standby:\n self._power *= self._multiply_factor\n else:\n self._power = await self._power_calculator.calculate(state)\n if self._multiply_factor and self._power is not None:\n self._power *= self._multiply_factor\n\n if self._power is None:\n self.async_write_ha_state()\n return False\n\n self._power = round(self._power, 2)\n\n _LOGGER.debug(\n 'State changed to \"%s\" for entity \"%s\". Power:%s',\n state.state,\n state.entity_id,\n self._power,\n )\n\n self.async_write_ha_state()\n return True",
"async def async_turn_on(self) -> None:\n self._zone.power = True",
"def get_power_state(self, userid):\n return self._smtclient.get_power_state(userid)",
"def togglepow(self,channel):\n if self.rf is not None:\n newstatus = bool(self.pow[channel-1].get())\n finalstatus = self.rf.setrempow(channel-1,newstatus)\n self.messages.log('Remote power channel %d set to '%(channel)+str(finalstatus))\n else:\n self.messages.log('Not connected to a focuser.')",
"def get_power(self):\n #GPIO.setmode(GPIO.BOARD)\n #GPIO.setup(self.input_pin, GPIO.IN)\n return 0",
"def test_off_reboot_on(self):\n self.openstack('baremetal node power off {0}'\n .format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power off', show_prop['power_state'])\n\n self.openstack('baremetal node reboot {0}'.format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power on', show_prop['power_state'])",
"def get_node_power_state(self, context, node_id):\n return self.call(context,\n self.make_msg('get_node_power_state',\n node_id=node_id))",
"def set_power_management(value: int) -> None:",
"def power(self, node_uuid, target):\n # TODO(lucasagomes): Test if target is a valid state and if it's able\n # to transition to the target state from the current one\n rpc_node = objects.Node.get_by_uuid(pecan.request.context, node_uuid)\n if rpc_node.target_power_state is not None:\n raise wsme.exc.ClientSideError(_(\"Power operation for node %s is \"\n \"already in progress.\") %\n rpc_node['uuid'],\n status_code=409)\n # Note that there is a race condition. The node state(s) could change\n # by the time the RPC call is made and the TaskManager manager gets a\n # lock.\n pecan.request.rpcapi.change_node_power_state(pecan.request.context,\n node_uuid, target)\n return NodeStates.convert(rpc_node)",
"def standby() -> None:",
"def testGetPower(self):\n self.ports.get_power(file_name = 'get_power.xml', port_ids = portsDict['port_ids'], power = portsDict['power'])",
"def request() -> None:\n\t_flag.set()",
"def enter_low_power_mode(self) -> bool:\n try:\n self.processes[\"dispatch\"].pause()\n return True\n except:\n return False",
"def get_power_state(self, id_or_uri):\n uri = self._client.build_uri(id_or_uri) + \"/powerState\"\n return self._client.get(uri)",
"def power_on(self, wait=0.2):\n print('Powering up O2 Meter ({})...'.format(self.ID))\n self.sensor.write(\"#PWUP\\r\")\n on_status = self.sensor.readline()\n time.sleep(wait)\n\n if 'PWUP' in on_status:\n print(' Ready!')\n elif 'ERR' in on_status:\n print('Power-on error: {}'.format(on_status.rstrip()))\n else:\n print('Something went wrong during power-on.\\n -> Sensor returned {}'.format(on_status))\n return",
"def get_power(self) -> bool:\r\n if not self.backlight:\r\n return None\r\n\r\n return self.backlight.power",
"def _do_power_action(cls, task):\n if task is None:\n return\n result = vc_soap_util.get_task_state(task)\n return result",
"def _turn_on(self):\n logger.info(\"Check antenna power\")\n power = yield WaitDBus(self.gsm_device.GetAntennaPower)\n logger.info(\"antenna power is %d\", power)\n if power:\n yield None\n logger.info(\"turn on antenna power\")\n try:\n yield WaitDBus(self.gsm_device.SetAntennaPower, True)\n except dbus.exceptions.DBusException, ex:\n if ex.get_dbus_name() != 'org.freesmartphone.GSM.SIM.AuthFailed':\n raise\n yield self._ask_pin()",
"def _sync_server_power_state(self, context, db_server,\n node_power_state):\n\n # We re-query the DB to get the latest server info to minimize\n # (not eliminate) race condition.\n db_server.refresh()\n db_power_state = db_server.power_state\n\n if db_server.status not in (states.ACTIVE, states.STOPPED):\n # on the receiving end of mogan-engine, it could happen\n # that the DB server already report the new resident\n # but the actual BM has not showed up on the hypervisor\n # yet. In this case, let's allow the loop to continue\n # and run the state sync in a later round\n LOG.info(\"During sync_power_state the server has a \"\n \"pending task (%(task)s). Skip.\",\n {'task': db_server.task_state},\n server=db_server)\n return\n\n if node_power_state != db_power_state:\n LOG.info('During _sync_server_power_state the DB '\n 'power_state (%(db_power_state)s) does not match '\n 'the node_power_state from the hypervisor '\n '(%(node_power_state)s). Updating power_state in the '\n 'DB to match the hypervisor.',\n {'db_power_state': db_power_state,\n 'node_power_state': node_power_state},\n server=db_server)\n # power_state is always updated from hypervisor to db\n db_server.power_state = node_power_state\n db_server.save()",
"def laser_state():\n global laser\n check_laser()\n req_data = request.get_json()\n if req_data != None:\n on_off_state = bool(req_data[\"on_off\"])\n intensity = int(req_data[\"intensity\"])\n if on_off_state: #if value is set to on, intensity jumps to 100%\n laser.on()\n try:\n print (\"laser to \" + str (intensity))\n laser.power = intensity\n except ValueError:\n print (\"Bad laser power received.\")\n if not on_off_state: #state switches to on when value changed.\n laser.off()\n lstate = {\"on_off\": laser.state(), \"intensity\": laser.power }\n return jsonify(lstate)",
"def set_all_server_power_state(self, state):\n api_data = request(\"get\", \"/serviceProfile\")\n self.assertEqual(api_data['status'], 200,\n 'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))\n total_elements = 0\n for server in api_data[\"json\"][\"ServiceProfile\"][\"members\"]:\n if server[\"assoc_state\"] == \"associated\":\n api_data_c = request(\"post\", \"/power\",\n query={\"identifier\": str(server[\"path\"]), \"action\": state})\n self.assertEqual(api_data_c['status'], 200,\n 'Incorrect HTTP return code, expected 200, got:' +\n str(api_data_c['status']))\n total_elements += 1\n self.assertGreater(total_elements, 0, \"Found zero elements\")",
"def wifi_on(self):\n self._clear_read_buffer()\n self._write_cmd(\"PE01\")\n time.sleep(100e-3)",
"def set_power(self, dbm=-30):\r\n _debug('simq03b_api.set_power')\r\n \r\n self.write(\"POWer \"+str(dbm))",
"def _request_switch(self, state, callback=None):\n self.stick.send(\n CircleSwitchRequest(self.mac, state), callback,\n )",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def on(cls, client_object):\n vm_mor = client_object.get_api()\n return cls._do_power_action(vm_mor.PowerOnVM_Task())",
"def requestBattery(self) -> None:\n self._protocol.write_line(CMD_BATTERY)",
"def power_mode(self):\n if not self.eve_type.is_upwell_structure:\n return None\n\n if self.fuel_expires_at and self.fuel_expires_at > now():\n return self.PowerMode.FULL_POWER\n\n elif self.last_online_at:\n if self.last_online_at >= now() - timedelta(days=7):\n return self.PowerMode.LOW_POWER\n else:\n return self.PowerMode.ABANDONED\n\n elif self.state in {self.State.ANCHORING, self.State.ANCHOR_VULNERABLE}:\n return self.PowerMode.LOW_POWER\n\n else:\n return self.PowerMode.LOW_ABANDONED",
"def set_power(self, power: bool):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = power",
"def turn_on(self) -> None:\n self._monoprice.set_power(self._zone_id, True)",
"def reqNodeStatus(self):\n while self.status != Modem.Status.IDLE :\n sleep(0.1)\n if self.status != Modem.Status.IDLE:\n raise ValueError(\"Modem getNodeStatus unexpected status: \\\n \" + str(self.status))\n self.status = Modem.Status.BUSY2REQ\n self.send(self.interpreter.buildGetStatus())\n while self.status != Modem.Status.IDLE and self.status != Modem.Status.KILL:\n sleep(self.m_to)\n #self.recvCommand()\n if self.status == Modem.Status.KILL:\n return self.close()\n return self.errorCheck()",
"def set_power(self, dbm=-30):\r\n return self._api.set_power(dbm)",
"def set_power(self, dbm=-30):\r\n self.write(\"SOURce1:POWer:POWer \"+str(dbm))",
"def set_power(self, dbm=-30):\r\n self.write(\"SOURce1:POWer:POWer \"+str(dbm))",
"async def power_off(self):\n ...",
"def power_control(self, power):\n ret = self._transfer(TVPower(power=power))\n return ret.data if isinstance(ret, RaspiAckMsg) and ret.ack else None",
"def poweroff(self) -> None:\n pass",
"def update(self):\n try:\n if self._remote.power() == 1:\n self._state = STATE_ON\n else:\n self._state = STATE_OFF\n\n # Set TV to be able to remotely power on\n # self._remote.power_on_command_settings(2)\n if self._remote.mute() == 2:\n self._muted = False\n else:\n self._muted = True\n self._volume = self._remote.volume() / 60\n except OSError:\n self._state = STATE_OFF",
"def updateStateImage(self):\n\n if self.device.states['online']:\n self.device.updateStateImageOnServer(indigo.kStateImageSel.EnergyMeterOn)\n else:\n self.device.updateStateImageOnServer(indigo.kStateImageSel.EnergyMeterOff)",
"def set_power(self, power):\n x = 0\n if power > 100:\n power = 100\n elif power < 0:\n power = 0\n if power != 0:\n while (self.__rpm < 100) and x < 3:\n time.sleep(1)\n x += 1\n if x > 3:\n print(\"Fan doesn't spinn!\")\n return\n self.__pow = power",
"def is_power_onoff(self):\n return self['application'] == 'ccd201_pon_app'",
"async def async_turn_on(self, **kwargs: Any) -> None:\n _LOGGER.debug(\"Tried to switch on %s\", self.name)\n try:\n await self.hass.async_add_executor_job(\n self.device.appliance.set_setting, BSH_POWER_STATE, BSH_POWER_ON\n )\n except HomeConnectError as err:\n _LOGGER.error(\"Error while trying to turn on device: %s\", err)\n self._state = False\n self.async_entity_update()",
"async def power_on(self) -> str:\n return f\"d2 lamp is {await self.hw_device.lamp('d2')}; halogen lamp is {await self.hw_device.lamp('hal')}\""
] | [
"0.72591937",
"0.70166993",
"0.6846167",
"0.6774623",
"0.67444295",
"0.6627822",
"0.6618059",
"0.657318",
"0.6498877",
"0.6434031",
"0.64102936",
"0.6391049",
"0.63899004",
"0.6373958",
"0.6363609",
"0.63495874",
"0.63292205",
"0.63288325",
"0.63002634",
"0.62778926",
"0.6276813",
"0.6254265",
"0.6227349",
"0.620841",
"0.61854434",
"0.61428195",
"0.61364335",
"0.6115394",
"0.60836256",
"0.60779804",
"0.60627526",
"0.605765",
"0.60508364",
"0.60272896",
"0.5973997",
"0.59736943",
"0.5958165",
"0.5953535",
"0.5939881",
"0.5934522",
"0.5932749",
"0.5931239",
"0.5929394",
"0.5927094",
"0.59245163",
"0.5909414",
"0.59015167",
"0.5899447",
"0.58964497",
"0.589147",
"0.58836013",
"0.5876874",
"0.58749807",
"0.5854034",
"0.58517146",
"0.5831841",
"0.57979125",
"0.5789381",
"0.5769897",
"0.57620823",
"0.57539153",
"0.572472",
"0.5720657",
"0.57142437",
"0.5709778",
"0.56997967",
"0.56880224",
"0.5678193",
"0.5677459",
"0.56655717",
"0.5644342",
"0.5641733",
"0.56383157",
"0.5635795",
"0.5630164",
"0.56277573",
"0.5623103",
"0.56184477",
"0.56101125",
"0.56088483",
"0.5608168",
"0.56046844",
"0.56014264",
"0.56002635",
"0.5598242",
"0.5572154",
"0.55718565",
"0.5564259",
"0.556099",
"0.555269",
"0.555269",
"0.55497336",
"0.55357397",
"0.5532589",
"0.5530594",
"0.55261815",
"0.5524874",
"0.55248123",
"0.55218613",
"0.55193913"
] | 0.5593325 | 85 |
Perform the requested power operation. | def _perform_power_op(self, oper):
power_settings = {"Action": "Reset",
"ResetType": oper}
systems_uri = "/rest/v1/Systems/1"
status, headers, response = self._rest_post(systems_uri, None,
power_settings)
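        # Any status >= 300 is treated as failure; surface iLO's extended error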
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
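For symmetry with the previous record, a hedged sketch of the POST side shown in this record's document: issue an iLO-style `Reset` action and fail on any non-2xx status. The endpoint and payload keys come from the snippet itself; the `requests` session and `RuntimeError` stand in for the original's REST helper and `IloError`.

```python
import requests

def perform_power_op(host: str, session: requests.Session, oper: str) -> None:
    # 'oper' is a ResetType value such as "On" or "ForceRestart" (values assumed)
    payload = {"Action": "Reset", "ResetType": oper}
    resp = session.post(f"https://{host}/rest/v1/Systems/1",
                        json=payload, verify=False)
    if resp.status_code >= 300:
        # The original extracts iLO's extended error message before raising
        raise RuntimeError(f"power operation failed: {resp.status_code} {resp.text}")
```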
"def power():\n request_command(tv_command=TVCommand.power)",
"def power_on(self):\n raise NotImplementedError",
"def pow(self, power):\n daskD.wait(self.client.map(_call_pow, self.vecDask, power=power, pure=False))\n return self",
"def power_on(self):\n pass",
"def power(self, power):\n\n self._power = power",
"def poweron(self):\n raise NotImplementedError()",
"def power(self, value: int):\n self._power = value",
"def get_setPower(self):\n self.read(\":POW?\")",
"def perform(self, context):\r\n context.owner.spendPower(self.power)",
"def __pow__(self, exponent):\n return self.runtime.pow(self, exponent)",
"def power(a, b):\n pass",
"def poweroff(self):\n raise NotImplementedError()",
"def power(self, power: int, matrix_power: bool = False) -> QuantumCircuit:\n raise NotImplementedError",
"def powerIP(self,power):\n np.power(self.t, power, out=self.t)\n return self",
"async def power_on(self):\n ...",
"def __pow__(self,n):\r\n\t\t\r\n\t\t# take power\r\n\t\tp = self.power(n)\r\n\t\t\r\n\t\treturn p",
"def poweroff(self) -> None:\n pass",
"def Incrpower(self, increment):\n self.power += increment",
"def _call_pow(vecObj, power):\n res = vecObj.pow(power)\n return res",
"def power(a, b):\n \n return a**b",
"def power(self):\n return self._power",
"def power(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"power\")",
"def get_power():\n return float(cmd(\"pa?\"))",
"def power(x): \r\n return x(1)",
"def __pow__(self,*args):\r\n pass",
"def power(num,pwr):\n if pwr is 0:\n return 1\n\n if pwr < 0 :\n return \"not supported by this function.\"\n\n if num != 0 and pwr >= 0:\n return num * power(num,pwr-1)",
"def set_power(self, power):\n x = 0\n if power > 100:\n power = 100\n elif power < 0:\n power = 0\n if power != 0:\n while (self.__rpm < 100) and x < 3:\n time.sleep(1)\n x += 1\n if x > 3:\n print(\"Fan doesn't spinn!\")\n return\n self.__pow = power",
"def get_power(self):\r\n x = self.query('POW?')\r\n if x == None: return None\r\n return float(x)",
"def power(self) -> int:\n return self._power",
"def power(self) -> int:\n return self._power",
"def power(self) -> int:\n return self._power",
"def power(self) -> int:\n return self._power",
"def get_power(self):\r\n return self._api.get_power()",
"def power(x, y):\n return x ** y",
"def set_power(self, value):\n self.write(\":POW {}W\".format(value))",
"def _pow_(self, n):\n assert n > 0\n return generic_power(self, n)",
"def power(self):\r\n return self.model * self.percent / 100",
"async def power(self, turn_on):\n\n op = DHumOp.ON if turn_on else DHumOp.OFF\n keys = self._get_cmd_keys(CMD_STATE_OPERATION)\n op_value = self.model_info.enum_value(keys[2], op.value)\n if self._should_poll:\n # different power command for ThinQ1 devices\n cmd = \"Start\" if turn_on else \"Stop\"\n await self.set(keys[0], keys[2], key=None, value=cmd)\n self._status.update_status(keys[2], op_value)\n return\n await self.set(keys[0], keys[1], key=keys[2], value=op_value)",
"def get_power(self):\r\n return self.p",
"def _doPowerState(self, state=False):\n if state:\n self._cmdPowerOn()\n else:\n self._cmdPowerOff()",
"def power(number, power):\n return math.pow(number, power)",
"def power(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"power\")",
"def power(self) -> interface.Power:\n return cast(interface.Power, self._interfaces[interface.Power])",
"def analytical_power(self, proc, vdd, temp, load):\n from tech import spice\n leakage = spice[\"bitcell_leakage\"]\n dynamic = 0 #temporary\n total_power = self.return_power(dynamic, leakage)\n return total_power",
"def power(num, exponent):\n return num ** exponent",
"def set_power(self, dbm=-30):\r\n self.write(\"POW \"+str(dbm))",
"def get_power(self):\r\n _debug('simq03b_api.get_power')\r\n \r\n x = self.query('POWer?')\r\n if x == None: return None\r\n return float(x)",
"def power_off(self):\n raise NotImplementedError",
"def set_power(self, dbm=-30):\r\n _debug('simq03b_api.set_power')\r\n \r\n self.write(\"POWer \"+str(dbm))",
"def power(self,p):\r\n\t\t\r\n\t\t# raise to power\r\n\t\tr,o = Li._expand(self,p)\r\n\t\t\r\n\t\treturn Li(r)",
"def set_power(self, power):\n\n return self._service.exposed_set_power(power)",
"def power(self) -> int:\n return self._power_consumption",
"def power(x, y):\n return x^y",
"def analytical_power(self, corner, load):\n from tech import spice\n leakage = spice[\"bitcell_leakage\"]\n # FIXME\n dynamic = 0\n total_power = self.return_power(dynamic, leakage)\n return total_power",
"def power_control(self, power):\n ret = self._transfer(TVPower(power=power))\n return ret.data if isinstance(ret, RaspiAckMsg) and ret.ack else None",
"def power(base, exponent):\n return base ** exponent",
"def set_power(self, dbm=-30):\r\n self.write(\"SOURce1:POWer:POWer \"+str(dbm))",
"def set_power(self, dbm=-30):\r\n self.write(\"SOURce1:POWer:POWer \"+str(dbm))",
"def handlePowers(self):\n self.player.sprite.handlePowers(self.get_remaining_time())",
"def get_power(self) -> float:\n\n #:READ[n][:CHANnel[m]][:SCALar]: POWer[:DC]?\n return float(self._inst.query(\":READ:POW?\"))",
"def manage_power(agent):\n total_active_power = agent.power_rating\n total_reactive_power = reactive_power(power_rating=total_active_power, power_factor=agent.power_factor)\n # power_surplus > 0 ==> incoming_power exceeds power demand\n if agent.incoming_power > 0:\n incoming_power = agent.incoming_power - agent.incoming_power * 0.05\n # if there is incoming power i overrule the clusters, no matter what\n request_inject_power = (0, 0)\n else:\n incoming_power = 0\n power_surplus = incoming_power - total_active_power\n\n # If the incoming power exceeds my demand, I update my power demand so that i will absorb the incoming excess too\n if power_surplus >= 0:\n total_active_power = incoming_power\n total_reactive_power = 0\n total_power = (total_active_power, total_reactive_power)\n return total_power, (0,0)\n\n # -- If there is incoming power, then no more power will be injected into the network\n # this will overwrite any kind of external control that is running in the cluster, No matter if it exceeds my\n # demand or not.\n #\n # -- If there is NO incoming power (incoming_power <= 0 just for rounding errors safety), then I manage all the\n # external controls\n request_inject_active_power = 0\n request_inject_reactive_power = 0\n if incoming_power <= 0:\n request_inject_active_power = agent.request_inject_power\n request_inject_reactive_power = reactive_power(power_rating=agent.request_inject_power,\n power_factor=agent.request_power_factor)\n # Return these two tuples just for the sake of line length\n total_power = (total_active_power, total_reactive_power)\n request_inject_power = (request_inject_active_power, request_inject_reactive_power)\n return total_power, request_inject_power",
"def get_power(self):\n #GPIO.setmode(GPIO.BOARD)\n #GPIO.setup(self.input_pin, GPIO.IN)\n return 0",
"def __pow__(self,power):\n return Factor().__build( VarSet(self.v) , np.power(self.t,power) )",
"def __pow__(self, exponent, modulus=None):\n raise NotImplementedError",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __pow__(self, *args, **kwargs): # real signature unknown\n pass",
"def set_power(self, power):\n print('Setting santec power to %.4f mW' % power)\n self.santec1.write(\"LP %.2f\" % power)\n self.santec2.write(\"LP %.2f\" % power)\n self.santec3.write(\"LP %.2f\" % power)\n self.santec4.write(\"LP %.2f\" % power)",
"def powerDispatch(self):\n\n if self.ui.powerDevice.currentText().startswith('INDI'):\n self.app.power.name = self.ui.powerDeviceName.currentText()\n self.app.message.emit('Power enabled', 0)\n self.deviceStat['power'] = False\n else:\n self.app.power.name = ''\n self.app.message.emit('Power disabled', 0)\n self.deviceStat['power'] = None\n\n return True",
"def power_status(self):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")",
"def testGetPower(self):\n self.ports.get_power(file_name = 'get_power.xml', port_ids = portsDict['port_ids'], power = portsDict['power'])",
"def host_power_action(self, host, action):\n return action",
"def host_power_action(self, host, action):\n return action",
"def simulate_power(self):\n if self.p_treatment - self.p_control < 0:\n thresh = 1 - self.alpha\n else:\n thresh = self.alpha\n\n try:\n p_crit = self.norm_null.ppf(1 - thresh)\n beta = self.norm_alt.cdf(p_crit)\n except:\n self.norm_distribution()\n p_crit = self.norm_null.ppf(1 - thresh)\n beta = self.norm_alt.cdf(p_crit)\n\n power = (1 - beta) if self.p_treatment > self.p_control else beta\n self.power = power\n\n return power",
"def _number_dbm_changed(self, *a):\r\n self.api.set_power(self.number_dbm.get_value())",
"def power_shutdown(self):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")",
"def power(num, exponent):\n power = num ** exponent\n return power",
"def calcPower(self, inputs):\n if self.getAtt('available', inputs):\n possible_charge_rate = self.getAtt('possible_charge_rate', inputs)\n Vm = self.getAtt('Vm', inputs)\n P = possible_charge_rate * Vm\n if not self.stayConnected:\n P = P * self.calculateVoltageIndex(Vm) * self.calculateTrafoIndex()\n return P\n return 0.0",
"async def get_power(self):\n if not self._current_power_supported:\n return 0\n\n try:\n value = await self._get_config(STATE_POWER_V1)\n return value[STATE_POWER_V1]\n except (ValueError, InvalidRequestError):\n # Device does not support whole unit instant power usage\n self._current_power_supported = False\n return 0",
"def get_power(self):\r\n x = self.query('SOURce1:POWer:POWer?')\r\n if x == None: return None\r\n return float(x)",
"def get_power(self):\r\n x = self.query('SOURce1:POWer:POWer?')\r\n if x == None: return None\r\n return float(x)",
"def power(source : Image, destination : Image = None, exponent : float = 1) -> Image:\n\n\n parameters = {\n \"src\":source,\n \"dst\": destination,\n \"exponent\":float(exponent)\n }\n\n execute(__file__, '../clij-opencl-kernels/kernels/power_' + str(len(destination.shape)) + 'd_x.cl', 'power_' + str(len(destination.shape)) + 'd', destination.shape, parameters)\n return destination",
"def __rpow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __rpow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __rpow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __rpow__(self, *args, **kwargs): # real signature unknown\n pass",
"def __rpow__(self, *args, **kwargs): # real signature unknown\n pass"
] | [
"0.71487105",
"0.7076449",
"0.70716",
"0.6828888",
"0.6828583",
"0.670036",
"0.6691989",
"0.66501945",
"0.6643156",
"0.6614282",
"0.6602157",
"0.6598427",
"0.6593769",
"0.6579953",
"0.6551106",
"0.6543305",
"0.6498859",
"0.6488113",
"0.6479012",
"0.646967",
"0.6468145",
"0.6437044",
"0.6413782",
"0.63949406",
"0.6377545",
"0.63713187",
"0.6358516",
"0.6351886",
"0.6347593",
"0.6347593",
"0.6347593",
"0.6347593",
"0.63357383",
"0.6320231",
"0.630305",
"0.63023454",
"0.6299369",
"0.6294977",
"0.62924826",
"0.628619",
"0.62499297",
"0.6239881",
"0.6221265",
"0.6213581",
"0.6192397",
"0.61747783",
"0.6163102",
"0.61537707",
"0.6149068",
"0.61397594",
"0.61344326",
"0.61189634",
"0.60920197",
"0.607886",
"0.6061167",
"0.6057938",
"0.6053508",
"0.6053508",
"0.6043784",
"0.6027334",
"0.6015815",
"0.60110044",
"0.60080665",
"0.6005482",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59899884",
"0.59817004",
"0.5967208",
"0.5966713",
"0.5966139",
"0.59634155",
"0.59634155",
"0.59589267",
"0.59575886",
"0.59524643",
"0.5949941",
"0.5948552",
"0.5934464",
"0.5929966",
"0.5929966",
"0.59250134",
"0.5913051",
"0.5913051",
"0.5913051",
"0.5913051",
"0.5913051"
] | 0.7263758 | 0 |
Simulates a physical press of the server power button. | def _press_pwr_btn(self, pushType="Press"):
power_settings = {"Action": "PowerButton",
"Target": "/Oem/Hp",
"PushType": pushType}
systems_uri = "/rest/v1/Systems/1"
status, headers, response = self._rest_post(systems_uri, None,
power_settings)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def power():\n request_command(tv_command=TVCommand.power)",
"def press_pwr_btn(self):\n self._press_pwr_btn()",
"async def power_on(self):\n ...",
"def double_click_power(self):\n get_power_event_cmd = (\"getevent -pl 2>&1 | sed -n \"\n \"'/^add/{h}/KEY_POWER/{x;s/[^/]*//p}'\")\n input_event = self.adb.exec_adb_cmd(\n \"shell '{cmd}'\".format(cmd=get_power_event_cmd)).communicate()[0]\n\n self.android_device_driver.adb.exec_adb_cmd(\"shell '{cmd}'\".format(\n cmd=DOUBLE_CLICK_POWER_EVENT_TEMPLATE.format(input_event=input_event)))",
"def _PressLeftButton(self):\n self._kit.MousePressButtons({PeripheralKit.MOUSE_BUTTON_LEFT})\n time.sleep(self.send_delay)",
"def press(self, key: list, t):\n if not self.ser.alive:\n return\n k = '00'\n for v in key:\n k = hex(int(v, 16) ^ int(k, 16))\n if len(k) == 3:\n k = k.replace('0x', '0x0')\n if \"-\" in t:\n val = t.split(\"-\")\n delay = round(random.uniform(float(val[0]), float(val[1])), 4)\n else:\n delay = float(t)\n k = k.replace('0x', '')\n # close relay\n self.ser.write(k.encode('utf-8'), isHex=True)\n # How long do you need to press\n self.log.info('button press time={}'.format(delay))\n time.sleep(delay)\n # release relay\n self.ser.write('00'.encode('utf-8'), isHex=True)",
"def on_press(key):\n currentX, currentY = pyautogui.position()\n\n if key in LEFT_LEYS:\n pyautogui.move(-DEFAULT_MOVEMENT, 0)\n if key in DOWN_KEYS:\n pyautogui.move(0, DEFAULT_MOVEMENT)\n if key in UP_KEYS:\n pyautogui.move(0, -DEFAULT_MOVEMENT)\n if key in RIGHT_KEYS:\n pyautogui.move(DEFAULT_MOVEMENT, 0)\n\n if key in LEFTMOST_KEYS:\n pyautogui.moveTo(0, currentY)\n notify(\"Powermouse\", \"Moved to left of the screen\")\n if key in BOTTOM_KEYS:\n pyautogui.moveTo(currentX, screenHeight)\n notify(\"Powermouse\", \"Moved to bottom of screen\")\n if key in TOP_KEYS:\n pyautogui.moveTo(screenWidth, currentY)\n notify(\"Powermouse\", \"Moved to top of screen\")\n if key in RIGHTMOST_KEYS:\n pyautogui.moveTo(0, currentY)\n notify(\"Powermouse\", \"Moved to right of screen\")\n\n if key in CLICK_KEYS:\n pyautogui.click()\n notify(\"Powermouse\", f\"Clicked at position {pyautogui.position()}\")\n\n if key in QUIT_KEYS:\n notify(\"Powermouse\", \"Quitting\")\n exit()",
"def hold_pwr_btn(self):\n self._press_pwr_btn(pushType=\"PressAndHold\")",
"def _doPowerState(self, state=False):\n if state:\n self._cmdPowerOn()\n else:\n self._cmdPowerOff()",
"def power_on(self):\n pass",
"def m_press(self, button: MButton):\n pass",
"def turn_on(self):\n self._remote.power(1)",
"async def async_press(self) -> None:\n if self.entity_description.key == _RESTART_KEY:\n await self._device.async_reboot()\n else:\n await self._device.async_unpair_remotes()\n await self._device.async_config_remotes(RemoteConfig.OPEN)",
"def _PressRightButton(self):\n self._kit.MousePressButtons({PeripheralKit.MOUSE_BUTTON_RIGHT})\n time.sleep(self.send_delay)",
"def host_power_action(self, host, action):\n return action",
"def host_power_action(self, host, action):\n return action",
"def test_api_ucs_power(self):\n # first power off all servers\n self.set_all_server_power_state(\"off\")\n # verify power state is down\n self.check_all_server_power_state(\"down\")\n # now power on the servers\n self.set_all_server_power_state(\"on\")\n # verify power state is up\n self.check_all_server_power_state(\"up\")",
"def pressed(self):\n print(\"Pressed (Pin: {}): {} -- {}\".format(self.button_pin, datetime.now(), self))\n if self.led.is_lit:\n self.led.off()\n self.mute()\n else:\n self.led.on()\n self.unmute()",
"def _windows_power_control(self):\n\n os_power_command = 'shutdown /r /t 3' if self._power_event_type == 'restart' \\\n else 'shutdown /h /t 3'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))",
"async def sendKeyPress(self, key):\n key = str(key)\n await self.director.sendPostRequest(\n \"/api/v1/items/{}/commands\".format(self.item_id),\n \"KEY_PRESS\",\n {\"KeyName\": key},\n )",
"async def async_turn_on(self):\n data_cmd = _command(COMMAND_POWER_ON)\n await self._async_send_command(data_cmd)",
"def on(cls, client_object):\n vm_mor = client_object.get_api()\n return cls._do_power_action(vm_mor.PowerOnVM_Task())",
"async def power(self, turn_on):\n\n op = DHumOp.ON if turn_on else DHumOp.OFF\n keys = self._get_cmd_keys(CMD_STATE_OPERATION)\n op_value = self.model_info.enum_value(keys[2], op.value)\n if self._should_poll:\n # different power command for ThinQ1 devices\n cmd = \"Start\" if turn_on else \"Stop\"\n await self.set(keys[0], keys[2], key=None, value=cmd)\n self._status.update_status(keys[2], op_value)\n return\n await self.set(keys[0], keys[1], key=keys[2], value=op_value)",
"def set_power(self, power: bool):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = power",
"def LeftClick(self):\n self._PressLeftButton()\n self._ReleaseAllButtons()",
"async def power_off(self):\n ...",
"def handle_button_press(button_state, mqtt_client, message):\n if button_state:\n ev3.Sound.speak(message).wait()\n mqtt_client.send_message(\"button_pressed\", [message])",
"def power_up(self):\n t_end = time.time() + 3\n while time.time() < t_end:\n self.light_led(5)\n self.light_led(6)",
"def sleep(self):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = False",
"async def on_buttonA_down(event, data):\n ArmDevice.storage.command[6] = gripper_open_speed",
"def presskey(self, key):\n \"\"\"Method to press any key\n Need to add further code for other keys based on requirements\"\"\"\n action = ActionChains(self.driver)\n action.send_keys(key)\n action.perform()",
"def _press(self, event):",
"def on_press(self):\n self.pressed = True",
"def on_press(self):\n self.pressed = True",
"def set_power_state(self, context, server, state):\n\n fsm = utils.get_state_machine(start_state=server.status)\n\n @utils.synchronized(server.uuid)\n def do_set_power_state():\n LOG.debug('Power %(state)s called for server %(server)s',\n {'state': state,\n 'server': server})\n self.driver.set_power_state(context, server, state)\n\n try:\n do_set_power_state()\n server.power_state = self.driver.get_power_state(context,\n server.uuid)\n except Exception as e:\n with excutils.save_and_reraise_exception():\n LOG.exception(\"Set server power state to %(state)s failed, \"\n \"the reason: %(reason)s\",\n {\"state\": state, \"reason\": six.text_type(e)})\n server.power_state = self.driver.get_power_state(context,\n server.uuid)\n if state in ['reboot', 'soft_reboot'] \\\n and server.power_state != states.POWER_ON:\n utils.process_event(fsm, server, event='error')\n else:\n utils.process_event(fsm, server, event='fail')\n\n action = POWER_NOTIFICATION_MAP[state]\n notifications.notify_about_server_action(\n context, server, self.host,\n action=action,\n phase=fields.NotificationPhase.ERROR,\n exception=e)\n\n utils.process_event(fsm, server, event='done')\n LOG.info('Successfully set node power state: %s',\n state, server=server)",
"def powerDispatch(self):\n\n if self.ui.powerDevice.currentText().startswith('INDI'):\n self.app.power.name = self.ui.powerDeviceName.currentText()\n self.app.message.emit('Power enabled', 0)\n self.deviceStat['power'] = False\n else:\n self.app.power.name = ''\n self.app.message.emit('Power disabled', 0)\n self.deviceStat['power'] = None\n\n return True",
"def poweron(self):\n raise NotImplementedError()",
"def set_host_power(self, power):\n power = power.upper()\n if (power is not None) and (power not in POWER_STATE):\n msg = (\"Invalid input '%(pow)s'. \"\n \"The expected input is ON or OFF.\" %\n {'pow': power})\n raise exception.IloInvalidInputError(msg)\n\n # Check current power status, do not act if it's in requested state.\n cur_status = self.get_host_power_status()\n\n if cur_status == power:\n LOG.debug(self._(\"Node is already in '%(power)s' power state.\"),\n {'power': power})\n return\n\n self._perform_power_op(POWER_STATE[power])",
"def button_pressed(self, channel):\n print(\"button_pressed() channel = \"+str(channel))\n GPIO.remove_event_detect(channel) # turn button off to avoid double hits\n match channel:\n case self.prev_button: \n print(\"Previous button - sending message: mycroft.audio.service.prev\")\n self.send_message(\"mycroft.audio.service.prev\")\n case self.stop_button: \n print(\"Stop button - sending message: mycroft.stop\")\n self.send_message(\"mycroft.stop\")\n case self.next_button: \n print(\"Next button - sending message: mycroft.audio.service.next\")\n self.send_message(\"mycroft.audio.service.next\")\n case _: # not expected\n print(\"Did not expect channel = \"+str(channel))\n GPIO.add_event_detect(channel, GPIO.FALLING, callback=self.button_pressed, bouncetime=5) # turn button back on",
"def simulate_button_clicked(self):\n self.simulate_bool = True\n self.update_change()",
"def set_power(sid):\n # Resolve the passed parameters if any\n timer = None\n os = None\n if request.json:\n if timer in request.json:\n timer = request.json.get('timer')\n if os in request.json:\n os = request.json.get('os')\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n state = hosts.get(db, sid)['state']\n \n if state == 'on':\n # The host is on -- turn it off\n # TODO make a unix shell util file\n # TODO make a windows util file\n return\n elif state == 'off':\n # The host is off -- turn it on\n if timer is not None:\n sleep(timer)\n netutil.wake_on_lan(db, sid)\n ret = {'power': {'state': 'on'}}\n return jsonify(ret)\n # TODO find a keyboard driver and implement OS parameter",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def power_on(self, sync=True, wait_for_guest_ready=True):\n self.vmomi_object.PowerOn()\n if sync: self._wait_for_power_on(wait_for_guest_ready)",
"def power_down(self):\n t_end = time.time() + 3\n while time.time() < t_end:\n self.light_led(4)\n self.light_led(6)",
"def press(self) -> bool:\n return self._sendcommand(PRESS_KEY, self._retry_count)",
"def press(self) -> bool:\n return self._sendcommand(PRESS_KEY, self._retry_count)",
"def _linux_power_control(self):\n\n os_power_command = 'shutdown -r now' if self._power_event_type == 'restart' \\\n else 'shutdown -h now'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))",
"def start_game():\n logger.info(\"Clicking play button\")\n mouseclick(coords_play_final_button[0], coords_play_final_button[1])",
"def turn_on(self) -> None:\n self._monoprice.set_power(self._zone_id, True)",
"async def async_press(self) -> None:\n await self.entity_description.press_action(self.wrapper)",
"def emulate_press(self, key_code, scan_code, value, timeval):\n scan_event = self.create_event_object(\n \"Misc\",\n 0x04,\n scan_code,\n timeval)\n key_event = self.create_event_object(\n \"Key\",\n key_code,\n value,\n timeval)\n return scan_event, key_event",
"def test_turn_on(power_supply):\n power_supply.Init()\n assert power_supply.state() != tango.DevState.ON\n power_supply.current = 5.0\n power_supply.turn_on()\n assert power_supply.state() == tango.DevState.ON",
"def poweroff(self) -> None:\n pass",
"def send_software_trigger(self):\n self.lib.SendSoftwareTrigger()",
"def k_press(self, key: KKey):\n pass",
"def _perform_power_op(self, oper):\n\n power_settings = {\"Action\": \"Reset\",\n \"ResetType\": oper}\n systems_uri = \"/rest/v1/Systems/1\"\n\n status, headers, response = self._rest_post(systems_uri, None,\n power_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def press_key(self, event):\n if self.active:\n keycode = self.mapping[event.pin_num]\n while self.busy:\n sleep(0.01)\n self.busy = True\n self.send_key(keycode)\n self.busy = False",
"def motorswitch(self, bo, pin, t):\n self.app.processEvents()\n if(self.win.getStopped() == True):\n self.win.updatelabel2(\"Jingle button was clicked.\\nClick another!\")\n return\n while self.win.getPaused() == True:\n self.app.processEvents() # Not really too sure if this line is needed. NEEDS TESTING\n self.win.updatelabel2(\"Jingle Song Paused!\\nChoose A new Song or Play to Resume!\")\n time.sleep(.1)\n GPIO.output(pin, bo)\n time.sleep(t)",
"def Pause():\n\tDmg.enableButton.SetOff()",
"def power_on(self, default=False):\n if default:\n return self.exec_command('SupplyPowerDefault = 1')\n return self.exec_command('SupplyPower = 1')",
"def send_one(self, button):\n self.client.send_one(self.name, button)",
"def togglePWMPower(self):\n # PCPWM1 is located at position 6\n mask = 1 << 6\n self._injectFault(\"PCONP\", 0x400FC0C4, mask)",
"def when_pressed(self, button, func, *args):\n\n self.hardware_interfaces[self._gpio].set_pin_event(self._b_names[button],\n func,\n *args)",
"def power_on(self):\n raise NotImplementedError",
"async def test_onoff(hass: HomeAssistant, mock_remote) -> None:\n\n await setup_panasonic_viera(hass)\n\n data = {ATTR_ENTITY_ID: \"remote.panasonic_viera_tv\"}\n\n # simulate tv off when async_update\n mock_remote.get_mute = Mock(side_effect=SOAPError)\n\n await hass.services.async_call(REMOTE_DOMAIN, SERVICE_TURN_OFF, data)\n await hass.services.async_call(REMOTE_DOMAIN, SERVICE_TURN_ON, data)\n await hass.async_block_till_done()\n\n power = getattr(Keys.power, \"value\", Keys.power)\n assert mock_remote.send_key.call_args_list == [call(power), call(power)]",
"async def async_turn_on(self) -> None:\n self._zone.power = True",
"def voldown(self, raiseby=1):\n command + 'voldown ' + str(raiseby)\n self.run_command(command)",
"def powerup_collected(self) -> None:\n self.powerup_collected_sound.play()",
"async def async_press(self) -> None:\n try:\n await self.entity_description.press_func(self.device)\n except DevicePasswordProtected as ex:\n self.entry.async_start_reauth(self.hass)\n raise HomeAssistantError(\n f\"Device {self.entry.title} require re-authenticatication to set or change the password\"\n ) from ex\n except DeviceUnavailable as ex:\n raise HomeAssistantError(\n f\"Device {self.entry.title} did not respond\"\n ) from ex",
"def press(button='left', coords=(0, 0)):\n _perform_click_input(button=button, coords=coords, button_down=True, button_up=False)",
"def arduPusherClick(self, dummy = 0):\r\n self.ardu.write(chr(self.CLICK))",
"def trigger(self):\n GPIO.output(self.trigger_pin, 1)\n time.sleep(10/1000000)\n GPIO.output(self.trigger_pin, 0)",
"def RightClick(self):\n self._PressRightButton()\n self._ReleaseAllButtons()",
"async def async_turn_on(self, **kwargs: Any) -> None:\n await self.coordinator.roku.remote(\"poweron\")\n await self.coordinator.async_request_refresh()",
"def pointer_notify_button(\n self, time_msec: int, button: int, button_state: ButtonState\n ) -> int:\n return lib.wlr_seat_pointer_notify_button(\n self._ptr, time_msec, button, button_state.value\n )",
"def poweron(self) -> None:\n self.servo_reset()",
"async def async_preset_button(self, preset):\n if self._preset_key != None and preset != None:\n if not self._slave_mode:\n if int(preset) > 0 and int(preset) <= self._preset_key:\n value = await self.async_call_linkplay_httpapi(\"MCUKeyShortClick:{0}\".format(str(preset)), None)\n if value != \"OK\":\n _LOGGER.warning(\"Failed to recall preset %s. \" \"Device: %s, Got response: %s\", self.entity_id, preset, value)\n else:\n _LOGGER.warning(\"Wrong preset number %s. Device: %s, has to be integer between 1 and %s\", self.entity_id, preset, self._preset_key)\n else:\n await self._master.async_preset_button(preset)",
"def togglepow(self,channel):\n if self.rf is not None:\n newstatus = bool(self.pow[channel-1].get())\n finalstatus = self.rf.setrempow(channel-1,newstatus)\n self.messages.log('Remote power channel %d set to '%(channel)+str(finalstatus))\n else:\n self.messages.log('Not connected to a focuser.')",
"def takeControl(self):\n mainloop()",
"def takeControl(self):\n mainloop()",
"def run_button(self):\r\n self.step = False # Clear step command\r\n self.is_pause = False\r\n self.run_command()",
"def enter_low_power_mode(self) -> bool:\n try:\n self.processes[\"dispatch\"].pause()\n return True\n except:\n return False",
"def vm_power(self, vm_name, state):\n states = [\"on\", \"off\"]\n if state not in states:\n raise OpenStackConnectorException(f\"Incorrect action was provided for the vm {vm_name} power state change\")\n \n vm_id = self._get_vm_id_by_name(vm_name)\n\n if not vm_id:\n return False\n \n try:\n if state == \"on\":\n self.connection.compute.start_server(vm_id)\n else:\n self.connection.compute.stop_server(vm_id)\n except ConflictException: # This exception block handles the situation when the VM is already in the required power state\n pass\n \n return True",
"def on_press(key):\n try:\n # gets pressed key char value and searches it from dict with get method.\n mapped_key = key_mappings.get(key.char) # gets value and type tuple or None\n if mapped_key:\n module.pressed_key = mapped_key\n except AttributeError:\n traceback.print_exc()\n except KeyboardInterrupt:\n print(f\"\\n{module.current_time()} Application stopped\")",
"def pause_button(self):\r\n self.is_action = True\r\n self.update_settings()\r\n self.is_pause = True\r\n if self.pause_call is not None:\r\n self.wm.after(1, self.pause_call)",
"def stop():\n set_power(0)",
"def mouse_press_event(self, x: int, y: int, button: int):\n pass",
"def pressKeyboardButton(self, button):\r\n if self.__layoutMaps[self.currentLayout].has_key(button):\r\n self.phone._touch.press(self.__layoutMaps[self.currentLayout][button][0])\r\n self.phone._run('Switch case',testStepReporting=False)\r\n return True\r\n else:\r\n return False",
"def toggle_power(self, duration, wait):\n payload = {\"duration\": duration}\n response = requests.post(self.__api_url('toggle'.format(self.name)), data=payload, headers=self.headers)\n if wait:\n time.sleep(duration)\n return response.text",
"def send_btn_clicked(self):\n command = self.SendLine.text()\n self.Serial.send(command)",
"def power_play(self, power_play):\n\n self._power_play = power_play",
"def press(button_id: str) -> None:\n try:\n self.query_one(f\"#{button_id}\", Button).press()\n except NoMatches:\n pass",
"async def async_turn_on(self):\n await self.local_meural.send_key_resume()",
"def pause(self, instance):\n self.power_off(instance)",
"def send_command(self):\n button = self.sender()\n answer: str = self.UsbHost.send_command(self.state.ser, self.command_dict[button], str(self.state.device_id))\n if answer == 'Ok':\n self.statusbar.showMessage(self.result_dict[button])\n else:\n error_message(self.error_dict[button])\n self.statusbar.showMessage(answer_translate[answer])\n self.create_log_message(self.command_dict[button], answer, \"\")",
"def click(self,x:int=None,y:int=None):\n x = int(x/self.zoom_count)#1.5是缩放比例\n y = int(y/self.zoom_count)\n lParam = win32api.MAKELONG(x, y)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_MOUSEMOVE,wcon.MK_LBUTTON, lParam)\n win32gui.SendMessage(self.ScreenBoardhwnd, wcon.WM_SETCURSOR, self.ScreenBoardhwnd, win32api.MAKELONG(wcon.HTCLIENT, wcon.WM_LBUTTONDOWN))\n # win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_SETCURSOR, 0, 0)\n while (win32api.GetKeyState(wcon.VK_CONTROL) < 0 or\n win32api.GetKeyState(wcon.VK_SHIFT) < 0 or\n win32api.GetKeyState(wcon.VK_MENU) < 0):\n time.sleep(0.005)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_LBUTTONDOWN,\n wcon.MK_LBUTTON, lParam)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_LBUTTONUP, 0, lParam)",
"async def run_command(device, command):\n print(\"Waiting for button presses ...\")\n async for event in device.async_read_loop():\n if EV_KEY == event.type:\n key_event = evdev.KeyEvent(event)\n if evdev.KeyEvent.key_down == key_event.keystate:\n os.system(command)",
"def testPowerOnResponse(self):\n message = (mavutil.mavlink.GOPRO_COMMAND_POWER, mavutil.mavlink.GOPRO_REQUEST_SUCCESS)\n self.mgr.set_response_callback('vehicle','name', message)\n self.mgr.processMsgQueue.assert_called_with()",
"def shortcut_click(self, event):\r\n\r\n tile = self.board.get_event_tile(event.pos)\r\n\r\n if not self.is_new_game and not self.is_game_over and tile is not None:\r\n self.update_reset_button()\r\n tile_reveal_result = self.board.left_click_up(tile, is_shortcut_click=True)\r\n self.process_tile_reveal(tile_reveal_result)",
"def click_ac_power_button(self):\n with IFrameSwitch(self._driver, \"childFrame\"):\n with IFrameSwitch(self._driver, \"frame3\"):\n log.info(\"Waiting to see AC Power Button.\")\n self._driver.wait().until(\n EC.element_to_be_clickable((By.ID, self.ac_power_id))\n )\n log.info(\"Clicking AC Power button.\")\n self._driver.find_element_by_id(self.ac_power_id).click()\n self._driver.sleep(1)"
] | [
"0.74329734",
"0.7256616",
"0.678499",
"0.6608517",
"0.65348643",
"0.64914054",
"0.6230932",
"0.62221473",
"0.62064654",
"0.6177791",
"0.61654186",
"0.61310524",
"0.60974616",
"0.608872",
"0.6059632",
"0.6059632",
"0.5930312",
"0.58865917",
"0.5870763",
"0.5838959",
"0.5828924",
"0.5789177",
"0.5772703",
"0.57638323",
"0.57527804",
"0.5716567",
"0.5698876",
"0.56980443",
"0.5694779",
"0.56930614",
"0.5692731",
"0.56751055",
"0.56725955",
"0.56725955",
"0.56308913",
"0.56292766",
"0.5628456",
"0.5624197",
"0.561946",
"0.5597901",
"0.55941606",
"0.5590033",
"0.55835557",
"0.5562697",
"0.55589676",
"0.55589676",
"0.5558115",
"0.55563897",
"0.55526525",
"0.5552561",
"0.55172783",
"0.5503239",
"0.54885143",
"0.54801387",
"0.5479709",
"0.5479654",
"0.54765254",
"0.5471129",
"0.54702955",
"0.54615474",
"0.5457973",
"0.5455108",
"0.5441696",
"0.544046",
"0.54253507",
"0.54182625",
"0.5415688",
"0.54137117",
"0.5402463",
"0.5396126",
"0.53959996",
"0.5389537",
"0.53875834",
"0.5383426",
"0.5382269",
"0.53821063",
"0.5375431",
"0.5373189",
"0.5363887",
"0.5363887",
"0.53606385",
"0.5341273",
"0.53399134",
"0.5336516",
"0.53334266",
"0.53307444",
"0.5330335",
"0.5328187",
"0.5321946",
"0.53194064",
"0.53117335",
"0.53012",
"0.52999055",
"0.52981377",
"0.529628",
"0.52959096",
"0.52825016",
"0.5281484",
"0.52750504",
"0.52741855"
] | 0.7021857 | 2 |
Simulates a physical press of the server power button. | def press_pwr_btn(self):
self._press_pwr_btn() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def power():\n request_command(tv_command=TVCommand.power)",
"def _press_pwr_btn(self, pushType=\"Press\"):\n power_settings = {\"Action\": \"PowerButton\",\n \"Target\": \"/Oem/Hp\",\n \"PushType\": pushType}\n\n systems_uri = \"/rest/v1/Systems/1\"\n\n status, headers, response = self._rest_post(systems_uri, None,\n power_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"async def power_on(self):\n ...",
"def double_click_power(self):\n get_power_event_cmd = (\"getevent -pl 2>&1 | sed -n \"\n \"'/^add/{h}/KEY_POWER/{x;s/[^/]*//p}'\")\n input_event = self.adb.exec_adb_cmd(\n \"shell '{cmd}'\".format(cmd=get_power_event_cmd)).communicate()[0]\n\n self.android_device_driver.adb.exec_adb_cmd(\"shell '{cmd}'\".format(\n cmd=DOUBLE_CLICK_POWER_EVENT_TEMPLATE.format(input_event=input_event)))",
"def _PressLeftButton(self):\n self._kit.MousePressButtons({PeripheralKit.MOUSE_BUTTON_LEFT})\n time.sleep(self.send_delay)",
"def press(self, key: list, t):\n if not self.ser.alive:\n return\n k = '00'\n for v in key:\n k = hex(int(v, 16) ^ int(k, 16))\n if len(k) == 3:\n k = k.replace('0x', '0x0')\n if \"-\" in t:\n val = t.split(\"-\")\n delay = round(random.uniform(float(val[0]), float(val[1])), 4)\n else:\n delay = float(t)\n k = k.replace('0x', '')\n # close relay\n self.ser.write(k.encode('utf-8'), isHex=True)\n # How long do you need to press\n self.log.info('button press time={}'.format(delay))\n time.sleep(delay)\n # release relay\n self.ser.write('00'.encode('utf-8'), isHex=True)",
"def on_press(key):\n currentX, currentY = pyautogui.position()\n\n if key in LEFT_LEYS:\n pyautogui.move(-DEFAULT_MOVEMENT, 0)\n if key in DOWN_KEYS:\n pyautogui.move(0, DEFAULT_MOVEMENT)\n if key in UP_KEYS:\n pyautogui.move(0, -DEFAULT_MOVEMENT)\n if key in RIGHT_KEYS:\n pyautogui.move(DEFAULT_MOVEMENT, 0)\n\n if key in LEFTMOST_KEYS:\n pyautogui.moveTo(0, currentY)\n notify(\"Powermouse\", \"Moved to left of the screen\")\n if key in BOTTOM_KEYS:\n pyautogui.moveTo(currentX, screenHeight)\n notify(\"Powermouse\", \"Moved to bottom of screen\")\n if key in TOP_KEYS:\n pyautogui.moveTo(screenWidth, currentY)\n notify(\"Powermouse\", \"Moved to top of screen\")\n if key in RIGHTMOST_KEYS:\n pyautogui.moveTo(0, currentY)\n notify(\"Powermouse\", \"Moved to right of screen\")\n\n if key in CLICK_KEYS:\n pyautogui.click()\n notify(\"Powermouse\", f\"Clicked at position {pyautogui.position()}\")\n\n if key in QUIT_KEYS:\n notify(\"Powermouse\", \"Quitting\")\n exit()",
"def hold_pwr_btn(self):\n self._press_pwr_btn(pushType=\"PressAndHold\")",
"def _doPowerState(self, state=False):\n if state:\n self._cmdPowerOn()\n else:\n self._cmdPowerOff()",
"def power_on(self):\n pass",
"def m_press(self, button: MButton):\n pass",
"def turn_on(self):\n self._remote.power(1)",
"async def async_press(self) -> None:\n if self.entity_description.key == _RESTART_KEY:\n await self._device.async_reboot()\n else:\n await self._device.async_unpair_remotes()\n await self._device.async_config_remotes(RemoteConfig.OPEN)",
"def _PressRightButton(self):\n self._kit.MousePressButtons({PeripheralKit.MOUSE_BUTTON_RIGHT})\n time.sleep(self.send_delay)",
"def host_power_action(self, host, action):\n return action",
"def host_power_action(self, host, action):\n return action",
"def test_api_ucs_power(self):\n # first power off all servers\n self.set_all_server_power_state(\"off\")\n # verify power state is down\n self.check_all_server_power_state(\"down\")\n # now power on the servers\n self.set_all_server_power_state(\"on\")\n # verify power state is up\n self.check_all_server_power_state(\"up\")",
"def pressed(self):\n print(\"Pressed (Pin: {}): {} -- {}\".format(self.button_pin, datetime.now(), self))\n if self.led.is_lit:\n self.led.off()\n self.mute()\n else:\n self.led.on()\n self.unmute()",
"def _windows_power_control(self):\n\n os_power_command = 'shutdown /r /t 3' if self._power_event_type == 'restart' \\\n else 'shutdown /h /t 3'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))",
"async def sendKeyPress(self, key):\n key = str(key)\n await self.director.sendPostRequest(\n \"/api/v1/items/{}/commands\".format(self.item_id),\n \"KEY_PRESS\",\n {\"KeyName\": key},\n )",
"async def async_turn_on(self):\n data_cmd = _command(COMMAND_POWER_ON)\n await self._async_send_command(data_cmd)",
"def on(cls, client_object):\n vm_mor = client_object.get_api()\n return cls._do_power_action(vm_mor.PowerOnVM_Task())",
"async def power(self, turn_on):\n\n op = DHumOp.ON if turn_on else DHumOp.OFF\n keys = self._get_cmd_keys(CMD_STATE_OPERATION)\n op_value = self.model_info.enum_value(keys[2], op.value)\n if self._should_poll:\n # different power command for ThinQ1 devices\n cmd = \"Start\" if turn_on else \"Stop\"\n await self.set(keys[0], keys[2], key=None, value=cmd)\n self._status.update_status(keys[2], op_value)\n return\n await self.set(keys[0], keys[1], key=keys[2], value=op_value)",
"def set_power(self, power: bool):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = power",
"def LeftClick(self):\n self._PressLeftButton()\n self._ReleaseAllButtons()",
"async def power_off(self):\n ...",
"def handle_button_press(button_state, mqtt_client, message):\n if button_state:\n ev3.Sound.speak(message).wait()\n mqtt_client.send_message(\"button_pressed\", [message])",
"def power_up(self):\n t_end = time.time() + 3\n while time.time() < t_end:\n self.light_led(5)\n self.light_led(6)",
"def sleep(self):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = False",
"async def on_buttonA_down(event, data):\n ArmDevice.storage.command[6] = gripper_open_speed",
"def presskey(self, key):\n \"\"\"Method to press any key\n Need to add further code for other keys based on requirements\"\"\"\n action = ActionChains(self.driver)\n action.send_keys(key)\n action.perform()",
"def _press(self, event):",
"def on_press(self):\n self.pressed = True",
"def on_press(self):\n self.pressed = True",
"def set_power_state(self, context, server, state):\n\n fsm = utils.get_state_machine(start_state=server.status)\n\n @utils.synchronized(server.uuid)\n def do_set_power_state():\n LOG.debug('Power %(state)s called for server %(server)s',\n {'state': state,\n 'server': server})\n self.driver.set_power_state(context, server, state)\n\n try:\n do_set_power_state()\n server.power_state = self.driver.get_power_state(context,\n server.uuid)\n except Exception as e:\n with excutils.save_and_reraise_exception():\n LOG.exception(\"Set server power state to %(state)s failed, \"\n \"the reason: %(reason)s\",\n {\"state\": state, \"reason\": six.text_type(e)})\n server.power_state = self.driver.get_power_state(context,\n server.uuid)\n if state in ['reboot', 'soft_reboot'] \\\n and server.power_state != states.POWER_ON:\n utils.process_event(fsm, server, event='error')\n else:\n utils.process_event(fsm, server, event='fail')\n\n action = POWER_NOTIFICATION_MAP[state]\n notifications.notify_about_server_action(\n context, server, self.host,\n action=action,\n phase=fields.NotificationPhase.ERROR,\n exception=e)\n\n utils.process_event(fsm, server, event='done')\n LOG.info('Successfully set node power state: %s',\n state, server=server)",
"def powerDispatch(self):\n\n if self.ui.powerDevice.currentText().startswith('INDI'):\n self.app.power.name = self.ui.powerDeviceName.currentText()\n self.app.message.emit('Power enabled', 0)\n self.deviceStat['power'] = False\n else:\n self.app.power.name = ''\n self.app.message.emit('Power disabled', 0)\n self.deviceStat['power'] = None\n\n return True",
"def poweron(self):\n raise NotImplementedError()",
"def set_host_power(self, power):\n power = power.upper()\n if (power is not None) and (power not in POWER_STATE):\n msg = (\"Invalid input '%(pow)s'. \"\n \"The expected input is ON or OFF.\" %\n {'pow': power})\n raise exception.IloInvalidInputError(msg)\n\n # Check current power status, do not act if it's in requested state.\n cur_status = self.get_host_power_status()\n\n if cur_status == power:\n LOG.debug(self._(\"Node is already in '%(power)s' power state.\"),\n {'power': power})\n return\n\n self._perform_power_op(POWER_STATE[power])",
"def button_pressed(self, channel):\n print(\"button_pressed() channel = \"+str(channel))\n GPIO.remove_event_detect(channel) # turn button off to avoid double hits\n match channel:\n case self.prev_button: \n print(\"Previous button - sending message: mycroft.audio.service.prev\")\n self.send_message(\"mycroft.audio.service.prev\")\n case self.stop_button: \n print(\"Stop button - sending message: mycroft.stop\")\n self.send_message(\"mycroft.stop\")\n case self.next_button: \n print(\"Next button - sending message: mycroft.audio.service.next\")\n self.send_message(\"mycroft.audio.service.next\")\n case _: # not expected\n print(\"Did not expect channel = \"+str(channel))\n GPIO.add_event_detect(channel, GPIO.FALLING, callback=self.button_pressed, bouncetime=5) # turn button back on",
"def simulate_button_clicked(self):\n self.simulate_bool = True\n self.update_change()",
"def set_power(sid):\n # Resolve the passed parameters if any\n timer = None\n os = None\n if request.json:\n if timer in request.json:\n timer = request.json.get('timer')\n if os in request.json:\n os = request.json.get('os')\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n state = hosts.get(db, sid)['state']\n \n if state == 'on':\n # The host is on -- turn it off\n # TODO make a unix shell util file\n # TODO make a windows util file\n return\n elif state == 'off':\n # The host is off -- turn it on\n if timer is not None:\n sleep(timer)\n netutil.wake_on_lan(db, sid)\n ret = {'power': {'state': 'on'}}\n return jsonify(ret)\n # TODO find a keyboard driver and implement OS parameter",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def power_on(self, sync=True, wait_for_guest_ready=True):\n self.vmomi_object.PowerOn()\n if sync: self._wait_for_power_on(wait_for_guest_ready)",
"def power_down(self):\n t_end = time.time() + 3\n while time.time() < t_end:\n self.light_led(4)\n self.light_led(6)",
"def press(self) -> bool:\n return self._sendcommand(PRESS_KEY, self._retry_count)",
"def press(self) -> bool:\n return self._sendcommand(PRESS_KEY, self._retry_count)",
"def _linux_power_control(self):\n\n os_power_command = 'shutdown -r now' if self._power_event_type == 'restart' \\\n else 'shutdown -h now'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))",
"def start_game():\n logger.info(\"Clicking play button\")\n mouseclick(coords_play_final_button[0], coords_play_final_button[1])",
"def turn_on(self) -> None:\n self._monoprice.set_power(self._zone_id, True)",
"async def async_press(self) -> None:\n await self.entity_description.press_action(self.wrapper)",
"def emulate_press(self, key_code, scan_code, value, timeval):\n scan_event = self.create_event_object(\n \"Misc\",\n 0x04,\n scan_code,\n timeval)\n key_event = self.create_event_object(\n \"Key\",\n key_code,\n value,\n timeval)\n return scan_event, key_event",
"def test_turn_on(power_supply):\n power_supply.Init()\n assert power_supply.state() != tango.DevState.ON\n power_supply.current = 5.0\n power_supply.turn_on()\n assert power_supply.state() == tango.DevState.ON",
"def poweroff(self) -> None:\n pass",
"def send_software_trigger(self):\n self.lib.SendSoftwareTrigger()",
"def k_press(self, key: KKey):\n pass",
"def _perform_power_op(self, oper):\n\n power_settings = {\"Action\": \"Reset\",\n \"ResetType\": oper}\n systems_uri = \"/rest/v1/Systems/1\"\n\n status, headers, response = self._rest_post(systems_uri, None,\n power_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def press_key(self, event):\n if self.active:\n keycode = self.mapping[event.pin_num]\n while self.busy:\n sleep(0.01)\n self.busy = True\n self.send_key(keycode)\n self.busy = False",
"def motorswitch(self, bo, pin, t):\n self.app.processEvents()\n if(self.win.getStopped() == True):\n self.win.updatelabel2(\"Jingle button was clicked.\\nClick another!\")\n return\n while self.win.getPaused() == True:\n self.app.processEvents() # Not really too sure if this line is needed. NEEDS TESTING\n self.win.updatelabel2(\"Jingle Song Paused!\\nChoose A new Song or Play to Resume!\")\n time.sleep(.1)\n GPIO.output(pin, bo)\n time.sleep(t)",
"def Pause():\n\tDmg.enableButton.SetOff()",
"def power_on(self, default=False):\n if default:\n return self.exec_command('SupplyPowerDefault = 1')\n return self.exec_command('SupplyPower = 1')",
"def send_one(self, button):\n self.client.send_one(self.name, button)",
"def togglePWMPower(self):\n # PCPWM1 is located at position 6\n mask = 1 << 6\n self._injectFault(\"PCONP\", 0x400FC0C4, mask)",
"def when_pressed(self, button, func, *args):\n\n self.hardware_interfaces[self._gpio].set_pin_event(self._b_names[button],\n func,\n *args)",
"def power_on(self):\n raise NotImplementedError",
"async def test_onoff(hass: HomeAssistant, mock_remote) -> None:\n\n await setup_panasonic_viera(hass)\n\n data = {ATTR_ENTITY_ID: \"remote.panasonic_viera_tv\"}\n\n # simulate tv off when async_update\n mock_remote.get_mute = Mock(side_effect=SOAPError)\n\n await hass.services.async_call(REMOTE_DOMAIN, SERVICE_TURN_OFF, data)\n await hass.services.async_call(REMOTE_DOMAIN, SERVICE_TURN_ON, data)\n await hass.async_block_till_done()\n\n power = getattr(Keys.power, \"value\", Keys.power)\n assert mock_remote.send_key.call_args_list == [call(power), call(power)]",
"async def async_turn_on(self) -> None:\n self._zone.power = True",
"def voldown(self, raiseby=1):\n command + 'voldown ' + str(raiseby)\n self.run_command(command)",
"def powerup_collected(self) -> None:\n self.powerup_collected_sound.play()",
"async def async_press(self) -> None:\n try:\n await self.entity_description.press_func(self.device)\n except DevicePasswordProtected as ex:\n self.entry.async_start_reauth(self.hass)\n raise HomeAssistantError(\n f\"Device {self.entry.title} require re-authenticatication to set or change the password\"\n ) from ex\n except DeviceUnavailable as ex:\n raise HomeAssistantError(\n f\"Device {self.entry.title} did not respond\"\n ) from ex",
"def press(button='left', coords=(0, 0)):\n _perform_click_input(button=button, coords=coords, button_down=True, button_up=False)",
"def arduPusherClick(self, dummy = 0):\r\n self.ardu.write(chr(self.CLICK))",
"def trigger(self):\n GPIO.output(self.trigger_pin, 1)\n time.sleep(10/1000000)\n GPIO.output(self.trigger_pin, 0)",
"def RightClick(self):\n self._PressRightButton()\n self._ReleaseAllButtons()",
"async def async_turn_on(self, **kwargs: Any) -> None:\n await self.coordinator.roku.remote(\"poweron\")\n await self.coordinator.async_request_refresh()",
"def pointer_notify_button(\n self, time_msec: int, button: int, button_state: ButtonState\n ) -> int:\n return lib.wlr_seat_pointer_notify_button(\n self._ptr, time_msec, button, button_state.value\n )",
"def poweron(self) -> None:\n self.servo_reset()",
"async def async_preset_button(self, preset):\n if self._preset_key != None and preset != None:\n if not self._slave_mode:\n if int(preset) > 0 and int(preset) <= self._preset_key:\n value = await self.async_call_linkplay_httpapi(\"MCUKeyShortClick:{0}\".format(str(preset)), None)\n if value != \"OK\":\n _LOGGER.warning(\"Failed to recall preset %s. \" \"Device: %s, Got response: %s\", self.entity_id, preset, value)\n else:\n _LOGGER.warning(\"Wrong preset number %s. Device: %s, has to be integer between 1 and %s\", self.entity_id, preset, self._preset_key)\n else:\n await self._master.async_preset_button(preset)",
"def togglepow(self,channel):\n if self.rf is not None:\n newstatus = bool(self.pow[channel-1].get())\n finalstatus = self.rf.setrempow(channel-1,newstatus)\n self.messages.log('Remote power channel %d set to '%(channel)+str(finalstatus))\n else:\n self.messages.log('Not connected to a focuser.')",
"def takeControl(self):\n mainloop()",
"def takeControl(self):\n mainloop()",
"def run_button(self):\r\n self.step = False # Clear step command\r\n self.is_pause = False\r\n self.run_command()",
"def enter_low_power_mode(self) -> bool:\n try:\n self.processes[\"dispatch\"].pause()\n return True\n except:\n return False",
"def vm_power(self, vm_name, state):\n states = [\"on\", \"off\"]\n if state not in states:\n raise OpenStackConnectorException(f\"Incorrect action was provided for the vm {vm_name} power state change\")\n \n vm_id = self._get_vm_id_by_name(vm_name)\n\n if not vm_id:\n return False\n \n try:\n if state == \"on\":\n self.connection.compute.start_server(vm_id)\n else:\n self.connection.compute.stop_server(vm_id)\n except ConflictException: # This exception block handles the situation when the VM is already in the required power state\n pass\n \n return True",
"def on_press(key):\n try:\n # gets pressed key char value and searches it from dict with get method.\n mapped_key = key_mappings.get(key.char) # gets value and type tuple or None\n if mapped_key:\n module.pressed_key = mapped_key\n except AttributeError:\n traceback.print_exc()\n except KeyboardInterrupt:\n print(f\"\\n{module.current_time()} Application stopped\")",
"def pause_button(self):\r\n self.is_action = True\r\n self.update_settings()\r\n self.is_pause = True\r\n if self.pause_call is not None:\r\n self.wm.after(1, self.pause_call)",
"def stop():\n set_power(0)",
"def mouse_press_event(self, x: int, y: int, button: int):\n pass",
"def pressKeyboardButton(self, button):\r\n if self.__layoutMaps[self.currentLayout].has_key(button):\r\n self.phone._touch.press(self.__layoutMaps[self.currentLayout][button][0])\r\n self.phone._run('Switch case',testStepReporting=False)\r\n return True\r\n else:\r\n return False",
"def toggle_power(self, duration, wait):\n payload = {\"duration\": duration}\n response = requests.post(self.__api_url('toggle'.format(self.name)), data=payload, headers=self.headers)\n if wait:\n time.sleep(duration)\n return response.text",
"def send_btn_clicked(self):\n command = self.SendLine.text()\n self.Serial.send(command)",
"def power_play(self, power_play):\n\n self._power_play = power_play",
"def press(button_id: str) -> None:\n try:\n self.query_one(f\"#{button_id}\", Button).press()\n except NoMatches:\n pass",
"async def async_turn_on(self):\n await self.local_meural.send_key_resume()",
"def pause(self, instance):\n self.power_off(instance)",
"def send_command(self):\n button = self.sender()\n answer: str = self.UsbHost.send_command(self.state.ser, self.command_dict[button], str(self.state.device_id))\n if answer == 'Ok':\n self.statusbar.showMessage(self.result_dict[button])\n else:\n error_message(self.error_dict[button])\n self.statusbar.showMessage(answer_translate[answer])\n self.create_log_message(self.command_dict[button], answer, \"\")",
"def click(self,x:int=None,y:int=None):\n x = int(x/self.zoom_count)#1.5是缩放比例\n y = int(y/self.zoom_count)\n lParam = win32api.MAKELONG(x, y)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_MOUSEMOVE,wcon.MK_LBUTTON, lParam)\n win32gui.SendMessage(self.ScreenBoardhwnd, wcon.WM_SETCURSOR, self.ScreenBoardhwnd, win32api.MAKELONG(wcon.HTCLIENT, wcon.WM_LBUTTONDOWN))\n # win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_SETCURSOR, 0, 0)\n while (win32api.GetKeyState(wcon.VK_CONTROL) < 0 or\n win32api.GetKeyState(wcon.VK_SHIFT) < 0 or\n win32api.GetKeyState(wcon.VK_MENU) < 0):\n time.sleep(0.005)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_LBUTTONDOWN,\n wcon.MK_LBUTTON, lParam)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_LBUTTONUP, 0, lParam)",
"async def run_command(device, command):\n print(\"Waiting for button presses ...\")\n async for event in device.async_read_loop():\n if EV_KEY == event.type:\n key_event = evdev.KeyEvent(event)\n if evdev.KeyEvent.key_down == key_event.keystate:\n os.system(command)",
"def testPowerOnResponse(self):\n message = (mavutil.mavlink.GOPRO_COMMAND_POWER, mavutil.mavlink.GOPRO_REQUEST_SUCCESS)\n self.mgr.set_response_callback('vehicle','name', message)\n self.mgr.processMsgQueue.assert_called_with()",
"def shortcut_click(self, event):\r\n\r\n tile = self.board.get_event_tile(event.pos)\r\n\r\n if not self.is_new_game and not self.is_game_over and tile is not None:\r\n self.update_reset_button()\r\n tile_reveal_result = self.board.left_click_up(tile, is_shortcut_click=True)\r\n self.process_tile_reveal(tile_reveal_result)",
"def click_ac_power_button(self):\n with IFrameSwitch(self._driver, \"childFrame\"):\n with IFrameSwitch(self._driver, \"frame3\"):\n log.info(\"Waiting to see AC Power Button.\")\n self._driver.wait().until(\n EC.element_to_be_clickable((By.ID, self.ac_power_id))\n )\n log.info(\"Clicking AC Power button.\")\n self._driver.find_element_by_id(self.ac_power_id).click()\n self._driver.sleep(1)"
] | [
"0.74329734",
"0.7021857",
"0.678499",
"0.6608517",
"0.65348643",
"0.64914054",
"0.6230932",
"0.62221473",
"0.62064654",
"0.6177791",
"0.61654186",
"0.61310524",
"0.60974616",
"0.608872",
"0.6059632",
"0.6059632",
"0.5930312",
"0.58865917",
"0.5870763",
"0.5838959",
"0.5828924",
"0.5789177",
"0.5772703",
"0.57638323",
"0.57527804",
"0.5716567",
"0.5698876",
"0.56980443",
"0.5694779",
"0.56930614",
"0.5692731",
"0.56751055",
"0.56725955",
"0.56725955",
"0.56308913",
"0.56292766",
"0.5628456",
"0.5624197",
"0.561946",
"0.5597901",
"0.55941606",
"0.5590033",
"0.55835557",
"0.5562697",
"0.55589676",
"0.55589676",
"0.5558115",
"0.55563897",
"0.55526525",
"0.5552561",
"0.55172783",
"0.5503239",
"0.54885143",
"0.54801387",
"0.5479709",
"0.5479654",
"0.54765254",
"0.5471129",
"0.54702955",
"0.54615474",
"0.5457973",
"0.5455108",
"0.5441696",
"0.544046",
"0.54253507",
"0.54182625",
"0.5415688",
"0.54137117",
"0.5402463",
"0.5396126",
"0.53959996",
"0.5389537",
"0.53875834",
"0.5383426",
"0.5382269",
"0.53821063",
"0.5375431",
"0.5373189",
"0.5363887",
"0.5363887",
"0.53606385",
"0.5341273",
"0.53399134",
"0.5336516",
"0.53334266",
"0.53307444",
"0.5330335",
"0.5328187",
"0.5321946",
"0.53194064",
"0.53117335",
"0.53012",
"0.52999055",
"0.52981377",
"0.529628",
"0.52959096",
"0.52825016",
"0.5281484",
"0.52750504",
"0.52741855"
] | 0.7256616 | 1 |
Simulate a physical press and hold of the server power button. | def hold_pwr_btn(self):
self._press_pwr_btn(pushType="PressAndHold") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def press_pwr_btn(self):\n self._press_pwr_btn()",
"def power():\n request_command(tv_command=TVCommand.power)",
"def _press_pwr_btn(self, pushType=\"Press\"):\n power_settings = {\"Action\": \"PowerButton\",\n \"Target\": \"/Oem/Hp\",\n \"PushType\": pushType}\n\n systems_uri = \"/rest/v1/Systems/1\"\n\n status, headers, response = self._rest_post(systems_uri, None,\n power_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def press(self, key: list, t):\n if not self.ser.alive:\n return\n k = '00'\n for v in key:\n k = hex(int(v, 16) ^ int(k, 16))\n if len(k) == 3:\n k = k.replace('0x', '0x0')\n if \"-\" in t:\n val = t.split(\"-\")\n delay = round(random.uniform(float(val[0]), float(val[1])), 4)\n else:\n delay = float(t)\n k = k.replace('0x', '')\n # close relay\n self.ser.write(k.encode('utf-8'), isHex=True)\n # How long do you need to press\n self.log.info('button press time={}'.format(delay))\n time.sleep(delay)\n # release relay\n self.ser.write('00'.encode('utf-8'), isHex=True)",
"def _PressLeftButton(self):\n self._kit.MousePressButtons({PeripheralKit.MOUSE_BUTTON_LEFT})\n time.sleep(self.send_delay)",
"def _PressRightButton(self):\n self._kit.MousePressButtons({PeripheralKit.MOUSE_BUTTON_RIGHT})\n time.sleep(self.send_delay)",
"def on_press(key):\n currentX, currentY = pyautogui.position()\n\n if key in LEFT_LEYS:\n pyautogui.move(-DEFAULT_MOVEMENT, 0)\n if key in DOWN_KEYS:\n pyautogui.move(0, DEFAULT_MOVEMENT)\n if key in UP_KEYS:\n pyautogui.move(0, -DEFAULT_MOVEMENT)\n if key in RIGHT_KEYS:\n pyautogui.move(DEFAULT_MOVEMENT, 0)\n\n if key in LEFTMOST_KEYS:\n pyautogui.moveTo(0, currentY)\n notify(\"Powermouse\", \"Moved to left of the screen\")\n if key in BOTTOM_KEYS:\n pyautogui.moveTo(currentX, screenHeight)\n notify(\"Powermouse\", \"Moved to bottom of screen\")\n if key in TOP_KEYS:\n pyautogui.moveTo(screenWidth, currentY)\n notify(\"Powermouse\", \"Moved to top of screen\")\n if key in RIGHTMOST_KEYS:\n pyautogui.moveTo(0, currentY)\n notify(\"Powermouse\", \"Moved to right of screen\")\n\n if key in CLICK_KEYS:\n pyautogui.click()\n notify(\"Powermouse\", f\"Clicked at position {pyautogui.position()}\")\n\n if key in QUIT_KEYS:\n notify(\"Powermouse\", \"Quitting\")\n exit()",
"def double_click_power(self):\n get_power_event_cmd = (\"getevent -pl 2>&1 | sed -n \"\n \"'/^add/{h}/KEY_POWER/{x;s/[^/]*//p}'\")\n input_event = self.adb.exec_adb_cmd(\n \"shell '{cmd}'\".format(cmd=get_power_event_cmd)).communicate()[0]\n\n self.android_device_driver.adb.exec_adb_cmd(\"shell '{cmd}'\".format(\n cmd=DOUBLE_CLICK_POWER_EVENT_TEMPLATE.format(input_event=input_event)))",
"async def power_on(self):\n ...",
"def _doPowerState(self, state=False):\n if state:\n self._cmdPowerOn()\n else:\n self._cmdPowerOff()",
"def power_down(self):\n t_end = time.time() + 3\n while time.time() < t_end:\n self.light_led(4)\n self.light_led(6)",
"def sleep(self):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = False",
"def pulse(vjoy, btn_id):\n global g_is_running\n g_is_running = True\n while g_is_running:\n vjoy[1].button(btn_id).is_pressed = True\n time.sleep(g_hold_time)\n vjoy[1].button(btn_id).is_pressed = False\n time.sleep(g_pause_time)",
"def LeftClick(self):\n self._PressLeftButton()\n self._ReleaseAllButtons()",
"def pressed(self):\n print(\"Pressed (Pin: {}): {} -- {}\".format(self.button_pin, datetime.now(), self))\n if self.led.is_lit:\n self.led.off()\n self.mute()\n else:\n self.led.on()\n self.unmute()",
"def turn_on(self):\n self._remote.power(1)",
"async def async_press(self) -> None:\n if self.entity_description.key == _RESTART_KEY:\n await self._device.async_reboot()\n else:\n await self._device.async_unpair_remotes()\n await self._device.async_config_remotes(RemoteConfig.OPEN)",
"async def on_buttonA_down(event, data):\n ArmDevice.storage.command[6] = gripper_open_speed",
"def RightClick(self):\n self._PressRightButton()\n self._ReleaseAllButtons()",
"def test_api_ucs_power(self):\n # first power off all servers\n self.set_all_server_power_state(\"off\")\n # verify power state is down\n self.check_all_server_power_state(\"down\")\n # now power on the servers\n self.set_all_server_power_state(\"on\")\n # verify power state is up\n self.check_all_server_power_state(\"up\")",
"def power_up(self):\n t_end = time.time() + 3\n while time.time() < t_end:\n self.light_led(5)\n self.light_led(6)",
"async def power_off(self):\n ...",
"def on_press(self):\n self.pressed = True",
"def on_press(self):\n self.pressed = True",
"def m_press(self, button: MButton):\n pass",
"def Pause():\n\tDmg.enableButton.SetOff()",
"def hold(self):\n self.dev.write(1, 'H')",
"def handle_arm_down_button(button_state, robot):\n if button_state:\n robot.arm_down()",
"def power_on(self):\n pass",
"def voldown(self, raiseby=1):\n command + 'voldown ' + str(raiseby)\n self.run_command(command)",
"def handle_arm_up_button(button_state, robot):\n if button_state:\n robot.arm_up()",
"def _ReleaseAllButtons(self):\n self._kit.MouseReleaseAllButtons()\n time.sleep(self.send_delay)",
"def set_power(self, power: bool):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = power",
"async def power(self, turn_on):\n\n op = DHumOp.ON if turn_on else DHumOp.OFF\n keys = self._get_cmd_keys(CMD_STATE_OPERATION)\n op_value = self.model_info.enum_value(keys[2], op.value)\n if self._should_poll:\n # different power command for ThinQ1 devices\n cmd = \"Start\" if turn_on else \"Stop\"\n await self.set(keys[0], keys[2], key=None, value=cmd)\n self._status.update_status(keys[2], op_value)\n return\n await self.set(keys[0], keys[1], key=keys[2], value=op_value)",
"def stop():\n set_power(0)",
"def volume_down(self):\n self.handleCommand(25)",
"async def async_turn_on(self):\n data_cmd = _command(COMMAND_POWER_ON)\n await self._async_send_command(data_cmd)",
"def test_turn_on(power_supply):\n power_supply.Init()\n assert power_supply.state() != tango.DevState.ON\n power_supply.current = 5.0\n power_supply.turn_on()\n assert power_supply.state() == tango.DevState.ON",
"def wake():\n G.DEVICE.wake()",
"def arm_up(self):\n self.arm_motor.run_forever(speed_sp=self.MAX_SPEED)\n while not self.touch_sensor.is_pressed:\n time.sleep(0.01)\n self.arm_motor.stop()\n ev3.Sound.beep()",
"def turn_on(self) -> None:\n self._monoprice.set_power(self._zone_id, True)",
"def poweron(self) -> None:\n self.servo_reset()",
"async def on_buttonA_down(event, data):\n ArmDevice.storage.command[3] = forearm_roll_speed",
"def motorswitch(self, bo, pin, t):\n self.app.processEvents()\n if(self.win.getStopped() == True):\n self.win.updatelabel2(\"Jingle button was clicked.\\nClick another!\")\n return\n while self.win.getPaused() == True:\n self.app.processEvents() # Not really too sure if this line is needed. NEEDS TESTING\n self.win.updatelabel2(\"Jingle Song Paused!\\nChoose A new Song or Play to Resume!\")\n time.sleep(.1)\n GPIO.output(pin, bo)\n time.sleep(t)",
"def arm_up(self):\n self.arm_motor.run_forever(speed_sp=900)\n while not self.touch_sensor.is_pressed:\n time.sleep(0.01)\n self.arm_motor.stop(stop_action=ev3.Motor.STOP_ACTION_BRAKE)\n ev3.Sound.beep().wait()",
"def togglePWMPower(self):\n # PCPWM1 is located at position 6\n mask = 1 << 6\n self._injectFault(\"PCONP\", 0x400FC0C4, mask)",
"def _press(self, event):",
"def emulate_press(self, key_code, scan_code, value, timeval):\n scan_event = self.create_event_object(\n \"Misc\",\n 0x04,\n scan_code,\n timeval)\n key_event = self.create_event_object(\n \"Key\",\n key_code,\n value,\n timeval)\n return scan_event, key_event",
"def when_pressed(self, button, func, *args):\n\n self.hardware_interfaces[self._gpio].set_pin_event(self._b_names[button],\n func,\n *args)",
"def button_handler(self, channel):\n if channel != self.BUTTON_PIN:\n return\n\n state = GPIO.input(self.BUTTON_PIN)\n now = time.time()\n delta = now - self.prev_button_state[1]\n\n if self.prev_button_state[0] != state:\n self.prev_button_state = (state, now)\n\n if state == GPIO.HIGH:\n self.button_hold = None\n\n # debounce the button tap and trigger action\n if delta > self.TAP_TIME and self.button_tap is None:\n self.button_tap = True\n os.kill(os.getpid(), signal.SIGALRM)\n else:\n self.button_tap = None\n\n # schedule a hold check\n signal.alarm(int(self.HOLD_TIME))\n\n elif state == GPIO.LOW:\n if delta >= self.HOLD_TIME and self.button_hold is None:\n self.button_hold = True\n self.button_tap = False",
"def poweroff(self) -> None:\n pass",
"def power_off(timeout: int = 0) -> None:",
"async def test_onoff(hass: HomeAssistant, mock_remote) -> None:\n\n await setup_panasonic_viera(hass)\n\n data = {ATTR_ENTITY_ID: \"remote.panasonic_viera_tv\"}\n\n # simulate tv off when async_update\n mock_remote.get_mute = Mock(side_effect=SOAPError)\n\n await hass.services.async_call(REMOTE_DOMAIN, SERVICE_TURN_OFF, data)\n await hass.services.async_call(REMOTE_DOMAIN, SERVICE_TURN_ON, data)\n await hass.async_block_till_done()\n\n power = getattr(Keys.power, \"value\", Keys.power)\n assert mock_remote.send_key.call_args_list == [call(power), call(power)]",
"def test_turn_off(power_supply):\n power_supply.Init()\n assert power_supply.state() != tango.DevState.OFF\n power_supply.turn_off()\n assert power_supply.state() == tango.DevState.OFF",
"def handle_button_press(button_state, mqtt_client, message):\n if button_state:\n ev3.Sound.speak(message).wait()\n mqtt_client.send_message(\"button_pressed\", [message])",
"def press(button='left', coords=(0, 0)):\n _perform_click_input(button=button, coords=coords, button_down=True, button_up=False)",
"def ev_joybuttondown(self, event: tcod.event.JoystickButton) -> T | None:",
"def press_key(self, event):\n if self.active:\n keycode = self.mapping[event.pin_num]\n while self.busy:\n sleep(0.01)\n self.busy = True\n self.send_key(keycode)\n self.busy = False",
"def _windows_power_control(self):\n\n os_power_command = 'shutdown /r /t 3' if self._power_event_type == 'restart' \\\n else 'shutdown /h /t 3'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))",
"def press(self) -> bool:\n return self._sendcommand(PRESS_KEY, self._retry_count)",
"def press(self) -> bool:\n return self._sendcommand(PRESS_KEY, self._retry_count)",
"def main():\n bp = Bin_API('COM10')\n print('Testing')\n\n bp.set_hwtrig_term(1)",
"async def async_turn_off(self):\n await self.local_meural.send_key_suspend()",
"def arm_up(self):\n self.arm_motor.run_to_rel_pos(position_sp=5100, speed_sp=900)\n while not self.touch_sensor.is_pressed:\n time.sleep(0.01)\n self.arm_motor.stop()\n ev3.Sound.beep()",
"def doubleclick(point):\n m = PyMouse()\n m.press(*point)\n m.release(*point)\n m.press(*point)\n m.release(*point)",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def controlDown(*args):",
"def trigger(self):\n GPIO.output(self.trigger_pin, 1)\n time.sleep(10/1000000)\n GPIO.output(self.trigger_pin, 0)",
"def wake(self):\n self.timeout_set(0) # Reset timeout counter\n self.write(255) # Wake\n\n if self._firmware >= 264:\n self.__delay(50)\n self.write(self.ASCII_ESC, '8', 0, 0) # Sleep off (important!)\n else:\n # Datasheet recommends a 50 mS delay before issuing further commands,\n # but in practice this alone isn't sufficient (e.g. text size/style\n # commands may still be misinterpreted on wake). A slightly longer\n # delay, interspersed with NUL chars (no-ops) seems to help.\n i = 0\n while i < 10:\n self.write(0)\n self.timeout_set(10000)\n i += 1",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def set_pressed(self, status: bool):\n self._pressed = status\n self._pressed_time = time.time()",
"def button_pressed(self, channel):\n print(\"button_pressed() channel = \"+str(channel))\n GPIO.remove_event_detect(channel) # turn button off to avoid double hits\n match channel:\n case self.prev_button: \n print(\"Previous button - sending message: mycroft.audio.service.prev\")\n self.send_message(\"mycroft.audio.service.prev\")\n case self.stop_button: \n print(\"Stop button - sending message: mycroft.stop\")\n self.send_message(\"mycroft.stop\")\n case self.next_button: \n print(\"Next button - sending message: mycroft.audio.service.next\")\n self.send_message(\"mycroft.audio.service.next\")\n case _: # not expected\n print(\"Did not expect channel = \"+str(channel))\n GPIO.add_event_detect(channel, GPIO.FALLING, callback=self.button_pressed, bouncetime=5) # turn button back on",
"async def poweroff(ctx):\n await ctx.send(\"Bye\")\n await bot.logout()",
"def release(button='left', coords=(0, 0)):\n _perform_click_input(button=button, coords=coords, button_down=False, button_up=True)",
"async def async_turn_on(self) -> None:\n self._zone.power = True",
"def _linux_power_control(self):\n\n os_power_command = 'shutdown -r now' if self._power_event_type == 'restart' \\\n else 'shutdown -h now'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))",
"def volume_up(self):\n self.handleCommand(24)",
"def k_press(self, key: KKey):\n pass",
"def pause(self, instance):\n self.power_off(instance)",
"def set_power(sid):\n # Resolve the passed parameters if any\n timer = None\n os = None\n if request.json:\n if timer in request.json:\n timer = request.json.get('timer')\n if os in request.json:\n os = request.json.get('os')\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n state = hosts.get(db, sid)['state']\n \n if state == 'on':\n # The host is on -- turn it off\n # TODO make a unix shell util file\n # TODO make a windows util file\n return\n elif state == 'off':\n # The host is off -- turn it on\n if timer is not None:\n sleep(timer)\n netutil.wake_on_lan(db, sid)\n ret = {'power': {'state': 'on'}}\n return jsonify(ret)\n # TODO find a keyboard driver and implement OS parameter",
"def raise_arm(self):\r\n self.arm_motor.turn_on(self.speed)\r\n while not self.arm_touch_sensor.is_pressed():\r\n pass\r\n self.arm_motor.turn_off()\r\n\r\n # ---------------------------------------------------------------------\r\n # Done: 6. Implement this method; it is a ONE-LINER! (not)\r\n # ---------------------------------------------------------------------\r",
"def sleep(self):\n if not self.is_sleeping:\n self.wait_until_idle()\n self.__interface.send_command('POWER_OFF')\n self.wait_until_idle()\n self.__interface.send_command('DEEP_SLEEP')\n self.__interface.send_data(0xa5)\n\n self.__sleeping = True",
"def left_button(self, left_speed, right_speed):\n self.right_motor.run_forever(speed_sp=int(right_speed))",
"def simulate_button_clicked(self):\n self.simulate_bool = True\n self.update_change()",
"def click(self,x:int=None,y:int=None):\n x = int(x/self.zoom_count)#1.5是缩放比例\n y = int(y/self.zoom_count)\n lParam = win32api.MAKELONG(x, y)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_MOUSEMOVE,wcon.MK_LBUTTON, lParam)\n win32gui.SendMessage(self.ScreenBoardhwnd, wcon.WM_SETCURSOR, self.ScreenBoardhwnd, win32api.MAKELONG(wcon.HTCLIENT, wcon.WM_LBUTTONDOWN))\n # win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_SETCURSOR, 0, 0)\n while (win32api.GetKeyState(wcon.VK_CONTROL) < 0 or\n win32api.GetKeyState(wcon.VK_SHIFT) < 0 or\n win32api.GetKeyState(wcon.VK_MENU) < 0):\n time.sleep(0.005)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_LBUTTONDOWN,\n wcon.MK_LBUTTON, lParam)\n win32gui.PostMessage(self.ScreenBoardhwnd, wcon.WM_LBUTTONUP, 0, lParam)",
"def set_pressed(self):\n self._pressed = True",
"def presskey(self, key):\n \"\"\"Method to press any key\n Need to add further code for other keys based on requirements\"\"\"\n action = ActionChains(self.driver)\n action.send_keys(key)\n action.perform()",
"def ev_joybuttonup(self, event: tcod.event.JoystickButton) -> T | None:",
"def power_on(self, wait=0.2):\n print('Powering up O2 Meter ({})...'.format(self.ID))\n self.sensor.write(\"#PWUP\\r\")\n on_status = self.sensor.readline()\n time.sleep(wait)\n\n if 'PWUP' in on_status:\n print(' Ready!')\n elif 'ERR' in on_status:\n print('Power-on error: {}'.format(on_status.rstrip()))\n else:\n print('Something went wrong during power-on.\\n -> Sensor returned {}'.format(on_status))\n return",
"def send_software_trigger(self):\n self.lib.SendSoftwareTrigger()",
"def takeControl(self):\n mainloop()"
] | [
"0.7387342",
"0.7323632",
"0.67839485",
"0.6705996",
"0.665293",
"0.66293794",
"0.6543043",
"0.6536976",
"0.6496271",
"0.6282045",
"0.6209138",
"0.61596024",
"0.614907",
"0.61473614",
"0.61369175",
"0.61359274",
"0.611878",
"0.6076426",
"0.6068141",
"0.60628533",
"0.6014772",
"0.5997996",
"0.5969961",
"0.5969961",
"0.59607565",
"0.5936153",
"0.5934278",
"0.5925694",
"0.5907041",
"0.5815695",
"0.5810306",
"0.5806546",
"0.58064806",
"0.5799476",
"0.57872313",
"0.5769194",
"0.5759034",
"0.5747838",
"0.57419205",
"0.5729787",
"0.57285315",
"0.5720088",
"0.5715847",
"0.5705791",
"0.57048565",
"0.57043135",
"0.57001287",
"0.5673069",
"0.5666173",
"0.5664855",
"0.5664544",
"0.5663791",
"0.5650358",
"0.564134",
"0.5625048",
"0.5622691",
"0.56210184",
"0.56176347",
"0.56124467",
"0.55977464",
"0.55977464",
"0.55956614",
"0.55799294",
"0.55765504",
"0.5574887",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55744046",
"0.55735886",
"0.55715984",
"0.5571344",
"0.55610836",
"0.55608135",
"0.5558678",
"0.5557101",
"0.5555794",
"0.55304414",
"0.5528578",
"0.5511082",
"0.5507782",
"0.5506597",
"0.55059505",
"0.5501872",
"0.54994494",
"0.5482662",
"0.5481593",
"0.5478974",
"0.5476014",
"0.546882",
"0.5461028",
"0.5456348",
"0.5450814"
] | 0.7298465 | 2 |
Toggle the power button of server. | def set_host_power(self, power):
power = power.upper()
if (power is not None) and (power not in POWER_STATE):
msg = ("Invalid input '%(pow)s'. "
"The expected input is ON or OFF." %
{'pow': power})
raise exception.IloInvalidInputError(msg)
# Check current power status, do not act if it's in requested state.
cur_status = self.get_host_power_status()
if cur_status == power:
LOG.debug(self._("Node is already in '%(power)s' power state."),
{'power': power})
return
self._perform_power_op(POWER_STATE[power]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def power():\n request_command(tv_command=TVCommand.power)",
"def _toggle_server(self):\r\n\t\t_logger.debug(\"Toggle server button is pressed.\")\r\n\r\n\t\tif not comm_server.is_running():\r\n\t\t\tserver_ip = self.children[\"entry_IP\"].get()\r\n\t\t\tserver_port = int(self.children[\"entry_port\"].get())\r\n\t\t\tif not comm_server.start_server(server_ip, server_port):\r\n\t\t\t\treturn\r\n\t\t\tself._save_server_config(server_ip, server_port)\r\n\r\n\t\t\tself.children[\"btn_toggle_server\"].config(text = \"關閉伺服器\")\r\n\t\t\tself._update_connection_num(\"\")\r\n\t\telse:\r\n\t\t\tcomm_server.stop_server()\r\n\t\t\tself.children[\"btn_toggle_server\"].config(text = \"啟動伺服器\")\r\n\t\t\tself.children[\"label_connections\"].config(text = \"連接數: -/-\")",
"def turn_on(self):\n self._remote.power(1)",
"def toggle(self):\n self._state.is_on = not self._state.is_on\n self.send_command(Command.TOGGLE, [])",
"def togglepow(self,channel):\n if self.rf is not None:\n newstatus = bool(self.pow[channel-1].get())\n finalstatus = self.rf.setrempow(channel-1,newstatus)\n self.messages.log('Remote power channel %d set to '%(channel)+str(finalstatus))\n else:\n self.messages.log('Not connected to a focuser.')",
"async def power_on(self):\n ...",
"def toggle(self):\n s = self.status()\n if s == self.POWER_OFF:\n self.on()\n else:\n self.off()\n return self.status()",
"def turn_on(self) -> None:\n self._monoprice.set_power(self._zone_id, True)",
"def toggle_power(self, duration, wait):\n payload = {\"duration\": duration}\n response = requests.post(self.__api_url('toggle'.format(self.name)), data=payload, headers=self.headers)\n if wait:\n time.sleep(duration)\n return response.text",
"def _doPowerState(self, state=False):\n if state:\n self._cmdPowerOn()\n else:\n self._cmdPowerOff()",
"async def toggle(self, ctx):\r\n serverid = ctx.message.server.id\r\n if self.adkillr[serverid]['toggle'] is True:\r\n self.adkillr[serverid]['toggle'] = False\r\n e = discord.Embed(description='**AntiAdv is now disabled.**')\r\n await self.bot.say(embed=e)\r\n elif self.adkillr[serverid]['toggle'] is False:\r\n self.adkillr[serverid]['toggle'] = True\r\n e = discord.Embed(description='**AntiAdv is now enabled.**')\r\n await self.bot.say(embed=e)\r\n dataIO.save_json(\"data/adkillr/adkillr.json\", self.adkillr)",
"def togglePWMPower(self):\n # PCPWM1 is located at position 6\n mask = 1 << 6\n self._injectFault(\"PCONP\", 0x400FC0C4, mask)",
"async def async_turn_on(self):\n data_cmd = _command(COMMAND_POWER_ON)\n await self._async_send_command(data_cmd)",
"def power_on(self):\n pass",
"def turn_on(self):\n self._lms.query(self._id, 'power', '1')\n self.update_ha_state()",
"def toggle(self):\n self._interrupt_flash()\n GPIO.output(self.pin, GPIO.LOW if self.on else GPIO.HIGH)\n self.on = not self.on",
"def set_power(self, power: bool):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = power",
"def poweroff_server(self, server=None, server_id=None):\n sid = server_id if server_id is not None else server.sid\n if sid is None:\n raise Exception('No Server Specified.')\n json_scheme = self.gen_def_json_scheme('SetEnqueueServerPowerOff', dict(ServerId=sid))\n json_obj = self.call_method_post('SetEnqueueServerPowerOff', json_scheme=json_scheme)\n return True if json_obj['Success'] is 'True' else False",
"async def async_turn_on(self) -> None:\n self._zone.power = True",
"async def toggle(self, ctx):\r\n server = ctx.guild\r\n if self._logs[str(server.id)][\"toggle\"] == True:\r\n self._logs[str(server.id)][\"toggle\"] = False\r\n dataIO.save_json(self._logs_file, self._logs)\r\n await ctx.send(\"Modlogs are now disabled.\")\r\n return\r\n if self._logs[str(server.id)][\"toggle\"] == False:\r\n self._logs[str(server.id)][\"toggle\"] = True\r\n dataIO.save_json(self._logs_file, self._logs)\r\n await ctx.send(f\"Modlogs are now enabled {self.bot.get_emoji(470063310386233344)}\")\r\n return",
"def force_switch_on(self):\n self.turn_on_modem()",
"def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()",
"def turn_on(self, **kwargs):\n set_sonoff_state(self._host, \"on\")\n self._state = True",
"def toggle_server(self):\n name = request.params.get('name', g.DEFAULT_SERVER)\n log.debug('toggle_server(%s)' % name)\n servers = model.Session.query(model.Server)\n server = servers.filter(model.Server.name == name).one()\n server.server_on = not server.server_on\n model.Session.update(server)\n model.Session.commit()\n redirect_to('/admin/dashboard')",
"def turn_on(self, **kwargs):\n self._state = True\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device,'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":1 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":1 }', 5)",
"def _turn_on(self):\n self._turn_display('ON')",
"def modsToggle(self, connect=False, verbose=True):\n try:\n if self._mods_available:\n if connect: \n if verbose: self.logger.info('Enabling mods...')\n self.syslogger.info('Enabling mods...')\n self.sh('echo {} > {}'.format(CONFIG.DEVICE.MODS_ON_OFF['on'], CONFIG.DEVICE.MODS_PATH))\n self._mods_enabled = False\n else:\n if verbose: self.logger.info('Disabling mods...')\n self.syslogger.info('Disabling mods...')\n self.sh('echo {} > {}'.format(CONFIG.DEVICE.MODS_ON_OFF['off'], CONFIG.DEVICE.MODS_PATH))\n self._mods_enabled = False\n except Exception as e:\n self.logger.error('Mods connot be enabled/disabled: {}'.format(e), self.syslogger)",
"def press_pwr_btn(self):\n self._press_pwr_btn()",
"async def power_off(self):\n ...",
"def bulb_toggle():\n tx = zb_explicit_command\n tx[\"dest_addr_long\"] = GE_LINK_BULB_MAC\n tx[\"cluster\"] = CLUSTER_A\n tx[\"data\"] = DATA_TOGGLE\n response = zb.Send(tx)",
"def is_power_onoff(self):\n return self['application'] == 'ccd201_pon_app'",
"def toggle(self, **kwargs):\n self.on = False if self.on else True",
"def toggle(self):\n if self.is_enabled:\n self.disable()\n else:\n self.enable()",
"def poweron(self) -> None:\n self.servo_reset()",
"def turn_on(self, **kwargs):\n self.smartplug.turn_on()",
"def _cmd_don(self, command):\r\n self.l_debug(\"_cmd_don\",\"\")\r\n # TODO: If no PowerOn command, do PowerToggle\r\n return self._send_command('PowerOn')",
"def turn_off(self, **kwargs):\n _LOGGER.error(\"DALI TURN OFF\")\n self._state = False\n\n url = self.urlx + '/toggle'\n headers = {'x-ha-access': 'raspberry',\n 'content-type': 'application/json'}\n\n response = get(url, headers=headers)\n _LOGGER.error(response.text)\n\n json_data = json.loads(response.text)\n _LOGGER.error(json_data)\n\n state = json_data['state']\n\n self._dimmer = 0\n\n self._state = state == 'on'",
"def turn_off(self) -> None:\n self._monoprice.set_power(self._zone_id, False)",
"def turn_off(self, **kwargs: Any) -> None:\n self._device.power_on = False\n _LOGGER.debug(\"Turn off light %s\", self._device.ip)",
"def set_state(self, state: bool) -> None:\n payload = self._cfg.state_power_on if state else self._cfg.state_power_off\n command = f\"{COMMAND_POWER}{self._cfg.idx+1}\"\n self._mqtt_client.publish(\n self._cfg.command_topic + command,\n payload,\n )",
"def turn_on(self, **kwargs: Any) -> None:\n with self._wemo_call_wrapper(\"turn on\"):\n self.wemo.on()",
"async def toggle_play_pause(self):\n _LOGGER.debug(\"[Foobar2k] In Play / Pause\")\n if (self._power == POWER_ON):\n if (self._state == STATE_STOPPED):\n await self.prep_fetch(HTTP_POST, POST_PLAYER_PLAY_PLAYLIST.format(self._current_playlist_id, self._current_index), data=None)\n else: \n await self.prep_fetch(HTTP_POST, POST_PLAYER_PAUSE_TOGGLE, data=None)",
"async def power(self, turn_on):\n\n op = DHumOp.ON if turn_on else DHumOp.OFF\n keys = self._get_cmd_keys(CMD_STATE_OPERATION)\n op_value = self.model_info.enum_value(keys[2], op.value)\n if self._should_poll:\n # different power command for ThinQ1 devices\n cmd = \"Start\" if turn_on else \"Stop\"\n await self.set(keys[0], keys[2], key=None, value=cmd)\n self._status.update_status(keys[2], op_value)\n return\n await self.set(keys[0], keys[1], key=keys[2], value=op_value)",
"def power_on(self):\n return self.inst.write(':OUTP ON')",
"async def async_turn_on(self, **kwargs: Any) -> None:\n await self.coordinator.roku.remote(\"poweron\")\n await self.coordinator.async_request_refresh()",
"def toggle(self):\n if self._state in [STATE_OFF, STATE_IDLE, STATE_STANDBY]:\n self._state = STATE_ON\n else:\n self._state = STATE_OFF",
"def test_api_ucs_power(self):\n # first power off all servers\n self.set_all_server_power_state(\"off\")\n # verify power state is down\n self.check_all_server_power_state(\"down\")\n # now power on the servers\n self.set_all_server_power_state(\"on\")\n # verify power state is up\n self.check_all_server_power_state(\"up\")",
"def _press_pwr_btn(self, pushType=\"Press\"):\n power_settings = {\"Action\": \"PowerButton\",\n \"Target\": \"/Oem/Hp\",\n \"PushType\": pushType}\n\n systems_uri = \"/rest/v1/Systems/1\"\n\n status, headers, response = self._rest_post(systems_uri, None,\n power_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def set_power(sid):\n # Resolve the passed parameters if any\n timer = None\n os = None\n if request.json:\n if timer in request.json:\n timer = request.json.get('timer')\n if os in request.json:\n os = request.json.get('os')\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n state = hosts.get(db, sid)['state']\n \n if state == 'on':\n # The host is on -- turn it off\n # TODO make a unix shell util file\n # TODO make a windows util file\n return\n elif state == 'off':\n # The host is off -- turn it on\n if timer is not None:\n sleep(timer)\n netutil.wake_on_lan(db, sid)\n ret = {'power': {'state': 'on'}}\n return jsonify(ret)\n # TODO find a keyboard driver and implement OS parameter",
"def vm_power(self, vm_name, state):\n states = [\"on\", \"off\"]\n if state not in states:\n raise OpenStackConnectorException(f\"Incorrect action was provided for the vm {vm_name} power state change\")\n \n vm_id = self._get_vm_id_by_name(vm_name)\n\n if not vm_id:\n return False\n \n try:\n if state == \"on\":\n self.connection.compute.start_server(vm_id)\n else:\n self.connection.compute.stop_server(vm_id)\n except ConflictException: # This exception block handles the situation when the VM is already in the required power state\n pass\n \n return True",
"def turn_off(self, **kwargs: Any) -> None:\n if (\n DPCODE_LIGHT in self.tuya_device.status\n and DPCODE_SWITCH not in self.tuya_device.status\n ):\n commands = [{\"code\": DPCODE_LIGHT, \"value\": False}]\n else:\n commands = [{\"code\": DPCODE_SWITCH, \"value\": False}]\n self._send_command(commands)",
"async def async_turn_off(self):\n data_cmd = _command(COMMAND_POWER_OFF)\n await self._async_send_command(data_cmd)",
"def toggle(self) -> None:\n ...",
"async def async_toggle(self):\n await self.async_mute_volume(not self._muted)",
"def power_on(self, port, data_sync=True):\n port = int(port)\n self._validate_port(\"power_on\", port)\n if data_sync:\n self.set_mode(SYNC, port)\n else:\n self.set_mode(CHARGE, port)",
"def change_status():\n if self.on:\n connect.SOCKET.sendall(bytes(\"OFF\\n\", \"utf-8\"))\n self.on = False\n else:\n connect.SOCKET.sendall(bytes(\"ON\\n\", \"utf-8\"))\n self.on = True",
"def turn_on(self):\n GPIO.output(self.gpio, True) # turn on light",
"def powerOff(self):\n self._sendCommand(self.SONY_CMD_ExtBackupCommunicator_ForcePowerOff, bufferSize=0)",
"def turn_off(self):\n self._state = False\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":0 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'): \n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":0 }', 5)",
"def togglePWMEnable(self):\n mask = 1 << 3\n self._injectFault(\"PWM1TCR\", self.TCR, mask)",
"def turn_on(self):\n self._interrupt_flash()\n if not self.on:\n GPIO.output(self.pin, GPIO.HIGH)\n self.on = True",
"def key_toggle():\n toggle_main_off()\n lcd.message = format_lcd_message(\n TITLE,\n f\"Keys enabled: {keys_enabled}\",\n \"\",\n \"Toggle Back\"\n )\n switchLight.red.on()\n switchLight.blue.on()\n\n switch.green.wait_for_release()\n\n switch.red.when_pressed = toggle_keys\n\n switch.blue.wait_for_press()\n\n # Blue light pressed - reset and drop out of diagnostics mode\n toggle_main_on()\n update_display(last_result)",
"async def async_turn_on(self):\n await self.async_mute_volume(False)",
"def led_toggle(self):\n if self.state == GPIO.LOW:\n self.state = GPIO.HIGH\n else:\n self.state = GPIO.LOW\n return self.update_status()",
"def powerDispatch(self):\n\n if self.ui.powerDevice.currentText().startswith('INDI'):\n self.app.power.name = self.ui.powerDeviceName.currentText()\n self.app.message.emit('Power enabled', 0)\n self.deviceStat['power'] = False\n else:\n self.app.power.name = ''\n self.app.message.emit('Power disabled', 0)\n self.deviceStat['power'] = None\n\n return True",
"def r7_on_off():\n \n r7_cmd_packet = b'\\x04\\x14\\x40\\x00\\x00\\xa8\\x0f'\n ser_relay.write(r7_cmd_packet)",
"async def async_turn_on(self, **kwargs):\n try:\n if await self._api.set_relay_state(self._dev_id, \"on\"):\n self._is_on = True\n self.async_write_ha_state()\n except Smile.PlugwiseError:\n _LOGGER.error(\"Error while communicating to device\")",
"def power_off(self, port):\n port = int(port)\n self._validate_port(\"power_off\", port)\n self.set_mode(OFF, port)",
"def turnOnSdkMode(self):\n \n command = b\"\\x90\\x01\\x01\"\n #print(\"turnOnSdkMode run, command: \")\n #print(command)\n \n self.sendCommand(command)",
"def power_on(self, default=False):\n if default:\n return self.exec_command('SupplyPowerDefault = 1')\n return self.exec_command('SupplyPower = 1')",
"def on(cls, client_object):\n vm_mor = client_object.get_api()\n return cls._do_power_action(vm_mor.PowerOnVM_Task())",
"def turn_on_modem(self):\n if not self.is_power_on():\n self._logger.debug(\"Switching modem on...\")\n self.set_pin()\n # give modem some time to login\n time.sleep(10)\n else:\n self._logger.debug(\"Modem is already powered on...\")",
"def toggle_gui(state):\r\n self.mainWidget.standbyPushButton.setHidden(state)\r\n self.mainWidget.autoRecordPushButton.setHidden(state)\r\n self.mainWidget.recordPushButton.setVisible(state)\r\n self.mainWidget.recordPushButton.setEnabled(state)\r\n self.mainWidget.pauseToolButton.setVisible(state)\r\n self.mainWidget.eventComboBox.setDisabled(state)\r\n self.mainWidget.roomComboBox.setDisabled(state)\r\n self.mainWidget.dateComboBox.setDisabled(state)\r\n self.mainWidget.talkComboBox.setDisabled(state)\r\n self.mainWidget.audioFeedbackCheckbox.setDisabled(state)",
"def power_off(fast: bool = True, restart: bool = False) -> None:",
"def wifi_on(self):\n self._clear_read_buffer()\n self._write_cmd(\"PE01\")\n time.sleep(100e-3)",
"def turn_on(self, **kwargs):\n self._is_on = True",
"def toggle(self) -> None:",
"def toggle(self) -> None:",
"async def toggle(self, ctx):\n guild = ctx.message.guild\n\n enabled = await self.config.guild(guild).enabled()\n\n enabled = not enabled\n await self.config.guild(guild).enabled.set(enabled)\n\n if enabled is True:\n await ctx.send(\"AntiSpam has been enabled\")\n else:\n await ctx.send(\"AntiSpam has been disabled\")",
"def test_off_reboot_on(self):\n self.openstack('baremetal node power off {0}'\n .format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power off', show_prop['power_state'])\n\n self.openstack('baremetal node reboot {0}'.format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power on', show_prop['power_state'])",
"def turn_on(self, **kwargs):\n self._send_command(\"turn_on\")",
"def enable(self):\n self.switch.enable()\n self._enabled = True",
"async def async_turn_off(self, **kwargs):\n try:\n if await self._api.set_relay_state(self._dev_id, \"off\"):\n self._is_on = False\n self.async_write_ha_state()\n except Smile.PlugwiseError:\n _LOGGER.error(\"Error while communicating to device\")",
"def toggled_comunication(self):\n if self.actionPC_Monitor.isChecked() and self.actionPC_Monitor.isEnabled():\n self.actionPC_Monitor.setEnabled(0)\n self.actionPC_Sensor_Actuador.setChecked(0)\n self.actionPC_Sensor_Actuador.setEnabled(1)\n self.monitor_environment()\n \n elif self.actionPC_Sensor_Actuador.isChecked() and self.actionPC_Sensor_Actuador.isEnabled():\n self.actionPC_Sensor_Actuador.setEnabled(0)\n self.actionPC_Monitor.setChecked(0)\n self.actionPC_Monitor.setEnabled(1)\n self.actuator_environment()",
"async def poweroff(ctx):\n await ctx.send(\"Bye\")\n await bot.logout()",
"def toggle_sound(self):\n self.game_data.set_sound_on(not self.game_data.is_sound_on())\n self.settings_buttons[2].set_images(self.get_sound_button_img(), self.get_sound_button_img_h())",
"def toggle_mute(cls) -> bool:\n raise NotImplementedError",
"def turn_on(self, **kwargs):\n self._is_on = True\n self.schedule_update_ha_state()\n self.hass.data[ZIGATE_DOMAIN].action_onoff(self._device.addr,\n self._endpoint,\n 1)",
"async def async_turn_off(self) -> None:\n self._zone.power = False",
"def poweron(self):\n raise NotImplementedError()",
"async def async_turn_on(self, **kwargs: Any) -> None:\n _LOGGER.debug(\"Tried to switch on %s\", self.name)\n try:\n await self.hass.async_add_executor_job(\n self.device.appliance.set_setting, BSH_POWER_STATE, BSH_POWER_ON\n )\n except HomeConnectError as err:\n _LOGGER.error(\"Error while trying to turn on device: %s\", err)\n self._state = False\n self.async_entity_update()",
"def toggle_pause(self):\n self.m_btn_pause = not self.m_btn_pause",
"def togglePause(self):\n self.model.paused = not self.model.paused\n self.proc.send_signal(signal.SIGUSR1)",
"def set_powerobject(self, boolean):\n if boolean == True:\n self.powerobject = 'P'",
"def poweroff(self) -> None:\n pass",
"async def async_turn_on(self) -> None:\n await self._device.leave_standby()",
"async def async_set_wifi_led_on(self):\n return",
"async def power_on(self) -> str:\n return f\"d2 lamp is {await self.hw_device.lamp('d2')}; halogen lamp is {await self.hw_device.lamp('hal')}\"",
"def ToggleBlink(self):\n self.displaycontrol ^= self.LCD_BLINKON\n self.write(self.LCD_DATA_E1, self.LCD_DISPLAYCONTROL | self.displaycontrol)\n self.write(self.LCD_DATA_E2, self.LCD_DISPLAYCONTROL | self.displaycontrol)",
"def Pause():\n\tDmg.enableButton.SetOff()"
] | [
"0.7578163",
"0.7492358",
"0.74232733",
"0.72111523",
"0.7157098",
"0.7149466",
"0.7111762",
"0.69473296",
"0.6867693",
"0.67997324",
"0.67328346",
"0.6578583",
"0.65758115",
"0.6563602",
"0.65376586",
"0.6518119",
"0.6517999",
"0.6457877",
"0.6456714",
"0.64391696",
"0.64276826",
"0.63936454",
"0.6383528",
"0.6355255",
"0.6351885",
"0.63495183",
"0.6332242",
"0.6300469",
"0.6292949",
"0.62710047",
"0.6257445",
"0.62521446",
"0.621348",
"0.6204684",
"0.6195308",
"0.6186239",
"0.6174548",
"0.61713886",
"0.6162179",
"0.6160544",
"0.6155294",
"0.6133791",
"0.6123957",
"0.61234295",
"0.6121058",
"0.6120047",
"0.61156857",
"0.6112089",
"0.6095479",
"0.6068021",
"0.6065763",
"0.6063611",
"0.60589725",
"0.6054635",
"0.6052994",
"0.6052666",
"0.6041764",
"0.6033559",
"0.6023988",
"0.6016309",
"0.6000973",
"0.5999277",
"0.5998042",
"0.59790397",
"0.5967167",
"0.5962491",
"0.59537995",
"0.59462214",
"0.59450763",
"0.5944931",
"0.5941414",
"0.59341615",
"0.59258837",
"0.5925266",
"0.5922101",
"0.59212965",
"0.5908513",
"0.5908513",
"0.59007925",
"0.5894115",
"0.58932024",
"0.58871305",
"0.5875102",
"0.58623505",
"0.58562315",
"0.5852967",
"0.5849009",
"0.58484596",
"0.5842326",
"0.58408666",
"0.5840718",
"0.58315",
"0.5826371",
"0.58178055",
"0.5813818",
"0.58136",
"0.58063775",
"0.5799747",
"0.57985103",
"0.5797722"
] | 0.5862777 | 83 |
Request the http boot url from system in uefi boot mode. | def get_http_boot_url(self):
if(self._is_boot_mode_uefi() is True):
return self._get_bios_setting('UefiShellStartupUrl')
else:
msg = 'get_http_boot_url is not supported in the BIOS boot mode'
raise exception.IloCommandNotSupportedInBiosError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_http_boot_url(self, url):\n if(self._is_boot_mode_uefi() is True):\n self._change_bios_setting({'UefiShellStartupUrl': url})\n else:\n msg = 'set_http_boot_url is not supported in the BIOS boot mode'\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def get_http_boot_uri(self):\n try:\n sushy_system = self._get_sushy_system()\n http_boot_uri = sushy_system.http_boot_uri.httpbooturi\n except sushy.exceptions.SushyError as e:\n msg = (self._('Not able to find HTTP Boot URI. Error: '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n return http_boot_uri",
"def set_http_boot_uri(self, url):\n try:\n sushy_system = self._get_sushy_system()\n sushy_system.http_boot_uri.set_http_boot_uri(url)\n except sushy.exceptions.SushyError as e:\n msg = (self._('Unable to set HTTP Boot URI. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def is_http_boot_requested(node):\n http_boot_requested = (\n str(node.driver_info.get('enable_uefi_httpboot', 'false')).lower())\n return http_boot_requested == 'true'",
"def _get_bios_boot_resource(self, data):\n try:\n boot_uri = data['links']['Boot']['href']\n except KeyError:\n msg = ('Boot resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, boot_settings = self._rest_get(boot_uri)\n\n if status != 200:\n msg = self._get_extended_error(boot_settings)\n raise exception.IloError(msg)\n\n return boot_settings",
"def elim_bootstrap_fetch(tree):\n\n boot = tree.find('.//target[@name=\"boot\"]')\n for child in boot.findall(\"./exec\"):\n boot.remove(child)\n echo = boot.find(\"./echo\")\n echo.attrib[\"message\"] = \"Not fetching bootstrap libraries in the Fedora build\"",
"def flashUboot(self):\n\t\tif self.settings.getKeyValue('flash.uboot?') == 'y':\n\t\t\tloadAddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\t\tcmd = self.settings.getKeyValue('u-boot.load.command')\n\t\t\tcmd = cmd.replace('<u-boot>', 'u-boot.bin.12x.2430')\n\t\t\tself.socket.send(cmd, 5)\n\t\t\t#self.socket.send('protect off 1:0-1\\r', 2)\n\t\t\t#self.socket.send('erase 1:0-1\\r', 2)\n\t\t\t#self.socket.send('cp.b 80000000 %s 2ffff\\r' % loadAddress)\n\t\t\treturn None\n\t\t\t#cmd = cmd.replace('<u-bootloadadress>', self.u-bootloadaddress)",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def set_bios_bootmode_uefi(ip, login_account, login_password, system_id):\n result = {}\n login_host = \"https://\" + ip\n try:\n # Connect using the BMC address, account name, and password\n # Create a REDFISH object\n REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,\n password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)\n # Login into the server and create a session\n REDFISH_OBJ.login(auth=utils.g_AUTH)\n except:\n traceback.print_exc()\n result = {'ret': False, 'msg': \"Please check the username, password, IP is correct\"}\n return result\n\n # GET the ComputerSystem resource\n system = utils.get_system_url(\"/redfish/v1\", system_id, REDFISH_OBJ)\n if not system:\n result = {'ret': False, 'msg': \"This system id is not exist or system member is None\"}\n REDFISH_OBJ.logout()\n return result\n for i in range(len(system)):\n system_url = system[i]\n response_system_url = REDFISH_OBJ.get(system_url, None)\n if response_system_url.status != 200:\n error_message = utils.get_extended_error(response_system_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (system_url, response_system_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else:\n # Get the bios resource\n bios_url = response_system_url.dict['Bios']['@odata.id']\n response_bios_url = REDFISH_OBJ.get(bios_url, None)\n if response_bios_url.status != 200:\n error_message = utils.get_extended_error(response_bios_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (bios_url, response_bios_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else: # Get bios success\n # Seek boot mode from bios attributes\n attribute_bootmode = None\n attributes = response_bios_url.dict['Attributes']\n for attribute in attributes:\n if attribute == \"BootMode\" or attribute == \"SystemBootMode\":\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"SystemBootMode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"Boot\" in attribute and \"Mode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n result = {'ret': False, 'msg': \"Can not found BootMode attribute in response of url %s\" %(bios_url)}\n REDFISH_OBJ.logout()\n return result\n\n # Get boot mode setting guide from bios registry\n WarningText = None\n ValueName = None\n bios_registry_url = \"/redfish/v1/Registries/\" + response_bios_url.dict['AttributeRegistry']\n response_bios_registry_url = REDFISH_OBJ.get(bios_registry_url, None)\n if response_bios_registry_url.status == 200:\n locations = response_bios_registry_url.dict['Location']\n bios_regjson_url = None\n for location in locations:\n if 'en' in location['Language']:\n bios_regjson_url = location['Uri']\n break\n if bios_regjson_url:\n response_bios_regjson_url = REDFISH_OBJ.get(bios_regjson_url, None)\n if response_bios_regjson_url.status == 200:\n regattributes = response_bios_regjson_url.dict['RegistryEntries']['Attributes']\n for regattribute in regattributes:\n if regattribute['AttributeName'] == attribute_bootmode:\n if 'WarningText' in regattribute:\n WarningText = regattribute['WarningText']\n for value in regattribute['Value']:\n if 'legacy' in value['ValueName'].lower():\n continue\n if 'uefi' in value['ValueName'].lower():\n ValueName = 
value['ValueName']\n break\n ValueName = value['ValueName']\n break\n \n # Perform patch to set\n if ValueName == None:\n ValueName = \"UEFIMode\"\n pending_url = response_bios_url.dict['@Redfish.Settings']['SettingsObject']['@odata.id']\n parameter = {attribute_bootmode: ValueName}\n attribute = {\"Attributes\": parameter}\n headers = {\"If-Match\": '*'}\n response_pending_url = REDFISH_OBJ.patch(pending_url, body=attribute, headers=headers)\n if response_pending_url.status in [200,204]:\n if WarningText:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful. WarningText: %s'% (WarningText) }\n else:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful'}\n elif response_pending_url.status == 405:\n result = {'ret': False, 'msg': \"Resource not supported\"}\n else:\n error_message = utils.get_extended_error(response_pending_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (\n pending_url, response_pending_url.status, error_message)}\n\n # Logout of the current session\n try:\n REDFISH_OBJ.logout()\n except:\n pass\n return result",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def bootloader() -> NoReturn:",
"def test_get_bios_boot_mode_by_moid(self):\n pass",
"def actionFromweb(self):\n print(\"Grabbing %x firmware.\" % self.dev_id)\n print(\"%s\" % firmware[self.dev_id])\n fn=\"/tmp/.goodfet.hex\"\n os.system(\"curl %s >%s\" % (firmware[self.dev_id],fn))\n\n fw=Memory(fn)\n #fw.loadIhex(open(fn,\"rb\"))\n\n sys.stderr.write(\"Program ...\\n\")\n sys.stderr.flush()\n self.programData(fw, self.ACTION_PROGRAM | self.ACTION_VERIFY)\n sys.stderr.write(\"%i bytes programmed.\\n\" % self.byteCtr)\n sys.stderr.flush()",
"def boot(self):\n\n pass",
"def get_boot_order(rfo, api=1, unit=1):\n\n url = f\"/redfish/v{api}/systems/{unit}/bios\"\n res = rfo.get(url)\n if res.status != 200:\n print(f\"Error: {res.status}: {res.read}\")\n return \"XXX\"\n booturl = res.dict['Oem']['Hpe']['Links']['Boot']['@odata.id']\n res = rfo.get(booturl)\n if res.status != 200:\n print(f\"HTTP Fail Status: {res.status} - {res.read}\")\n return \"XXX\"\n return res.dict['DefaultBootOrder']",
"def get_server_url():\n try:\n url = os.environ['API_HOST']\n # print('[ OK ] Server url loaded: ', url)\n except KeyError:\n url = 'http://localhost:3300/'\n print('[ WARNING ] API_HOST environment variable was not found. default server url was set at: ', url)\n\n return url",
"def test_update_bios_boot_mode(self):\n pass",
"def setUbootFlashAddress(self):\n\t\tself.ubootflashaddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\treturn None",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version|json\", \"show hostname\"], None,\n 'mixed')",
"async def get_device_boottime_hostname(self):\n\n if self.transport == 'https':\n cmdlist = [\"show version\", \"show hostname\"]\n else:\n cmdlist = [\"show version|json\", \"show hostname|json\"]\n await self.exec_cmd(self._parse_boottime_hostname, cmdlist, None)",
"def boot(self, boot_node_request):\n return self.client.call('POST',\n self.name + 'boot', payload=boot_node_request)",
"def get_secure_boot_mode(self):\n system = self._get_host_details()\n\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = ('\"SecureBoot\" resource or feature is not supported'\n ' on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # get the Secure Boot object\n status, headers, secure_boot_settings = self._rest_get(secure_boot_uri)\n\n if status >= 300:\n msg = self._get_extended_error(secure_boot_settings)\n raise exception.IloError(msg)\n\n return secure_boot_settings['SecureBootCurrentState']",
"def start(self, module=None, app=None, delay=None):\n with (app or flask.current_app).test_request_context():\n path = self.url(delay=delay)\n\n url = 'https://{module}-dot-{hostname}{path}'.format(\n module=module or self.module_name,\n hostname=app_identity.get_default_version_hostname(),\n path=path)\n\n urlfetch.fetch(url)",
"def boot():\r\n print \"\"\"\r\n ###### ## ## ### ## ## ## ## ######## ########\r\n ## ## ## ## ## ## ### ## ## ## ## ## ##\r\n ## #### ## ## #### ## ## ## ## ## ##\r\n ## ## ## ## ## ## ## ## ## ######## ######\r\n ## ## ######### ## #### ## ## ## ## ##\r\n ## ## ## ## ## ## ### ## ## ## ## ##\r\n ###### ## ## ## ## ## ####### ## ## ########\r\n\r\n Version %s-%s\r\n\r\n Multi Purpose Artificial Inelegance Program\r\n Copyright (c) Alexandre Gauthier 2010-2011\r\n All Rights Reserved\r\n \"\"\" % ( constants.VERSION, constants.TAGNAME )\r\n\r\n # Initialize log\r\n # TODO: The values should be read from config file.\r\n log.init_log('cyanure.log', 'DEBUG')\r\n\r\n logger.info(\"Cyanure system init: Version %s (%s)\" % (\r\n constants.VERSION, constants.TAGNAME ))",
"def main():\n kernel_params = _parse_kernel_cmdline()\n api_url = kernel_params.get('ipa-api-url')\n if api_url is None:\n _process_error('Mandatory kernel parameter \"ipa-api-url\" is missing.')\n\n boot_mac = kernel_params.get('BOOTIF')\n if boot_mac is None:\n _process_error('Cannot define boot interface, \"BOOTIF\" kernel '\n 'parameter is missing.')\n\n # There is a difference in syntax in BOOTIF variable between pxe and ipxe\n # boot with Ironic.\n # For pxe boot the the leading `01-' denotes the device type (Ethernet)\n # and is not a part of the MAC address\n if boot_mac.startswith('01-'):\n boot_mac = boot_mac[3:].replace('-', ':')\n\n # FIXME(pas-ha) discover all MACs\n node = lookup(api_url, [boot_mac])\n uuid = node['node']['uuid']\n timeout = node['config']['heartbeat_timeout']\n\n heartbeat_url = '{api_url}/v1/heartbeat/{uuid}'.format(api_url=api_url,\n uuid=uuid)\n for n in range(_GET_ADDR_MAX_ITERATION):\n boot_ip = _get_interface_ip(boot_mac)\n if boot_ip is not None:\n break\n time.sleep(_RETRY_INTERVAL)\n else:\n _process_error('Cannot find IP address of boot interface.')\n\n heartbeat(heartbeat_url, boot_ip, timeout)",
"def _get_base_url(self):\n return 'https://'+self.get_address_and_port_string()",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version\"], None)",
"def base_url(self):\n return \"https://api.byte-stack.net\" if self.use_sandbox \\\n else \"https://api.ovo.id\"",
"def start(self):\n self.get(self.url)",
"def get_boot_mode():\n boot_mode = 'Legacy'\n try:\n reg_key = winreg.OpenKey(\n winreg.HKEY_LOCAL_MACHINE, r'System\\CurrentControlSet\\Control')\n reg_value = winreg.QueryValueEx(reg_key, 'PEFirmwareType')[0]\n if reg_value == 2:\n boot_mode = 'UEFI'\n except:\n boot_mode = 'Unknown'\n\n return boot_mode",
"def _base_url(self):\n # URL Protocol\n proto = 'https' if self._ssl else 'http'\n\n # Device port number\n if self._port is None:\n port = 8080 if self._ssl else 8008\n else:\n port = self._port\n \n return f'{proto}://{self._address}:{port}/api/v1'",
"def fusion_api_get_firmware_driver(self, uri=None, api=None, headers=None, param=''):\n return self.driver.get(uri, api, headers, param)",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version\", \"show run hostname\"], None)",
"def getHost():",
"def getHost():",
"def set_bootloader_mode(self, mode):\n self.check_validity()\n\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')",
"def _get_firmware_update_service_resource(self):\n manager, uri = self._get_ilo_details()\n try:\n fw_uri = manager['Oem']['Hp']['links']['UpdateService']['href']\n except KeyError:\n msg = (\"Firmware Update Service resource not found.\")\n raise exception.IloCommandNotSupportedError(msg)\n return fw_uri",
"def pibooth_startup(cfg, app):",
"def get_boot_mode(self, task):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='get_boot_mode')",
"def get_buildbot_url():\n return \"http://10.45.4.98:8001/\"",
"def localhost():\n env.run = local\n env.cd = lcd\n env.deployment = 'local'",
"def test_patch_bios_boot_mode(self):\n pass",
"def get_api_url():\n return \"https://api.basespace.illumina.com/v1pre3\"",
"def load_kernel_module(params) -> None:\n print(\"Loading kernel module...\")\n os.system(\"modprobe -r v4l2loopback >/dev/null 2>&1\")\n cmd = \"modprobe v4l2loopback devices=1 video_nr=\" + params['loopback_nr'] + \\\n \" card_label=\" + params['loopback_name'] + \\\n \" exclusive_caps=\" + params['loopback_exclusive'] + \" >/dev/null 2>&1\"\n if os.system(cmd) == 0:\n print(\" Success !\")\n else:\n print(\" Failure !\")",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"cat /proc/uptime\", \"hostname\", \"show version\"], None, 'text')",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"cat /proc/uptime\", \"hostnamectl\",\n \"cat /etc/os-release\"], None, 'text')",
"def url(vmanage_host,vmanage_port,api):\r\n \"\"\" function to get the url provide api endpoint \"\"\"\r\n \r\n return f\"https://{vmanage_host}:{vmanage_port}{api}\"",
"def get_boot_device(self):\n operation = 'get_boot_device'\n try:\n boot_device = self.sp_manager.get_boot_device()\n return boot_device\n except UcsException as ex:\n print(_(\"Cisco client exception: %(msg)s.\"), {'msg': ex})\n raise exception.UcsOperationError(operation=operation, error=ex)",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show system uptime|display json\",\n \"show version\"], None, 'mixed')",
"def test_get_bios_boot_mode_list(self):\n pass",
"def test_home(self):\n self.selenium.get('{}/'.format(self.live_server_url))",
"def base_url(self):\n return \"http://{0}:{1}/app\".format(self.host, self.port)",
"def read_bootinfo(self, orig_name):\n api_page = \"/configuration/object/read_bootinfo\"\n url = \"{}{}?{}&UIDARUBA={}\".format(\n self.base_url,\n api_page,\n self.config_path,\n self.uidaruba)\n\n obj = {\"_action\": \"modify\",\n \"read_bootinfo_option\": \"ap-name\",\n \"ap-name\": orig_name\n }\n\n json_obj = json.loads(json.dumps(obj))\n resp = self.post(url, json_obj)\n print(\"read_bootinfo_resp: {}\".format(resp.status_code))\n # print(resp.text)",
"def setup_slave_web():\n print(\"Starting slave web\")\n r = req.patch(f\"{SLAVE_API_URL}/formation/web\", json=API_PAYLOAD_1, headers=SLAVE_API_HEADERS)\n if r.status_code != req.codes.ok:\n print(\"Unable to start the web dyno on slave\")\n print(r.text)\n return False\n #wait a bit for the web process to start up\n print(\"Waiting a bit\")\n time.sleep(10)\n r = req.get(SLAVE_URL)\n if not r.text.startswith(\"Index\"):\n print(\"Something is wrong with slave:\")\n print(r.text)\n return False\n\n print(\"Got response from slave:\", r.text)\n return True",
"def _request_bootstrap_server_info() -> str:\n if __debug__:\n logger.info(\"Requesting bootstrap server...\")\n req = BootstrapServerRequest()\n DistroStreamClientHandler.request(req)\n\n # Retrieve answer\n req.wait_processed()\n error = req.get_error_code()\n if error != 0:\n raise BackendException(error, req.get_error_msg())\n\n # Parse answer\n answer = req.get_response_msg()\n if __debug__:\n logger.debug(\"Retrieved bootstrap server information: %s\", answer)\n\n return answer",
"def get_boot_mode(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def service_url(self):\n return \"http://127.0.0.1:%d/wd/hub\"%self.port",
"def main():\n get_obofoundry(force_download=True)",
"def _get_http_ip_address_path(ip_address):\n # grub2 bootloader needs ip based config file name.\n root_dir = get_http_boot_dir()\n return os.path.join(root_dir, ip_address + \".conf\")",
"def _get_local_endpoint():\n return \"https://%s:8446\" % socket.getfqdn()",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def fusion_api_get_server_hardware_firmware(self, uri, api=None, headers=None):\n return self.sh.get(uri=uri, api=api, headers=headers, param='/firmware')",
"def get_internal_url(self):\n prefer_internal_ip = self.charm_config.get(\"prefer-internal-ip\")\n fqdn = socket.getfqdn()\n ip = socket.gethostbyname(fqdn)\n if prefer_internal_ip:\n return \"http://{}:8008\".format(ip)\n return \"http://{}:8008\".format(fqdn)",
"def hw_from_req(req):\n return req.app['com.opentrons.hardware']",
"def get_boot_device(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).get_boot_device(task)\n else:\n return super(\n ipmitool.IPMIManagement, self).get_boot_device(task)",
"def lab_network(self) -> None:\n self.host = getattr(self, \"host\")\n try:\n getattr(self.host, \"uboot_network_setup\")(self)\n except AttributeError:\n raise Exception(\n f\"The lab-host {self.host!r} does not seem to support uboot network setup!\"\n )",
"def main():\n with Scrapli(**MY_DEVICE) as conn:\n print(conn.get_prompt())\n print(conn.send_command(\"show run | i hostname\").result)",
"def test_0020_external(self):\n self.setup_defaults()\n app = self.get_app()\n\n with app.test_request_context('/'):\n self.assertEqual(\n url_for('nereid.website.home', _external=True),\n 'http://localhost/'\n )",
"def request_externally(url):\n session = BQServer()\n #session = root\n session.authenticate_mex(identity.mex_authorization_token())\n session.root = request.host_url\n url = session.prepare_url(url)\n log.debug(\"begin routing externally: %s\" % url)\n try:\n resp = session.get(url, headers={'Content-Type':'text/xml'})\n except BQCommError as e:\n log.debug('%s' % str(e))\n return\n\n log.debug(\"end routing externally: status %s\" % resp.status_code)\n return resp",
"def _locate_bootloader():\n pkg_path = os.path.dirname(__file__)\n blpath = os.path.abspath(os.path.join(pkg_path, 'bootloader'))\n if not os.path.isfile(blpath):\n raise InternalError(\"bootloader not found at {}\".format(blpath))\n return blpath",
"def fusion_api_get_server_hardware_bios(self, uri, api=None, headers=None):\n return self.sh.get(uri=uri, api=api, headers=headers, param='/bios')",
"def goto_environment_url(self):\n try:\n self._browser.get(self._environment.url)\n except Exception as e:\n self.logger.error(\"Error going to environment '\" + self._environment.url + \"' : \" + str(e))\n raise",
"def boot_mac_address(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"boot_mac_address\")",
"def wait_for_linux(self):\n super(RPI, self).wait_for_linux()\n\n self.sendline('cat /etc/issue')\n if 0 == self.expect(['OpenEmbedded'] + self.prompt):\n self.routing = False\n self.wan_iface = \"eth0\"\n self.lan_iface = None\n self.expect(self.prompt)\n\n self.sendline(\n 'dmcli eRT getv Device.DeviceInfo.X_RDKCENTRAL-COM_CaptivePortalEnable'\n )\n if self.expect([\n ' type: bool, value: false',\n 'dmcli: not found'\n ] + self.prompt) > 1:\n self.sendline(\n 'dmcli eRT setv Device.DeviceInfo.X_RDKCENTRAL-COM_CaptivePortalEnable bool false'\n )\n self.expect(self.prompt)\n self.sendline('reboot')\n super(RPI, self).wait_for_linux()",
"def set_download(self):\n print 'Setting download command...'\n wget = 0\n urllib = 0\n # JULIE : Cut proxy stuff...was causing problems (see scalapack installer if you want it back)\n if urllib == 0:\n # if urllib2 is not present checks if wget is present\n # in the PATH and if yes it sets the download command\n # to be wget\n print \"Checking availablility of wget...\",\n path=str(os.getenv('PATH')).split(os.pathsep)\n for i in path:\n if (os.path.isfile(os.path.join(i,'wget'))):\n print \"available\"\n wget = 1\n break\n if wget:\n # test wget\n print \"Testing wget...\",\n comm = 'wget --tries=2 --timeout=5 http://www.netlib.org/lapack/index'\n (output, error, retz) = runShellCommand(comm)\n if(retz != 0):\n print 'not working.'\n wget = -1\n else:\n print \"working\"\n self.downcmd=\"wget\"\n os.remove(\"index\")\n return\n else:\n # wget not available\n print \"not available\"\n wget=0",
"def base_url():\n return json.loads('{\"message\": \"Try with /data\", \"success\": false}')",
"def get_pending_boot_mode(self):\n headers, uri, bios_settings = self._check_bios_resource(['BootMode'])\n _, _, settings = self._get_bios_settings_resource(bios_settings)\n boot_mode = settings.get('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n return boot_mode.upper()",
"def __command_url(self):\n return \"http://\" + self._host + \\\n \"/cgi-bin/hi3510/{}&-usr=\" + \\\n self._username + \"&-pwd=\" + self._password",
"def getDBSApi():\n if 'testbed' in dbs3_url:\n dbs3_url_reader = dbs3_url + '/dbs/int/global/DBSReader'\n else:\n dbs3_url_reader = dbs3_url + '/dbs/prod/global/DBSReader'\n\n from dbs.apis.dbsClient import DbsApi\n\n\n #this needs to come after /data/srv/wmagent/current/apps/wmagent/etc/profile.d/init.sh is sourced \n dbsApi = DbsApi(url = dbs3_url_reader)\n return dbsApi",
"def activator_ui():\n local('activator ui -Dhttp.address=10.0.2.15')",
"def _bootloader_file(self, exe, extension=None):\n # Having console/windowed bootloader makes sense only on Windows and Mac OS.\n if is_win or is_darwin:\n if not self.console:\n exe = exe + 'w'\n # There are two types of bootloaders:\n # run - release, no verbose messages in console.\n # run_d - contains verbose messages in console.\n if self.debug:\n exe = exe + '_d'\n if extension:\n exe = exe + extension\n bootloader_file = os.path.join(HOMEPATH, 'PyInstaller', 'bootloader', PLATFORM, exe)\n logger.info('Bootloader %s' % bootloader_file)\n return bootloader_file",
"def HTTPInterface(config: Configuration, port: int, uid: str, locast_service: LocastService, station_scan=False) -> Flask:\n log = logging.getLogger(\"HTTPInterface\")\n app = Flask(__name__)\n\n host_and_port = f'{config.bind_address}:{port}'\n\n @app.route('/', methods=['GET'])\n @app.route('/device.xml', methods=['GET'])\n def device_xml() -> Response:\n \"\"\"Render an XML when /device.xml is called.\n\n Returns:\n Response: XML response\n \"\"\"\n xml = render_template('device.xml',\n device_model=config.device_model,\n device_version=config.device_version,\n friendly_name=locast_service.city,\n uid=uid,\n host_and_port=host_and_port)\n return Response(xml, mimetype='text/xml')\n\n def _device_id_checksum(device_id: int) -> int:\n \"\"\"Generate a HDHomerun checksum for a device ID.\n HDHomerun considers a device to be valid if the checksum\n is 0. Adding the checksum to the device ID will\n provide a valid checksum though.\n\n Args:\n device_id (int): Device ID\n\n Returns:\n int: Checksum of the device id.\n \"\"\"\n lookup_table = [0xA, 0x5, 0xF, 0x6, 0x7, 0xC, 0x1,\n 0xB, 0x9, 0x2, 0x8, 0xD, 0x4, 0x3, 0xE, 0x0]\n checksum = 0\n checksum ^= lookup_table[(device_id >> 28) & 0x0F]\n checksum ^= (device_id >> 24) & 0x0F\n checksum ^= lookup_table[(device_id >> 20) & 0x0F]\n checksum ^= (device_id >> 16) & 0x0F\n checksum ^= lookup_table[(device_id >> 12) & 0x0F]\n checksum ^= (device_id >> 8) & 0x0F\n checksum ^= lookup_table[(device_id >> 4) & 0x0F]\n checksum ^= (device_id >> 0) & 0x0F\n\n return checksum\n\n @app.route('/discover.json', methods=['GET'])\n def discover_json() -> Response:\n \"\"\"Return data about the device in JSON\n\n Returns:\n Response: JSON response containing device information\n \"\"\"\n\n device_id = int(uid[:8], 16) # Hex string to int\n valid_id = device_id + _device_id_checksum(device_id)\n\n data = {\n \"FriendlyName\": locast_service.city,\n \"Manufacturer\": \"locast2dvr\",\n \"ModelNumber\": config.device_model,\n \"FirmwareName\": config.device_firmware,\n \"TunerCount\": config.tuner_count,\n \"FirmwareVersion\": config.device_version,\n \"DeviceID\": hex(valid_id)[2:],\n \"DeviceAuth\": \"locast2dvr\",\n \"BaseURL\": f\"http://{host_and_port}\",\n \"LineupURL\": f\"http://{host_and_port}/lineup.json\"\n }\n return jsonify(data)\n\n @app.route('/lineup_status.json', methods=['GET'])\n def lineup_status_json() -> Response:\n \"\"\"Provide a (somewhat fake) status about the scanning process\n\n Returns:\n Response: JSON containing scanning information\n \"\"\"\n if station_scan:\n lineup_status = {\n \"ScanInProgress\": True,\n \"Progress\": 50,\n \"Found\": 5\n }\n else:\n lineup_status = {\n \"ScanInProgress\": False,\n \"ScanPossible\": True,\n \"Source\": \"Antenna\",\n \"SourceList\": [\"Antenna\"]\n }\n return jsonify(lineup_status)\n\n @app.route('/lineup.m3u', methods=['GET'])\n @app.route('/tuner.m3u', methods=['GET'])\n def m3u() -> Response:\n \"\"\"Returns all stations in m3u format\n\n Returns:\n Response: m3u in text/plain\n \"\"\"\n m3uText = \"#EXTM3U\\n\"\n for station in locast_service.get_stations():\n callsign = name_only(station.get(\"callSign_remapped\") or station.get(\n \"callSign\") or station.get(\"name\"))\n city = station[\"city\"]\n logo = station.get(\"logoUrl\") or station.get(\"logo226Url\")\n channel = station.get(\"channel_remapped\") or station[\"channel\"]\n networks = \"Network\" if callsign in [\n 'ABC', 'CBS', 'NBC', 'FOX', 'CW', 'PBS'] else \"\"\n groups = \";\".join(filter(None, [city, networks]))\n url = 
f\"http://{host_and_port}/watch/{station['id']}.m3u\"\n\n tvg_name = f\"{callsign} ({city})\" if config.multiplex else callsign\n\n m3uText += f'#EXTINF:-1 tvg-id=\"channel.{station[\"id\"]}\" tvg-name=\"{tvg_name}\" tvg-logo=\"{logo}\" tvg-chno=\"{channel}\" group-title=\"{groups}\", {callsign}'\n\n if config.multiplex:\n m3uText += f' ({city})'\n m3uText += f'\\n{url}\\n\\n'\n return m3uText\n\n @app.template_filter()\n def name_only(value: str) -> str:\n \"\"\"Get the name part of a callSign. '4.1 CBS' -> 'CBS'\n\n Args:\n value (str): String to parse\n\n Returns:\n str: Parsed string or original value\n \"\"\"\n m = re.match(r'\\d+\\.\\d+ (.+)', value)\n if m:\n return m.group(1)\n else:\n return value\n\n @app.route('/lineup.json', methods=['GET'])\n def lineup_json() -> Response:\n \"\"\"Returns a URL for each station that PMS can use to stream in JSON\n\n Returns:\n Response: JSON containing the GuideNumber, GuideName and URL for each channel\n \"\"\"\n watch = \"watch_direct\" if config.direct else \"watch\"\n\n return jsonify([{\n \"GuideNumber\": station.get('channel_remapped') or station['channel'],\n \"GuideName\": station['name'],\n \"URL\": f\"http://{host_and_port}/{watch}/{station['id']}\"\n } for station in locast_service.get_stations()])\n\n @app.route('/epg', methods=['GET'])\n def epg() -> Response:\n \"\"\"Returns the Electronic Programming Guide in json format\n\n Returns:\n Response: JSON containing the EPG for this DMA\n \"\"\"\n return jsonify(locast_service.get_stations())\n\n @app.route('/config', methods=['GET'])\n def output_config() -> Response:\n \"\"\"Returns the Electronic Programming Guide in json format\n\n Returns:\n Response: JSON containing the EPG for this DMA\n \"\"\"\n c = dict(config)\n c['password'] = \"*********\"\n return jsonify(c)\n\n @app.template_filter()\n def format_date(value: int) -> str:\n \"\"\"Convert an epoch timestamp to YYYYmmdd\n\n Args:\n value (str): Epoch timestamp string\n\n Returns:\n str: String as YYYYmmdd\n \"\"\"\n\n return (datetime(1970, 1, 1) + timedelta(milliseconds=value)).strftime('%Y%m%d')\n\n @app.template_filter()\n def format_date_iso(value: int) -> str:\n \"\"\"Convert an epoch timestamp to YYYY-mm-dd\n\n Args:\n value (str): Epoch timestamp string\n\n Returns:\n str: String as YYYY-mm-dd\n \"\"\"\n\n return (datetime(1970, 1, 1) + timedelta(milliseconds=value)).strftime('%Y-%m-%d')\n\n @app.template_filter()\n def format_time(value: int) -> str:\n \"\"\"Return an epoch timestamp to YYYYmmdddHHMMSS\n\n Args:\n value (str): Epoch timestamp string\n\n Returns:\n str: String as YYYYmmdddHHMMSS\n \"\"\"\n return (datetime(1970, 1, 1) + timedelta(milliseconds=value)).strftime('%Y%m%d%H%M%S')\n\n @app.template_filter()\n def format_time_local_iso(value: int, timezone: str) -> str:\n \"\"\"Return an epoch timestamp to YYYY-mm-dd HH:MM:SS in local timezone\n\n Args:\n value (int): Epoch timestamp string\n timezone (str): Time zone (e.g. America/New_York)\n\n Returns:\n str: String as YYYY-mm-dd HH:MM:SS\n \"\"\"\n datetime_in_utc = datetime(1970, 1, 1) + timedelta(milliseconds=value)\n datetime_in_local = pytz.timezone(timezone).fromutc(datetime_in_utc)\n return datetime_in_local.strftime('%Y-%m-%d %H:%M:%S')\n\n @app.template_filter()\n def aspect(value: str) -> str:\n \"\"\"Convert a locast 'videoProperties' string to an aspect ratio\n\n Args:\n value (str): locast 'videoProperties' string\n\n Returns:\n str: aspect ratio. 
Either '4:3' or '16:9'\n \"\"\"\n for r in [\"1080\", \"720\", \"HDTV\"]:\n if r in value:\n return \"16:9\"\n return \"4:3\"\n\n @app.template_filter()\n def quality(value: str) -> str:\n \"\"\"Convert a locast 'videoProperties' string to a quality\n\n Args:\n value (str): locast 'videoProperties' string\n\n Returns:\n str: quality. Either 'SD' or 'HDTV'\n \"\"\"\n if \"HDTV\" in value:\n return \"HDTV\"\n else:\n return \"SD\"\n\n @app.route('/epg.xml', methods=['GET'])\n def epg_xml() -> Response:\n \"\"\"Render the EPG as XMLTV. This will trigger a refetch of all stations from locast.\n\n Returns:\n Response: XMLTV\n \"\"\"\n xml = render_template('epg.xml',\n stations=locast_service.get_stations(),\n url_base=host_and_port)\n return Response(xml, mimetype='text/xml')\n\n @app.route('/lineup.xml', methods=['GET'])\n def lineup_xml() -> Response:\n \"\"\"Returns a URL for each station that PMS can use to stream in XML\n\n Returns:\n Response: XML containing the GuideNumber, GuideName and URL for each channel\n \"\"\"\n watch = \"watch_direct\" if config.direct else \"watch\"\n xml = render_template('lineup.xml',\n stations=locast_service.get_stations(),\n url_base=host_and_port,\n watch=watch).encode(\"utf-8\")\n return Response(xml, mimetype='text/xml')\n\n @app.route('/lineup.post', methods=['POST', 'GET'])\n def lineup_post():\n \"\"\"Initiate a rescan of stations for this Tuner\"\"\"\n scan = request.args.get('scan')\n if scan == 'start':\n station_scan = True\n stations = locast_service.get_stations()\n station_scan = False\n return ('', 204)\n\n return (f'{scan} is not a valid scan command', 400)\n\n @app.route('/watch/<channel_id>.m3u')\n def watch_m3u(channel_id: str) -> Response:\n \"\"\"Stream the channel based on it's ID. This route redirects to a locast m3u.\n\n Args:\n channel_id (str): Channel ID\n\n Returns:\n Response: Redirect to a locast m3u\n \"\"\"\n log.info(\n f\"Watching channel {channel_id} on {host_and_port} for {locast_service.city} using m3u\")\n return redirect(locast_service.get_station_stream_uri(channel_id), code=302)\n\n @app.route('/watch/<channel_id>')\n def watch_ffmpeg(channel_id: str) -> Response:\n \"\"\"Stream a channel based on it's ID. The route streams data as long as its connected.\n This method starts ffmpeg and reads n bytes at a time.\n\n Args:\n channel_id (str): Channel ID\n\n Returns:\n Response: HTTP response with content_type 'video/mpeg; codecs=\"avc1.4D401E\"'\n \"\"\"\n log.info(\n f\"Watching channel {channel_id} on {host_and_port} for {locast_service.city} using ffmpeg\")\n uri = locast_service.get_station_stream_uri(channel_id)\n\n ffmpeg = config.ffmpeg or 'ffmpeg'\n\n # Start ffmpeg as a subprocess to extract the mpeg stream and copy it to the incoming\n # connection. 
ffmpeg will take care of demuxing the mpegts stream and following m3u directions\n ffmpeg_cmd = [ffmpeg, \"-i\", uri, \"-codec\",\n \"copy\", \"-f\", \"mpegts\", \"pipe:1\"]\n\n ffmpeg_proc = subprocess.Popen(\n ffmpeg_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n\n # use a signal to indicate threads running or not\n signal = RunningSignal(True)\n\n # Start a thread that reads ffmpeg stderr and logs it to our logger.\n t = threading.Thread(target=_log_output, args=(\n config, ffmpeg_proc.stderr, signal))\n t.setDaemon(True)\n t.start()\n\n return Response(_stream_ffmpeg(config, ffmpeg_proc, signal), content_type='video/mpeg; codecs=\"avc1.4D401E')\n\n @app.route('/watch_direct/<channel_id>')\n def watch_direct(channel_id: str) -> Response:\n \"\"\"Stream a channel based on it's ID. The route streams data as long as its connected.\n This method starts ffmpeg and reads n bytes at a time.\n\n Args:\n channel_id (str): Channel ID\n\n Returns:\n Response: HTTP response with content_type 'video/mpeg; codecs=\"avc1.4D401E\"'\n \"\"\"\n log.info(\n f\"Watching channel {channel_id} on {host_and_port} for {locast_service.city} using direct\")\n\n stream_uri = locast_service.get_station_stream_uri(channel_id)\n\n return Response(_stream_direct(config, stream_uri, log), content_type='video/mpeg; codecs=\"avc1.4D401E', direct_passthrough=True)\n return app",
"def BASE_URL():\n BASE_URL = \"http://api.zippopotam.us/\"\n return BASE_URL",
"def load_device():",
"def fusion_api_get_server_hardware_remote_console_url(self, uri, api=None, headers=None):\n return self.sh.get(uri=uri, api=api, headers=headers, param='/remoteConsoleUrl')",
"def main():\n kernel_params = _parse_kernel_cmdline()\n api_url = kernel_params.get('ironic_api_url')\n deployment_id = kernel_params.get('deployment_id')\n inspect = kernel_params.get('inspect')\n # TODO(aarefiev): change ssh driver\n ironic_driver = kernel_params.get('callback-driver-name', 'ansible_ssh')\n if inspect and api_url is None:\n _process_error('Ironic ansible callback: Mandatory parameter '\n '\"ironic_api_url\" is missing.')\n if api_url is None or deployment_id is None:\n _process_error('Mandatory parameter (\"ironic_api_url\" or '\n '\"deployment_id\") is missing.')\n\n boot_mac = kernel_params.get('BOOTIF')\n if boot_mac is None:\n _process_error('Cannot define boot interface, \"BOOTIF\" parameter is '\n 'missing.')\n\n # There is a difference in syntax in BOOTIF variable between pxe and ipxe\n # boot with Ironic. For pxe boot the the leading `01-' denotes the device type\n # (Ethernet) and is not a part of the MAC address\n if boot_mac.startswith('01-'):\n boot_mac = boot_mac[3:].replace('-', ':')\n\n for n in range(_GET_ADDR_MAX_ITERATION):\n boot_ip = _get_interface_ip(boot_mac)\n if boot_ip is not None:\n break\n time.sleep(_RETRY_INTERVAL)\n else:\n _process_error('Cannot find IP address of boot interface.')\n\n data = {\"callback_url\": \"ssh://\" + boot_ip}\n\n if inspect:\n passthru = ('%(api-url)s/v1/drivers/%(driver)s/vendor_passthru'\n '/inspect' % {'api-url': api_url,\n 'driver': ironic_driver}\n else:\n passthru = '%(api-url)s/v1/nodes/%(deployment_id)s/vendor_passthru' \\\n '/heartbeat' % {'api-url': api_url,\n 'deployment_id': deployment_id}\n\n for attempt in range(_POST_CALLBACK_MAX_ITERATION):\n try:\n resp = requests.post(passthru, data=json.dumps(data),\n headers={'Content-Type': 'application/json',\n 'Accept': 'application/json'})\n except Exception as e:\n error = str(e)\n else:\n if resp.status_code != 202:\n error= ('Wrong status code %d returned from Ironic API' %\n resp.status_code)\n else:\n break\n\n if attempt == (_POST_CALLBACK_MAX_ITERATION - 1):\n _process_error(error)\n\n time.sleep(_RETRY_INTERVAL)",
"def full_url(resource):\r\n # if (url/resource == '127.0.0.1':)\r\n if resource == '/' or resource == ' ':\r\n url = \"{0}{1}\".format(ROOT_DIRECTORY, URL_TEST)\r\n # else (if url/resource == 'Specific resource')\r\n else:\r\n url = \"{0}{1}\".format(ROOT_DIRECTORY, str(resource).replace('/', '\\\\'))\r\n print(f'the client request = {url}')\r\n return url",
"def load_satellite_endpoint():\n pass",
"def boot_config():\n # quick check to grab a config file from /boot partition.\n # this function helps users who cannot SSH/access the Pi,\n # but can access the microSD card\n if os.path.exists(BOOT_CONFIG_PATH):\n print(\"Configuration loaded from /boot directory.\")\n with open(BOOT_CONFIG_PATH) as boot_file:\n with open(CONFIG_FILE_PATH, 'w+') as config_file:\n for line in boot_file:\n config_file.write(line)",
"def select_binary(base_path, version, name, config=None):\r\n # TODO(John Sirois): finish doc of the path structure expexcted under base_path\r\n config = config or Config.load()\r\n bootstrap_dir = config.getdefault('pants_bootstrapdir')\r\n baseurl = config.getdefault('pants_support_baseurl')\r\n timeout_secs = config.getdefault('pants_support_fetch_timeout_secs', type=int, default=30)\r\n\r\n sysname, _, release, _, machine = os.uname()\r\n os_id = _ID_BY_OS[sysname.lower()]\r\n if os_id:\r\n middle_path = _PATH_BY_ID[os_id(release, machine)]\r\n if middle_path:\r\n binary_path = os.path.join(base_path, *(middle_path + [version, name]))\r\n bootstrapped_binary_path = os.path.join(bootstrap_dir, binary_path)\r\n if not os.path.exists(bootstrapped_binary_path):\r\n url = posixpath.join(baseurl, binary_path)\r\n log.info('Fetching %s binary from: %s' % (name, url))\r\n downloadpath = bootstrapped_binary_path + '~'\r\n try:\r\n with closing(urllib_request.urlopen(url, timeout=timeout_secs)) as binary:\r\n with safe_open(downloadpath, 'wb') as bootstrapped_binary:\r\n bootstrapped_binary.write(binary.read())\r\n\r\n os.rename(downloadpath, bootstrapped_binary_path)\r\n chmod_plus_x(bootstrapped_binary_path)\r\n except (IOError, urllib_error.HTTPError, urllib_error.URLError) as e:\r\n raise TaskError('Failed to fetch binary from %s: %s' % (url, e))\r\n finally:\r\n safe_delete(downloadpath)\r\n log.debug('Selected %s binary bootstrapped to: %s' % (name, bootstrapped_binary_path))\r\n return bootstrapped_binary_path\r\n raise TaskError('No %s binary found for: %s' % (name, (sysname, release, machine)))",
"def __init__(self, name: str, hw_device: KnauerDAD):\n super().__init__(name, hw_device)\n self.lamp = name\n self.add_api_route(\"/lamp_status\", self.get_lamp, methods=[\"GET\"])\n self.add_api_route(\"/status\", self.get_status, methods=[\"GET\"])",
"def set_bootloader_mode(self, mode):\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 'B')",
"def software_load(self, filename: str) -> None:\n pass # Most boards can use serialboot.",
"def bootstrapFrontend(serverName, serverPort, sslPublicCertPath,\n sslPrivateCertPath):\n # Upload files\n put(sslPublicCertPath, 'fluidinfo.pem')\n put(sslPrivateCertPath, 'fluidinfo.key')\n\n # Install requirements.\n sudo('DEBIAN_FRONTEND=noninteractive apt-get install -y nginx haproxy')\n\n # Set up haproxy.\n sudo('/etc/init.d/haproxy stop')\n deployConfigFiles(\n {'server-name': serverName},\n\n ('haproxy/haproxy.cfg', '/etc/haproxy/haproxy.cfg'),\n ('haproxy/haproxy-default', '/etc/default/haproxy'))\n\n sudo('mkdir -p ../var/run/haproxy')\n sudo('chown haproxy:haproxy ../var/run/haproxy')\n sudo('/etc/init.d/haproxy start')\n sudo('curl --silent http://127.0.0.1:9000 > /dev/null && echo Works!')\n\n # Set up nginx.\n sudo('/etc/init.d/nginx stop')\n sudo('mkdir -p /etc/nginx/ssl')\n sudo('mv fluidinfo.pem /etc/nginx/ssl')\n sudo('chmod 600 /etc/nginx/ssl/fluidinfo.pem')\n sudo('mkdir -p /var/lib/fluidinfo/logs')\n\n sudo('mv fluidinfo.key /etc/nginx/ssl')\n sudo('chmod 600 /etc/nginx/ssl/fluidinfo.key')\n deployConfigFiles(\n {'server-name': serverName},\n\n ('nginx/fluidinfo-secure.conf.template',\n '/etc/nginx/sites-available/{server-name}'))\n\n sudo('ln -sf /etc/nginx/sites-available/{0} '\n '/etc/nginx/sites-enabled/{0}'.format(serverName))\n sudo('rm -f /etc/nginx/sites-enabled/default')\n sudo('/etc/init.d/nginx start')\n time.sleep(1)\n sudo('curl --silent http://127.0.0.1:%d > /dev/null && echo Works!'\n % serverPort)",
"def setKernelLoadAddress(self):\n\t\tself.kernelloadaddress = self.settings.getKeyValue('kernel.load.address')\n\t\treturn None",
"def get_global_url():\n return os.environ.get('URL', '')",
"def url_base():\n return \"https://dev-yourOrg.us.auth0.com\"",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def fetch_webroot(ip, fingerprint):\n\n global cookie \n _cookie = cookie\n\n url = \"http://{0}:{1}/railo-context/admin/\".format(ip, fingerprint.port)\n if fingerprint.version in [\"3.0\"]:\n url += \"server.cfm\"\n _cookie = checkAuth(ip, fingerprint.port, RINTERFACES.SRV)\n else:\n url += \"web.cfm\"\n\n response = utility.requests_get(url, cookies=_cookie)\n if response.status_code is 200:\n\n if fingerprint.version in [\"3.0\"]:\n data = findall(\"path1\\\" value=\\\"(.*?)\\\" \", \n response.content.translate(None, \"\\n\\t\\r\"))\n elif fingerprint.version in [\"3.3\"]:\n data = findall(\"Webroot</td><td class=\\\"tblContent\\\">(.*?)</td>\", \n response.content.translate(None, \"\\n\\t\\r\"))\n else:\n data = findall(\"Webroot</th><td>(.*?)</td>\",\n response.content.translate(None, \"\\n\\t\\r\"))\n\n if len(data) > 0:\n return data[0]",
"def go_to_setup_home(self):\n url = self.cumulusci.org.lightning_base_url\n self.selenium.go_to(url + \"/lightning/setup/SetupOneHome/home\")\n self.wait_until_loading_is_complete()"
] | [
"0.77584946",
"0.7341055",
"0.71151394",
"0.65061367",
"0.5847443",
"0.58197343",
"0.5644982",
"0.5516919",
"0.54772717",
"0.54441345",
"0.54027355",
"0.53707486",
"0.53131366",
"0.52545315",
"0.5227558",
"0.5223011",
"0.5216488",
"0.5198572",
"0.51643014",
"0.5142962",
"0.51252884",
"0.5124703",
"0.5098322",
"0.50982964",
"0.5086413",
"0.5071438",
"0.50681615",
"0.50618476",
"0.50538963",
"0.50493443",
"0.50224304",
"0.50124973",
"0.5001121",
"0.4995917",
"0.4995917",
"0.4988058",
"0.4976237",
"0.495969",
"0.49582955",
"0.49567014",
"0.4952744",
"0.4951569",
"0.49509773",
"0.4950893",
"0.49459016",
"0.4944423",
"0.4933623",
"0.49261957",
"0.49133104",
"0.49132365",
"0.49123988",
"0.49051994",
"0.4893701",
"0.48925883",
"0.48882955",
"0.4876209",
"0.487362",
"0.4873146",
"0.4870386",
"0.48679975",
"0.4867855",
"0.4858817",
"0.48489895",
"0.48432362",
"0.48393014",
"0.48373055",
"0.48357671",
"0.48256695",
"0.48222944",
"0.4811808",
"0.4810579",
"0.48061067",
"0.4803799",
"0.4800276",
"0.47797734",
"0.47779247",
"0.47747326",
"0.47746262",
"0.47743627",
"0.47617438",
"0.4758685",
"0.47584927",
"0.4756941",
"0.4756141",
"0.4756",
"0.47520438",
"0.47509298",
"0.4750554",
"0.47493345",
"0.47436675",
"0.47408354",
"0.47345737",
"0.47327635",
"0.4720299",
"0.4709508",
"0.4706369",
"0.47029826",
"0.47029683",
"0.46989197",
"0.4698903"
] | 0.81860113 | 0 |
Set the UefiShellStartupUrl of the system in UEFI boot mode. | def set_http_boot_url(self, url):
if(self._is_boot_mode_uefi() is True):
self._change_bios_setting({'UefiShellStartupUrl': url})
else:
msg = 'set_http_boot_url is not supported in the BIOS boot mode'
raise exception.IloCommandNotSupportedInBiosError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_http_boot_uri(self, url):\n try:\n sushy_system = self._get_sushy_system()\n sushy_system.http_boot_uri.set_http_boot_uri(url)\n except sushy.exceptions.SushyError as e:\n msg = (self._('Unable to set HTTP Boot URI. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def get_http_boot_url(self):\n if(self._is_boot_mode_uefi() is True):\n return self._get_bios_setting('UefiShellStartupUrl')\n else:\n msg = 'get_http_boot_url is not supported in the BIOS boot mode'\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def get_http_boot_uri(self):\n try:\n sushy_system = self._get_sushy_system()\n http_boot_uri = sushy_system.http_boot_uri.httpbooturi\n except sushy.exceptions.SushyError as e:\n msg = (self._('Not able to find HTTP Boot URI. Error: '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n return http_boot_uri",
"def go_to_setup_home(self):\n url = self.cumulusci.org.lightning_base_url\n self.selenium.go_to(url + \"/lightning/setup/SetupOneHome/home\")\n self.wait_until_loading_is_complete()",
"def setUbootFlashAddress(self):\n\t\tself.ubootflashaddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\treturn None",
"def setup(self, url, browser_config):\n\n # navigate to the front page\n browser.open_url(url)",
"def pibooth_startup(cfg, app):",
"def _set_url(self): \n self.url = self.geturl()",
"def set_url(self, url):\n super(Cabling, self).set_url(url)",
"def SetBootloaderEnv(script, name, val):\n script.AppendExtra('set_bootloader_env(\"%s\", \"%s\");' % (name, val))",
"def flashUboot(self):\n\t\tif self.settings.getKeyValue('flash.uboot?') == 'y':\n\t\t\tloadAddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\t\tcmd = self.settings.getKeyValue('u-boot.load.command')\n\t\t\tcmd = cmd.replace('<u-boot>', 'u-boot.bin.12x.2430')\n\t\t\tself.socket.send(cmd, 5)\n\t\t\t#self.socket.send('protect off 1:0-1\\r', 2)\n\t\t\t#self.socket.send('erase 1:0-1\\r', 2)\n\t\t\t#self.socket.send('cp.b 80000000 %s 2ffff\\r' % loadAddress)\n\t\t\treturn None\n\t\t\t#cmd = cmd.replace('<u-bootloadadress>', self.u-bootloadaddress)",
"def goto_environment_url(self):\n try:\n self._browser.get(self._environment.url)\n except Exception as e:\n self.logger.error(\"Error going to environment '\" + self._environment.url + \"' : \" + str(e))\n raise",
"def activator_ui():\n local('activator ui -Dhttp.address=10.0.2.15')",
"def set_url(self, url):\n self.url = url",
"async def snekurl(self, ctx: commands.Context, url=None):\r\n\r\n if not url:\r\n current_url = await self.conf.snekbox_url()\r\n await ctx.send_help()\r\n return await ctx.send(\"`Current snekbox URL: {}`\".format(current_url))\r\n\r\n async with ctx.typing():\r\n if await self._test_snekurl(url):\r\n await self.conf.snekbox_url.set(url)\r\n return await ctx.send(\":white_check_mark: It's working! New url set.\")\r\n\r\n await ctx.send(\":x: URL doesn't seem to work.\")",
"def set_url(self, url):\n self.data['url'] = url",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def set_url(self, url):\n if url is not None:\n self.url = url",
"def set_url(self, url):\n self.url = url",
"async def set_event_url(self, event_url: Optional[str]) -> None:\n if not event_url:\n event_url = self._server.url\n url = quote(str(event_url), safe=\"\")\n _LOGGER.info(\"Setting event update URL to %s\", url)\n await self._api_request(f\"postURL/{url}\")",
"def set_lxd_init_auto(self):\n delay = 2\n for attempt in range(5):\n out = utils.run_script(\"conjure-up.lxd init --auto\")\n if out.returncode == 0:\n return\n time.sleep(delay)\n raise Exception(\n \"Problem running lxd init: {}\".format(out.stderr.decode()))",
"def load_path_url():\n web.ctx.path_url = web.ctx.home + web.ctx.path",
"def web_shell(self, web_shell):\n\n self._web_shell = web_shell",
"def setUrl( self, url ):\n self._urlEdit.setText(str(url))",
"def test_home(self):\n self.selenium.get('{}/'.format(self.live_server_url))",
"def step_impl(context, url):\n context.base_url = url",
"async def test_setup(hass: HomeAssistant, ufp: MockUFPFixture) -> None:\n\n await hass.config_entries.async_setup(ufp.entry.entry_id)\n await hass.async_block_till_done()\n\n assert ufp.entry.state == ConfigEntryState.LOADED\n assert ufp.api.update.called\n assert ufp.entry.unique_id == ufp.api.bootstrap.nvr.mac",
"def setKernelLoadAddress(self):\n\t\tself.kernelloadaddress = self.settings.getKeyValue('kernel.load.address')\n\t\treturn None",
"def do_startup(self):\n logger.debug('::startup')\n Gio.Application.do_startup(self)\n action = Gio.SimpleAction.new('quit', None)\n action.connect('activate', self.quit)\n self.add_action(action)\n # Initialize the current profiles, but do not auto load\n try:\n self.load_profile(self._settings.get_string('current-profile'), False, False)\n if self._settings.get_string('lockscreen-profile') != \"\":\n self.load_profile(self._settings.get_string('lockscreen-profile'), True, False)\n except (WallpaperNotFoundError, NotFoundError) as e:\n # If we failed to load the profile, its bad\n logger.error('failed to load profiles on startup: %s', e.message)\n # Connect the settings signals\n self._settings_handlers.append(self._settings.connect(\n 'changed::rotation',\n lambda s, k: self._toggle_timer(self._settings.get_string('rotation'))\n ))\n self._settings_handlers.append(self._settings.connect(\n 'changed::interval',\n lambda s, k: self._toggle_timer(self._settings.get_string('rotation'))\n ))\n self._settings_handlers.append(self._settings.connect('changed::current-profile', self._callback_desktop))\n self._settings_handlers.append(self._settings.connect('changed::lockscreen-profile', self._callback_lockscreen))\n self._settings_handlers.append(self._settings.connect('changed::update-lockscreen', self._callback_lockscreen))",
"def set_normal_environment(self):\n if 'RUSTUP_DIST_SERVER' in os.environ:\n self._download_url = os.environ['RUSTUP_DIST_SERVER']\n else:\n self._download_url = 'https://static.rust-lang.org'",
"def go_home(self):\r\n if self.home_url is not None:\r\n self.set_url(self.home_url)",
"def on_startup(self) -> None:\n ...",
"def set_base_url(url):\n global BASE_URL\n if url is not None:\n BASE_URL = '/'.join((url.split(\"/\")[:-1])) + \"/\"",
"def url_set(self, url):\n self.request('/v1.1/url', 'POST', body={'url': url})",
"def change_endpoint(self, url: str):\n if not is_empty(url):\n self._url = url",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version|json\", \"show hostname\"], None,\n 'mixed')",
"async def confirm_remote_startup(self):\n pass",
"def set_dev_environment(self):\n if 'RUSTUP_DEV_DIST_SERVER' in os.environ:\n self._download_url = os.environ['RUSTUP_DEV_DIST_SERVER']\n else:\n self._download_url = 'https://dev-static.rust-lang.org'",
"def set_ospl_home_bin(self, ospl_home_bin):\r\n self.ospl_home_bin = ospl_home_bin\r\n self.reset_ospl_command()",
"def initialize_app_for_new_client(q):\n\n if not q.user.initialized:\n initialize_app_for_new_user(q)\n\n # Default settings for new browsers\n q.client.tab = 'home'\n q.client.count = 0\n\n # Adding ui elements\n q.page['header'] = ui.header_card(\n box='1 1 11 1',\n title='Exploring Routing',\n subtitle='This application uses tabs and buttons.',\n )\n\n q.client.initialized = True",
"def navigate(self, url, reload=False):\n if self.driver.current_url == url and not reload:\n return\n self.driver.get(url)\n if self.driver.current_url == url:\n return\n if self.driver.current_url != url:\n if self.driver.current_url.endswith(\"/installer/welcome/\"):\n self.setup_new_install()\n else:\n if url.startswith(self.ss_url):\n self.login_ss()\n else:\n self.login()\n self.driver.get(url)",
"def setup_with_endpoint(self, mac='00:11:22:33:33:33'):\n args = self.get_args()\n self.write_config_file(self.create_config_file(), args)\n\n execute_tool(args, test_mode=True)\n\n ip = '3.4.3.4'\n self.assertFalse(self.verify_remote_site_has_entry(mac, ip, 'intersite-testsuite', 'l3out', 'intersite-testsuite-app-epg'))\n\n time.sleep(2)\n self.add_endpoint(mac, ip, 'intersite-testsuite', 'app', 'epg')\n return mac, ip",
"def elActivateGraphicalLogin(self):\n # see http://docs.redhat.com/docs/en-US/Red_Hat_Enterprise_Linux/6/html/Installation_Guide/s1-kickstart2-options.html\n commandSection = self.sectionByName(\"command\")\n commandSection.string = commandSection.string + \"\"\"\n#\n# XWindows configuration information.\nxconfig --startxonboot --defaultdesktop=GNOME\n\"\"\"\n return self",
"def setBaseURL(self,value):\n self.PDFreactorConfiguration.in1[\"baseURL\"] = value",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version\", \"show run hostname\"], None)",
"def boot(self, boot):\n\n self._boot = boot",
"def service_url(self):\n return \"http://127.0.0.1:%d/wd/hub\"%self.port",
"def setup_with_endpoint(self):\n args = self.get_args()\n config = self.create_config_file()\n self.write_config_file(config, args)\n execute_tool(args, test_mode=True)\n\n mac = '00:11:22:33:33:33'\n ip = '3.4.3.4'\n self.assertFalse(self.verify_remote_site_has_entry(mac, ip, 'intersite-testsuite',\n 'l3out', 'intersite-testsuite-app-epg1'))\n\n time.sleep(2)\n self.add_endpoint(mac, ip, 'intersite-testsuite', 'app', 'epg1')\n return mac, ip",
"def set_bios_bootmode_uefi(ip, login_account, login_password, system_id):\n result = {}\n login_host = \"https://\" + ip\n try:\n # Connect using the BMC address, account name, and password\n # Create a REDFISH object\n REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,\n password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)\n # Login into the server and create a session\n REDFISH_OBJ.login(auth=utils.g_AUTH)\n except:\n traceback.print_exc()\n result = {'ret': False, 'msg': \"Please check the username, password, IP is correct\"}\n return result\n\n # GET the ComputerSystem resource\n system = utils.get_system_url(\"/redfish/v1\", system_id, REDFISH_OBJ)\n if not system:\n result = {'ret': False, 'msg': \"This system id is not exist or system member is None\"}\n REDFISH_OBJ.logout()\n return result\n for i in range(len(system)):\n system_url = system[i]\n response_system_url = REDFISH_OBJ.get(system_url, None)\n if response_system_url.status != 200:\n error_message = utils.get_extended_error(response_system_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (system_url, response_system_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else:\n # Get the bios resource\n bios_url = response_system_url.dict['Bios']['@odata.id']\n response_bios_url = REDFISH_OBJ.get(bios_url, None)\n if response_bios_url.status != 200:\n error_message = utils.get_extended_error(response_bios_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (bios_url, response_bios_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else: # Get bios success\n # Seek boot mode from bios attributes\n attribute_bootmode = None\n attributes = response_bios_url.dict['Attributes']\n for attribute in attributes:\n if attribute == \"BootMode\" or attribute == \"SystemBootMode\":\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"SystemBootMode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"Boot\" in attribute and \"Mode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n result = {'ret': False, 'msg': \"Can not found BootMode attribute in response of url %s\" %(bios_url)}\n REDFISH_OBJ.logout()\n return result\n\n # Get boot mode setting guide from bios registry\n WarningText = None\n ValueName = None\n bios_registry_url = \"/redfish/v1/Registries/\" + response_bios_url.dict['AttributeRegistry']\n response_bios_registry_url = REDFISH_OBJ.get(bios_registry_url, None)\n if response_bios_registry_url.status == 200:\n locations = response_bios_registry_url.dict['Location']\n bios_regjson_url = None\n for location in locations:\n if 'en' in location['Language']:\n bios_regjson_url = location['Uri']\n break\n if bios_regjson_url:\n response_bios_regjson_url = REDFISH_OBJ.get(bios_regjson_url, None)\n if response_bios_regjson_url.status == 200:\n regattributes = response_bios_regjson_url.dict['RegistryEntries']['Attributes']\n for regattribute in regattributes:\n if regattribute['AttributeName'] == attribute_bootmode:\n if 'WarningText' in regattribute:\n WarningText = regattribute['WarningText']\n for value in regattribute['Value']:\n if 'legacy' in value['ValueName'].lower():\n continue\n if 'uefi' in value['ValueName'].lower():\n ValueName = 
value['ValueName']\n break\n ValueName = value['ValueName']\n break\n \n # Perform patch to set\n if ValueName == None:\n ValueName = \"UEFIMode\"\n pending_url = response_bios_url.dict['@Redfish.Settings']['SettingsObject']['@odata.id']\n parameter = {attribute_bootmode: ValueName}\n attribute = {\"Attributes\": parameter}\n headers = {\"If-Match\": '*'}\n response_pending_url = REDFISH_OBJ.patch(pending_url, body=attribute, headers=headers)\n if response_pending_url.status in [200,204]:\n if WarningText:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful. WarningText: %s'% (WarningText) }\n else:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful'}\n elif response_pending_url.status == 405:\n result = {'ret': False, 'msg': \"Resource not supported\"}\n else:\n error_message = utils.get_extended_error(response_pending_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (\n pending_url, response_pending_url.status, error_message)}\n\n # Logout of the current session\n try:\n REDFISH_OBJ.logout()\n except:\n pass\n return result",
"def menu_spe_homepage(self, event=None):\n self.link('http://pythonide.stani.be')",
"async def _setup_bridge(self, websession: ClientSession) -> None:\n self._bridge = aiohue.Bridge(\n self.config.hue.ip,\n websession,\n username=self.config.hue.username,\n )\n LOGGER.info(f\"Connecting to Hue Bridge at {self.config.hue.ip}\")\n await self._bridge.initialize()",
"def setUp(self):\n self.brow = webdriver.Firefox()\n staging_server = os.environ.get('STAGING_SERVER')\n if staging_server:\n self.live_server_url = \"http://\" + staging_server",
"def setup(bus):\n\n import webbrowser\n\n bus.register_service(DOMAIN_BROWSER, SERVICE_BROWSE_URL,\n lambda service: webbrowser.open(service.data['url']))\n\n return True",
"def _startup():\n from octoprint_dashboard.model import User, Config\n if Config.query.scalar() is None:\n print(\"No config, add config via command 'python -m flask config'\")\n shutdown_server()\n if User.query.filter_by(superadmin=True).count() == 0:\n print(\"No superadmin, add superadmin via command 'python -m flask add_superadmin <username>'\")\n shutdown_server()\n\n scheduler.start() # starts background task scheduler\n zeroconf_browser.start() # starts MDNS service discovery",
"def startup(self) -> None:",
"def url(self, url: str):\n self._url = url",
"async def async_setup(self):\n self._unsub_stop = self.hass.bus.async_listen(\n EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop\n )\n dev_reg = await device_registry.async_get_registry(self.hass)\n model_type = self.device.settings[\"device\"][\"type\"]\n dev_reg.async_get_or_create(\n config_entry_id=self.entry.entry_id,\n name=self.name,\n connections={(device_registry.CONNECTION_NETWORK_MAC, self.mac)},\n # This is duplicate but otherwise via_device can't work\n identifiers={(DOMAIN, self.mac)},\n manufacturer=\"Shelly\",\n model=aioshelly.MODEL_NAMES.get(model_type, model_type),\n sw_version=self.device.settings[\"fw\"],\n )",
"def go_to_setup_object_manager(self):\n url = self.cumulusci.org.lightning_base_url\n self.selenium.go_to(url + \"/lightning/setup/ObjectManager/home\")\n self.wait_until_loading_is_complete()",
"def run():\r\n autostartup()",
"def start_test(url):\n \n Debug.user(' ################# start Test ######################')\n App.open('firefox --private-window '+url)\n wait(\"1501595436606.png\", 10)\n\n click(\"1501595453560.png\")\n\n if exists():\n \n click()\n else:\n click()\n \n\n\n if exists(\"1499781534684.png\"):\n click(\"1499781552298.png\")\n type('root')\n click(\"1499781563870.png\")\n else:\n pass\n click(\"1499781591282.png\")",
"def set_callback_url(self, callback_url):\n self.callback_url = callback_url",
"def boot(self):\n\n pass",
"def set_bootloader_mode(self, mode):\n self.check_validity()\n\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')",
"def initial_load(pass_message,fail_message, driver, find_element):\r\n initial_load_of_application(pass_message=pass_message,fail_message=fail_message, driver=driver, find_element=find_element)",
"def user_url(self, user_url):\n\n self._user_url = user_url",
"def init_login(app):\n lm.init_app(app)",
"def set_endpoint(endpoint_url):\n log.info(\"Called set_endpoint with args %s\", locals())\n if 'cb/api' in endpoint_url:\n log.debug(\"Setting Cloudbreak endpoint to %s\", endpoint_url)\n this_config = config.cb_config\n elif ':7189' in endpoint_url:\n log.debug(\"Setting Altus Director endpoint to %s\", endpoint_url)\n this_config = config.cd_config\n else:\n raise ValueError(\"Unrecognised API Endpoint\")\n try:\n if this_config.api_client:\n log.debug(\"Found Active API Client, updating...\")\n this_config.api_client.host = endpoint_url\n except AttributeError:\n log.debug(\"No Active API Client found to update\")\n this_config.host = endpoint_url\n if this_config.host == endpoint_url:\n return True\n return False",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show system uptime|display json\",\n \"show version\"], None, 'mixed')",
"def localhost():\n env.run = local\n env.cd = lcd\n env.deployment = 'local'",
"def default_setup_script(self, **kwargs):\n all_args = kwargs.copy()\n all_args['azure_url'] = names.azure_url(all_args['dns_name'])\n return '\\n'.join([\n \"sudo apt-get update\",\n \"sudo apt-get install docker.io -y\",\n \"git clone {git_repo} repo\",\n \"cd repo\",\n \"sudo docker build -t web-app .\",\n (\"sudo docker run -d -e \"\n \"AZURE_URL={azure_url} -p {port}:{port} web-app\")]\n ).format(**all_args)",
"def configure(self, manager):\n success, display = manager.c.eval(\"self.core.display_name\")\n assert success\n self.env[\"WAYLAND_DISPLAY\"] = display",
"def switchToAppInstaller(dev):\n print('Switching to app install mode')\n SonyExtCmdCamera(dev).switchToAppInstaller()",
"def openurl(device, url):\n command = 'openurl \"%s\" \"%s\"' % (device.udid, url)\n _run_command(command)",
"def startup(self):\n pass",
"async def emojiapiurl(self, ctx, url: str):\n await self.config.url.set(url)\n await ctx.tick()",
"def create_url(self):\n self.base_url = self.base + self.strs[jpn.path_latest]",
"def onAboutLeoUrl(self,event=None):\n \n try:\n import webbrowser\n webbrowser.open(self.url)\n except:\n g.es(\"not found: \" + self.url)",
"def main(self):\n base_url = self.env.get(\"base_url\", BASE_URL)\n self.env[\"url\"] = self.get_opera_url(base_url)\n self.output(\"Found URL %s\" % self.env[\"url\"])",
"def start(self):\n self.get(self.url)",
"def set_auto_login(self, value):\n raise NotImplementedError('set_auto_login')",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"cat /proc/uptime\", \"hostnamectl\",\n \"cat /etc/os-release\"], None, 'text')",
"def install_from_url(self) -> None:\n pass",
"def launch(uri):\n comp=urlparse.urlparse(uri)\n handler=get(comp[0])\n if not handler:\n return\n if '%s' in handler:\n cmd=handler % uri\n else:\n cmd=handler+' '+uri\n #print cmd\n\n return os.spawnlp(os.P_NOWAIT, 'sh', 'sh', '-c', cmd)",
"def set_one_time_boot(self, device, mac=None):\n self._update_persistent_boot([device], persistent=False, mac=mac)",
"def set_external(args):\n if args.host:\n os.environ[\"TEST_HOSTNAME\"] = args.host\n # If user gave a hostname, determine oob name from it and set it.\n if \".\" in args.host:\n index = args.host.index(\".\")\n os.environ[\"TEST_BMC_HOSTNAME\"] = (\n args.host[:index] + \"-oob\" + args.host[index:]\n )\n\n if args.bmc_host:\n os.environ[\"TEST_BMC_HOSTNAME\"] = args.bmc_host",
"def __init__(self, url):\n self.url = url\n self.admin_url = os.path.join(url, \"__admin\")\n self.admin_mapping_url = os.path.join(self.admin_url, \"mappings\")\n self.mapping_reset_url = os.path.join(self.admin_mapping_url, 'reset')\n self.requests_url = \"%s/requests\" % self.admin_url",
"def init_job_page(self, base_url):\n self.driver.get(base_url)\n self.driver.implicitly_wait(100)",
"def setup_url_for_address(host, port):\n\n # Force hostnames into IP addresses\n try:\n # Attempt to register {host} as an IP address; if this fails ({host} is\n # not an IP address), this will throw a ValueError.\n ip_address(host)\n except ValueError:\n # The provided {host} should be treated as a hostname.\n host = hostname_lookup(host)\n\n # Automatically determine the port if not provided.\n if not port:\n port = probe_wemo(host)\n\n if not port:\n return None\n\n return \"http://%s:%s/setup.xml\" % (host, port)",
"def bootstrap_config(self):\n self.logger.info(\"applying bootstrap configuration\")\n self.wait_write(\"\\r\", None)\n # Wait for the prompt\n time.sleep(1)\n self.wait_write(\"system-view\", \"<HPE>\")\n self.wait_write(\"ssh server enable\", \"[HPE]\")\n self.wait_write(\"user-interface class vty\", \"[HPE]\")\n self.wait_write(\"authentication-mode scheme\", \"[HPE-line-class-vty]\")\n self.wait_write(\"protocol inbound ssh\", \"[HPE-line-class-vty]\")\n self.wait_write(\"quit\", \"[HPE-line-class-vty]\")\n self.wait_write(\"local-user %s\" % (self.username), \"[HPE]\")\n self.wait_write(\"password simple %s\" % (self.password), \"[HPE-luser-manage-%s]\" % (self.username))\n self.wait_write(\"service-type ssh\", \"[HPE-luser-manage-%s]\" % (self.username))\n self.wait_write(\"authorization-attribute user-role network-admin\", \"[HPE-luser-manage-%s]\" % (self.username))\n self.wait_write(\"quit\", \"[HPE-luser-manage-%s]\" % (self.username))\n self.wait_write(\"interface GigabitEthernet%s/0\" % (self.num_nics + 1), \"[HPE]\")\n self.wait_write(\"ip address 10.0.0.15 255.255.255.0\", \"[HPE-GigabitEthernet%s/0]\" % (self.num_nics + 1))\n self.wait_write(\"quit\", \"[HPE-GigabitEthernet%s/0]\" % (self.num_nics + 1))\n self.wait_write(\"quit\", \"[HPE]\")\n self.wait_write(\"quit\", \"<HPE>\")\n self.logger.info(\"completed bootstrap configuration\")",
"def desktop_session(self):\n self.user['desktop_environment'] = {'name': self.user['desktop']}\n if self.user['desktop'] is not None:\n\n # Append required packages\n if self.user['desktop'] in [10, 11, 12]:\n self.user['desktop_environment']['requirements'] = \\\n '{xorg} {xinit} {numlock}'.format(\n xorg=self.packages['xorg'],\n xinit=self.packages['xinit'],\n numlock=self.packages['numlock'])\n else:\n self.user['desktop_environment']['requirements'] = \\\n '{xorg} {numlock}'.format(xorg=self.packages['xorg'],\n numlock=self.packages['numlock'])\n\n # Set desktop environment name\n self.user['desktop_environment']['name'] = \\\n self.packages['desktop']['name'][self.user['desktop']]\n\n # Append desktop environment packages\n self.user['desktop_environment']['packages'] = \\\n self.packages['desktop']['packages'][self.user['desktop']]\n\n # Append desktop environment extra packages\n if self.user['desktop_extra'] is True:\n self.user['desktop_environment']['packages'] += ' {x}'.format(\n x=self.packages['desktop']['extras'][self.user['desktop']])\n\n # Set start command\n self.user['desktop_environment']['startcmd'] = \\\n self.packages['desktop']['startcmd'][self.user['desktop']]",
"def launch_app(self):\n os.system (\"adb shell am start -n com.tencent.mm/com.tencent.mm.ui.LauncherUI/\")\n time.sleep (5)",
"def main():\n\n if os.path.isfile(os.path.join(os.getcwd(), 'fose_loader.exe')):\n util.replace_command('FalloutLauncher.exe', 'fose_loader.exe')",
"def setUp(self):\n self.browser = webdriver.Firefox()\n self.url = 'http://127.0.0.1:8000/'",
"def setUp(self):\n pyauto.PyUITest.setUp(self)\n\n webapp = self.InstallExtension(self.GetWebappPath())\n self.host.LaunchApp(webapp)\n self.account = self.GetPrivateInfo()['test_chromoting_account']",
"def __init__(self, fore_url=None, back_url=None):\n self.foreground_url = fore_url\n self.background_url = back_url\n self.output_image = None\n self.output_image_name = None",
"def set_base_url(self, base_url):\n\n while base_url[-1] == '/':\n base_url = base_url[:-1]\n self.url = base_url\n self._update_children_url()",
"def _startup_system(self):\n\n self._config_path.set(filedialog.asksaveasfilename())\n self._system = System(self._config_path.get())\n\n self._start_frame.pack_forget()\n self._main_frame.pack()",
"def set_short_url_base(url):",
"def bootup(debug_port, lines):\n lines.skip_until(\"Booting...\")\n lines.skip_until(\"Loading blocks...\")\n lines.skip_until(\"Starting user space\")\n authenticate(debug_port, lines)\n lines.expect_next(\"Enter command\")",
"def url(self, url: str):\n\n self._url = url"
] | [
"0.72488326",
"0.7194045",
"0.5525388",
"0.55191153",
"0.5504752",
"0.5186038",
"0.51711583",
"0.5048918",
"0.50152254",
"0.49929443",
"0.49760246",
"0.49735522",
"0.49065635",
"0.48945484",
"0.48868546",
"0.48841438",
"0.483911",
"0.48351452",
"0.4814122",
"0.48077625",
"0.4805337",
"0.47425383",
"0.47400776",
"0.47386676",
"0.4734691",
"0.4729684",
"0.47245958",
"0.47183496",
"0.4716726",
"0.4693407",
"0.46626538",
"0.4661256",
"0.4653746",
"0.4627383",
"0.46230942",
"0.46151075",
"0.4603417",
"0.4601851",
"0.45793933",
"0.4578715",
"0.45675215",
"0.45597154",
"0.45508093",
"0.45430595",
"0.45146048",
"0.4510129",
"0.45094585",
"0.45083398",
"0.45025083",
"0.44985637",
"0.44906074",
"0.44824415",
"0.44795638",
"0.44740757",
"0.44646597",
"0.44640592",
"0.44629264",
"0.44616488",
"0.44572276",
"0.4456709",
"0.4448739",
"0.44478887",
"0.44315627",
"0.44243905",
"0.44221845",
"0.4421216",
"0.44183585",
"0.44163248",
"0.44126666",
"0.44098684",
"0.44081837",
"0.4407662",
"0.44013876",
"0.43999144",
"0.43991897",
"0.4396867",
"0.43919736",
"0.43819752",
"0.43795916",
"0.43791977",
"0.4378196",
"0.43771142",
"0.43767428",
"0.4376516",
"0.43764067",
"0.43759382",
"0.43706334",
"0.43653876",
"0.43639576",
"0.43637395",
"0.43636203",
"0.43605006",
"0.4353913",
"0.4352699",
"0.43510434",
"0.43442863",
"0.4340916",
"0.43408284",
"0.4340167",
"0.43381876"
] | 0.8233862 | 0 |
Set the iSCSI boot details of the system in UEFI boot mode. The iSCSI initiator is identified by the MAC address provided. The initiator system is configured with target details such as IQN, LUN, IP, and port. | def set_iscsi_boot_info(self, mac, target_name, lun, ip_address,
port='3260', auth_method=None, username=None,
password=None):
if(self._is_boot_mode_uefi() is True):
iscsi_info = {}
iscsi_info['iSCSITargetName'] = target_name
iscsi_info['iSCSIBootLUN'] = lun
iscsi_info['iSCSITargetIpAddress'] = ip_address
iscsi_info['iSCSITargetTcpPort'] = int(port)
iscsi_info['iSCSITargetInfoViaDHCP'] = False
iscsi_info['iSCSIBootEnable'] = 'Enabled'
if (auth_method == 'CHAP'):
iscsi_info['iSCSIAuthenticationMethod'] = 'Chap'
iscsi_info['iSCSIChapUsername'] = username
iscsi_info['iSCSIChapSecret'] = password
self._change_iscsi_settings(mac.upper(), iscsi_info)
else:
msg = 'iscsi boot is not supported in the BIOS boot mode'
raise exception.IloCommandNotSupportedInBiosError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def unset_iscsi_boot_info(self, mac):\n if(self._is_boot_mode_uefi() is True):\n iscsi_info = {'iSCSIBootEnable': 'Disabled'}\n self._change_iscsi_settings(mac.upper(), iscsi_info)\n else:\n msg = 'iscsi boot is not supported in the BIOS boot mode'\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def set_bios_bootmode_uefi(ip, login_account, login_password, system_id):\n result = {}\n login_host = \"https://\" + ip\n try:\n # Connect using the BMC address, account name, and password\n # Create a REDFISH object\n REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,\n password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)\n # Login into the server and create a session\n REDFISH_OBJ.login(auth=utils.g_AUTH)\n except:\n traceback.print_exc()\n result = {'ret': False, 'msg': \"Please check the username, password, IP is correct\"}\n return result\n\n # GET the ComputerSystem resource\n system = utils.get_system_url(\"/redfish/v1\", system_id, REDFISH_OBJ)\n if not system:\n result = {'ret': False, 'msg': \"This system id is not exist or system member is None\"}\n REDFISH_OBJ.logout()\n return result\n for i in range(len(system)):\n system_url = system[i]\n response_system_url = REDFISH_OBJ.get(system_url, None)\n if response_system_url.status != 200:\n error_message = utils.get_extended_error(response_system_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (system_url, response_system_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else:\n # Get the bios resource\n bios_url = response_system_url.dict['Bios']['@odata.id']\n response_bios_url = REDFISH_OBJ.get(bios_url, None)\n if response_bios_url.status != 200:\n error_message = utils.get_extended_error(response_bios_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (bios_url, response_bios_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else: # Get bios success\n # Seek boot mode from bios attributes\n attribute_bootmode = None\n attributes = response_bios_url.dict['Attributes']\n for attribute in attributes:\n if attribute == \"BootMode\" or attribute == \"SystemBootMode\":\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"SystemBootMode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"Boot\" in attribute and \"Mode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n result = {'ret': False, 'msg': \"Can not found BootMode attribute in response of url %s\" %(bios_url)}\n REDFISH_OBJ.logout()\n return result\n\n # Get boot mode setting guide from bios registry\n WarningText = None\n ValueName = None\n bios_registry_url = \"/redfish/v1/Registries/\" + response_bios_url.dict['AttributeRegistry']\n response_bios_registry_url = REDFISH_OBJ.get(bios_registry_url, None)\n if response_bios_registry_url.status == 200:\n locations = response_bios_registry_url.dict['Location']\n bios_regjson_url = None\n for location in locations:\n if 'en' in location['Language']:\n bios_regjson_url = location['Uri']\n break\n if bios_regjson_url:\n response_bios_regjson_url = REDFISH_OBJ.get(bios_regjson_url, None)\n if response_bios_regjson_url.status == 200:\n regattributes = response_bios_regjson_url.dict['RegistryEntries']['Attributes']\n for regattribute in regattributes:\n if regattribute['AttributeName'] == attribute_bootmode:\n if 'WarningText' in regattribute:\n WarningText = regattribute['WarningText']\n for value in regattribute['Value']:\n if 'legacy' in value['ValueName'].lower():\n continue\n if 'uefi' in value['ValueName'].lower():\n ValueName = 
value['ValueName']\n break\n ValueName = value['ValueName']\n break\n \n # Perform patch to set\n if ValueName == None:\n ValueName = \"UEFIMode\"\n pending_url = response_bios_url.dict['@Redfish.Settings']['SettingsObject']['@odata.id']\n parameter = {attribute_bootmode: ValueName}\n attribute = {\"Attributes\": parameter}\n headers = {\"If-Match\": '*'}\n response_pending_url = REDFISH_OBJ.patch(pending_url, body=attribute, headers=headers)\n if response_pending_url.status in [200,204]:\n if WarningText:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful. WarningText: %s'% (WarningText) }\n else:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful'}\n elif response_pending_url.status == 405:\n result = {'ret': False, 'msg': \"Resource not supported\"}\n else:\n error_message = utils.get_extended_error(response_pending_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (\n pending_url, response_pending_url.status, error_message)}\n\n # Logout of the current session\n try:\n REDFISH_OBJ.logout()\n except:\n pass\n return result",
"def set_boot_device(self, task, device, persistent=False):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified.\") % device)\n\n uefi_mode = (\n boot_mode_utils.get_boot_mode(task.node) == 'uefi')\n\n # disable 60 secs timer\n timeout_disable = \"0x00 0x08 0x03 0x08\"\n ipmitool.send_raw(task, timeout_disable)\n\n # note(naohirot):\n # Set System Boot Options : ipmi cmd '0x08', bootparam '0x05'\n #\n # $ ipmitool raw 0x00 0x08 0x05 data1 data2 0x00 0x00 0x00\n #\n # data1 : '0xe0' persistent + uefi\n # '0xc0' persistent + bios\n # '0xa0' next only + uefi\n # '0x80' next only + bios\n # data2 : boot device defined in the dict _BOOTPARAM5_DATA2\n\n bootparam5 = '0x00 0x08 0x05 %s %s 0x00 0x00 0x00'\n if persistent:\n data1 = '0xe0' if uefi_mode else '0xc0'\n else:\n data1 = '0xa0' if uefi_mode else '0x80'\n data2 = _BOOTPARAM5_DATA2[device]\n\n cmd8 = bootparam5 % (data1, data2)\n ipmitool.send_raw(task, cmd8)\n else:\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified. \"\n \"Current iRMC firmware condition doesn't support IPMI \"\n \"but Redfish.\") % device)\n super(ipmitool.IPMIManagement, self).set_boot_device(\n task, device, persistent)",
"def test_update_bios_boot_mode(self):\n pass",
"def setMAC( self, intf, mac ):\n result = self.cmd( 'ifconfig', intf, 'down' )\n result += self.cmd( 'ifconfig', intf, 'hw', 'ether', mac )\n result += self.cmd( 'ifconfig', intf, 'up' )\n return result",
"def set_boot_options(self, image_name, **vendor_specifics):\n current_boot = self.show(\"show running-config | inc ^boot system \")\n file_system = vendor_specifics.get(\"file_system\")\n if file_system is None:\n file_system = self._get_file_system()\n\n file_system_files = self.show(f\"dir {file_system}\")\n if re.search(image_name, file_system_files) is None:\n log.error(\"Host %s: File not found error for image %s.\", self.host, image_name)\n raise NTCFileNotFoundError(\n # TODO: Update to use hostname\n hostname=self.host,\n file=image_name,\n directory=file_system,\n )\n\n current_images = current_boot.splitlines()\n commands_to_exec = [f\"no {image}\" for image in current_images]\n commands_to_exec.append(f\"boot system {file_system}/{image_name}\")\n self.config(commands_to_exec)\n\n self.save()\n if self.boot_options[\"sys\"] != image_name:\n log.error(\"Host %s: Setting boot command did not yield expected results\", self.host)\n raise CommandError(\n command=f\"boot system {file_system}/{image_name}\",\n message=\"Setting boot command did not yield expected results\",\n )\n\n log.info(\"Host %s: boot options have been set to %s\", self.host, image_name)",
"def iscsi_target(self, iscsi_target):\n\n self._iscsi_target = iscsi_target",
"def test_patch_bios_boot_mode(self):\n pass",
"def set_os_mtu(self, iface=None, mtu=None):\n pytest.skip(\"Method is not supported by Iperf TG\")",
"def _update_persistent_boot(self, device_type=[], persistent=False,\n mac=None):\n tenure = 'Once'\n new_device = device_type[0]\n # If it is a standard device, we need to convert in RIS convention\n if device_type[0].upper() in DEVICE_COMMON_TO_RIS:\n new_device = DEVICE_COMMON_TO_RIS[device_type[0].upper()]\n\n if persistent:\n tenure = 'Continuous'\n\n systems_uri = \"/rest/v1/Systems/1\"\n # Need to set this option first if device is 'UefiTarget'\n if new_device is 'UefiTarget':\n if not mac:\n msg = ('Mac is needed for iscsi uefi boot')\n raise exception.IloInvalidInputError(msg)\n\n headers, bios_uri, bios_settings = self._check_bios_resource()\n # Get the Boot resource and Mappings resource.\n boot_settings = self._get_bios_boot_resource(bios_settings)\n StructuredBootString = None\n\n for boot_setting in boot_settings['BootSources']:\n if(mac.upper() in boot_setting['UEFIDevicePath'] and\n 'iSCSI' in boot_setting['UEFIDevicePath']):\n StructuredBootString = boot_setting['StructuredBootString']\n break\n if not StructuredBootString:\n msg = ('MAC provided is Invalid \"%s\"' % mac)\n raise exception.IloInvalidInputError(msg)\n\n new_boot_settings = {}\n new_boot_settings['Boot'] = {'UefiTargetBootSourceOverride':\n StructuredBootString}\n status, headers, response = self._rest_patch(systems_uri, None,\n new_boot_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n new_boot_settings = {}\n new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': tenure,\n 'BootSourceOverrideTarget': new_device}\n status, headers, response = self._rest_patch(systems_uri, None,\n new_boot_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def login_iscsi_target(self, portal_config, target_config):\n ip = portal_config.get('ip')\n port = portal_config.get('port')\n iqn = target_config.get('iqn')\n if ip and port and iqn:\n command = 'iscsiadm -m node -l -T %s -p %s:%d' % (iqn, ip, port)\n self.cmd(command)",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def configure(self):\n self.node.get_logger().info('Configuring device...')\n try:\n data = self.con.receive(registers.BNO055_CHIP_ID_ADDR, 1)\n if data[0] != registers.BNO055_ID:\n raise IOError('Device ID=%s is incorrect' % data)\n # print(\"device sent \", binascii.hexlify(data))\n except Exception as e: # noqa: B902\n # This is the first communication - exit if it does not work\n self.node.get_logger().error('Communication error: %s' % e)\n self.node.get_logger().error('Shutting down ROS node...')\n sys.exit(1)\n\n # IMU connected => apply IMU Configuration:\n if not (self.con.transmit(registers.BNO055_OPR_MODE_ADDR, 1, bytes([registers.OPERATION_MODE_CONFIG]))):\n self.node.get_logger().warn('Unable to set IMU into config mode.')\n\n if not (self.con.transmit(registers.BNO055_PWR_MODE_ADDR, 1, bytes([registers.POWER_MODE_NORMAL]))):\n self.node.get_logger().warn('Unable to set IMU normal power mode.')\n\n if not (self.con.transmit(registers.BNO055_PAGE_ID_ADDR, 1, bytes([0x00]))):\n self.node.get_logger().warn('Unable to set IMU register page 0.')\n\n if not (self.con.transmit(registers.BNO055_SYS_TRIGGER_ADDR, 1, bytes([0x00]))):\n self.node.get_logger().warn('Unable to start IMU.')\n\n if not (self.con.transmit(registers.BNO055_UNIT_SEL_ADDR, 1, bytes([0x83]))):\n self.node.get_logger().warn('Unable to set IMU units.')\n\n # The sensor placement configuration (Axis remapping) defines the\n # position and orientation of the sensor mount.\n # See also Bosch BNO055 datasheet section Axis Remap\n mount_positions = {\n 'P0': bytes(b'\\x21\\x04'),\n 'P1': bytes(b'\\x24\\x00'),\n 'P2': bytes(b'\\x24\\x06'),\n 'P3': bytes(b'\\x21\\x02'),\n 'P4': bytes(b'\\x24\\x03'),\n 'P5': bytes(b'\\x21\\x02'),\n 'P6': bytes(b'\\x21\\x07'),\n 'P7': bytes(b'\\x24\\x05')\n }\n if not (self.con.transmit(registers.BNO055_AXIS_MAP_CONFIG_ADDR, 2,\n mount_positions[self.param.placement_axis_remap.value])):\n self.node.get_logger().warn('Unable to set sensor placement configuration.')\n\n # Show the current sensor offsets\n self.node.get_logger().info('Current sensor offsets:')\n self.print_calib_data()\n if self.param.set_offsets.value:\n configured_offsets = \\\n self.set_calib_offsets(\n self.param.offset_acc,\n self.param.offset_mag,\n self.param.offset_gyr,\n self.param.radius_mag,\n self.param.radius_acc)\n if configured_offsets:\n self.node.get_logger().info('Successfully configured sensor offsets to:')\n self.print_calib_data()\n else:\n self.node.get_logger().warn('setting offsets failed')\n\n\n # Set Device mode\n device_mode = self.param.operation_mode.value\n self.node.get_logger().info(f\"Setting device_mode to {device_mode}\")\n\n if not (self.con.transmit(registers.BNO055_OPR_MODE_ADDR, 1, bytes([device_mode]))):\n self.node.get_logger().warn('Unable to set IMU operation mode into operation mode.')\n\n self.node.get_logger().info('Bosch BNO055 IMU configuration complete.')",
"def setUp(self):\n super().setUp()\n for intf in self.send_ifs:\n self.vapi.ip_reassembly_enable_disable(\n sw_if_index=intf.sw_if_index, enable_ip6=True\n )\n self.vapi.ip_reassembly_set(\n timeout_ms=0,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n expire_walk_interval_ms=10,\n is_ip6=1,\n )\n self.virtual_sleep(0.25)\n self.vapi.ip_reassembly_set(\n timeout_ms=1000000,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n expire_walk_interval_ms=1000,\n is_ip6=1,\n )",
"def gather_system_versions(self):\n # Get Mac model ID\n self.hw_version = str(\n IORegistryEntryCreateCFProperty(\n IOServiceGetMatchingService(\n 0,\n IOServiceMatching(\"IOPlatformExpertDevice\")),\n \"model\",\n None,\n 0)).replace(\n \"\\x00\",\n \"\")\n\n if \"imacpro\" in self.hw_version.lower():\n # iMac Pro stores it's EFI data different due it's new architecture\n # so grab the EFI & SMC ROM versions appropriately\n raw_efi_list = []\n raw_rom_info = str(\n IORegistryEntryCreateCFProperty(\n IORegistryEntryFromPath(\n 0,\n \"IODeviceTree:/rom\"),\n \"apple-rom-info\",\n None,\n 0))\n for data in raw_rom_info.split(\"\\n\"):\n if data.strip().startswith(\"BIOS ID\"):\n raw_efi_list = data.split(\":\")[1].strip().split(\".\")\n break\n else:\n self.message(\n \"[-] Could not find raw EFI data to determine EFI versions. Exiting....\")\n return False\n\n self.efi_version = \"%s.%s.%s\" % (\n raw_efi_list[0], raw_efi_list[2], raw_efi_list[3])\n # Can't currently find the SMC version like this on imac pros ....\n # self.smc_version = str(IORegistryEntryCreateCFProperty(IOServiceGetMatchingService(0, IOServiceMatching(\"AppleSMC\")), \"smc-version\", None, 0))\n self.smc_version = \"\"\n else:\n # EFI & SMC ROM versions\n self.smc_version = str(\n IORegistryEntryCreateCFProperty(\n IOServiceGetMatchingService(\n 0,\n IOServiceMatching(\"AppleSMC\")),\n \"smc-version\",\n None,\n 0))\n raw_efi = str(\n IORegistryEntryCreateCFProperty(\n IORegistryEntryFromPath(\n 0,\n \"IODeviceTree:/rom\"),\n \"version\",\n None,\n 0)).replace(\n \"\\x00\",\n \"\").split(\".\")\n self.efi_version = \"%s.%s.%s\" % (\n raw_efi[0], raw_efi[2], raw_efi[3])\n\n # Set the salt to be the MAC address of the system, using the MAC as a salt in this manner\n # helps ensure that the hashed sysuuid is pseudonymous. We don't want to know the sysuuid's\n # value, but we do want it to be unique however. The Salt value is\n # never submitted to the API\n salt = hex(getnode())\n sys_uuid = str(\n IORegistryEntryCreateCFProperty(\n IOServiceGetMatchingService(\n 0,\n IOServiceMatching(\"IOPlatformExpertDevice\")),\n \"IOPlatformUUID\",\n None,\n 0)).replace(\n \"\\x00\",\n \"\")\n self.h_sys_uuid = hashlib.sha256(salt + sys_uuid).hexdigest()\n\n # Get the Board-ID, this is how EFI files are matched to running\n # hardware - Nastee\n self.board_id = str(\n IORegistryEntryCreateCFProperty(\n IOServiceGetMatchingService(\n 0,\n IOServiceMatching(\"IOPlatformExpertDevice\")),\n \"board-id\",\n None,\n 0)).replace(\n \"\\x00\",\n \"\")\n\n # Get OS version\n self.os_version = commands.getoutput(\"sw_vers -productVersion\")\n\n # Get build number\n self.build_num = commands.getoutput(\"sw_vers -buildVersion\")\n\n # Carve out the major version as we use this a bunch\n # self.os_maj_ver = \".\".join(self.os_version.split(\".\")[:2])\n\n # Add gathered info to the dictionary to query the API with\n self.endpoints_to_check[\"127.0.0.1\"] = {\n \"hashed_uuid\": self.h_sys_uuid,\n \"hw_ver\": self.hw_version,\n \"rom_ver\": self.efi_version,\n \"smc_ver\": self.smc_version,\n \"board_id\": self.board_id,\n \"os_ver\": self.os_version,\n \"build_num\": self.build_num}\n\n return True",
"def set_bios_settings(self, data=None):\n\n if not data:\n raise exception.SDFlexError(\"Could not apply settings with\"\n \" empty data\")\n sushy_system = self._get_sushy_system()\n\n try:\n for key in data.keys():\n sushy_system.bios.set_attribute(key, data[key])\n except sushy.exceptions.SushyError as e:\n message_extended_info = e.body.get('@Message.ExtendedInfo')\n error_message = message_extended_info[0]['Message']\n\n msg = (self._(\"Setting the value of Bios attribute \"\n \"'%(atrribute)s' is not succesfull. \"\n \"Error: %(error)s\") %\n {'error': str(error_message), 'atrribute': key})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def ipmi_setup():\n\n verify_ipmi_user_parm_accepted()",
"def set_interface(self, iface):\n\t\tf = os.path.join(self.config_dir, \"iface-%s\" % LibvirtFile.TEMPLATE_FILE)\n\t\tself.iface_xml = cziso.fill_template(f, iface=iface)",
"def prepare_node(self, node):\n self.interface = IpmiInterface(\n node.get('fencer-ip'),\n node.get('fencer-user'),\n node.get('fencer-password'),\n verbose=CONF.debug)",
"def test_get_bios_boot_mode_by_moid(self):\n pass",
"def setup_vm_env(self, driver='default'):\n if self.env_done:\n return\n\n # bind to default driver\n self.bind_nic_driver(self.dut_ports[:2], driver=\"\")\n\n self.used_dut_port_0 = self.dut_ports[0]\n self.host_intf0 = self.dut.ports_info[self.used_dut_port_0]['intf']\n tester_port = self.tester.get_local_port(self.used_dut_port_0)\n self.tester_intf0 = self.tester.get_interface(tester_port)\n\n self.dut.generate_sriov_vfs_by_port(\n self.used_dut_port_0, 1, driver=driver)\n self.sriov_vfs_port_0 = self.dut.ports_info[\n self.used_dut_port_0]['vfs_port']\n self.vf0_mac = \"00:10:00:00:00:00\"\n self.dut.send_expect(\"ip link set %s vf 0 mac %s\" %\n (self.host_intf0, self.vf0_mac), \"# \")\n\n self.used_dut_port_1 = self.dut_ports[1]\n self.host_intf1 = self.dut.ports_info[self.used_dut_port_1]['intf']\n self.dut.generate_sriov_vfs_by_port(\n self.used_dut_port_1, 1, driver=driver)\n self.sriov_vfs_port_1 = self.dut.ports_info[\n self.used_dut_port_1]['vfs_port']\n tester_port = self.tester.get_local_port(self.used_dut_port_1)\n self.tester_intf1 = self.tester.get_interface(tester_port)\n\n self.vf1_mac = \"00:20:00:00:00:00\"\n self.dut.send_expect(\"ip link set %s vf 0 mac %s\" %\n (self.host_intf1, self.vf1_mac), \"# \")\n\n try:\n\n for port in self.sriov_vfs_port_0:\n port.bind_driver('pci-stub')\n\n for port in self.sriov_vfs_port_1:\n port.bind_driver('pci-stub')\n\n time.sleep(1)\n vf0_prop = {'opt_host': self.sriov_vfs_port_0[0].pci}\n vf1_prop = {'opt_host': self.sriov_vfs_port_1[0].pci}\n\n # set up VM0 ENV\n self.vm0 = QEMUKvm(self.dut, 'vm0', 'vf_vlan')\n self.vm0.set_vm_device(driver='pci-assign', **vf0_prop)\n self.vm0.set_vm_device(driver='pci-assign', **vf1_prop)\n self.vm_dut_0 = self.vm0.start()\n if self.vm_dut_0 is None:\n raise Exception(\"Set up VM0 ENV failed!\")\n\n except Exception as e:\n self.destroy_vm_env()\n raise Exception(e)\n\n self.env_done = True",
"def set_one_time_boot(self, device, mac=None):\n self._update_persistent_boot([device], persistent=False, mac=mac)",
"def setup_device(device):\n try:\n # Gets around \"Resource busy\" errors\n device.detach_kernel_driver(0)\n except Exception:\n pass\n device.set_configuration()",
"def base_setup(self, request, interface_iterate):\n self.interface = interface_iterate\n\n if self.interface.lower() == \"cephfs\":\n self.interface = constants.CEPHFILESYSTEM\n self.sc_obj = constants.DEFAULT_STORAGECLASS_CEPHFS\n if self.interface.lower() == \"rbd\":\n self.interface = constants.CEPHBLOCKPOOL\n self.sc_obj = constants.DEFAULT_STORAGECLASS_RBD",
"def __init__(self, machine):\n super().__init__(machine)\n self.features['has_i2c'] = True",
"def __init__(self, machine):\n super().__init__(machine)\n self.features['has_i2c'] = True",
"def command_setup(self, *args):\n def usage():\n print(self.command_setup.__doc__)\n sys.exit(1)\n\n if len(args) == 0:\n usage()\n\n try:\n # All of these (except mount_opt) map directly to the model properties\n # We allow several `mount_opt` flags and merge their values, before\n # assigning to the `mount_opts` property (which expects a list).\n fields = [\n \"id\", \"host\", \"port\", \"user\",\n \"mount_opt\", \"mount_point\",\n \"ssh_key\", \"cmd_before_mount\",\n \"auth_method\",\n ]\n opts, _ = getopt.getopt(args, \"\", [\"%s=\" % s for s in fields])\n except getopt.GetoptError as e:\n sys.stderr.write('Error: %s\\n\\n' % e)\n usage()\n\n system = SystemModel()\n mount_opts = []\n for name, value in opts:\n name = name.lstrip('-')\n if not hasattr(system, name):\n continue\n if name == 'mount_opt':\n mount_opts.append(value)\n continue\n setattr(system, name, value)\n system.mount_opts = mount_opts\n\n is_valid, errors = system.validate()\n if not is_valid:\n sys.stderr.write('Invalid data found:\\n')\n for field_name, msg in errors:\n sys.stderr.write(' - %s / %s\\n' % (field_name, msg))\n sys.stderr.write('\\n')\n usage()\n sys.exit(1)\n\n system.save(self.environment)\n print('Configuration created.')\n print('You can try mounting now: `sftpman mount %s`' % system.id)",
"def do_setup(self, context):\n super(RBDISCSIDriver, self).do_setup(context)\n if client is None:\n msg = _(\"You must install rbd-iscsi-client python package \"\n \"before using this driver.\")\n raise exception.VolumeDriverException(data=msg)\n\n # Make sure we have the basic settings we need to talk to the\n # iscsi api service\n config = self.configuration\n self.client = self._create_client()\n self.client.set_debug_flag(config.safe_get('rbd_iscsi_api_debug'))\n resp, body = self.client.get_api()\n if not self._is_status_200(resp):\n # failed to fetch the open api url\n raise exception.InvalidConfigurationValue(\n option='rbd_iscsi_api_url',\n value='Could not talk to the rbd-target-api')\n\n # The admin had to have setup a target_iqn in the iscsi gateway\n # already in order for the gateways to work properly\n self.target_iqn = self.configuration.safe_get('rbd_iscsi_target_iqn')\n LOG.info(\"Using target_iqn '%s'\", self.target_iqn)",
"def change_mac(interface, mac):\r\n print(\"Changing MAC-address for \" + interface + \" to \" + mac)\r\n subprocess.call([\"sudo\", \"ifconfig\", interface, \"down\"])\r\n subprocess.call([\"sudo\", \"ifconfig\", interface, \"hw\", \"ether\", mac])\r\n subprocess.call([\"sudo\", \"ifconfig\", interface, \"up\"])",
"def main():\n kernel_params = _parse_kernel_cmdline()\n api_url = kernel_params.get('ipa-api-url')\n if api_url is None:\n _process_error('Mandatory kernel parameter \"ipa-api-url\" is missing.')\n\n boot_mac = kernel_params.get('BOOTIF')\n if boot_mac is None:\n _process_error('Cannot define boot interface, \"BOOTIF\" kernel '\n 'parameter is missing.')\n\n # There is a difference in syntax in BOOTIF variable between pxe and ipxe\n # boot with Ironic.\n # For pxe boot the the leading `01-' denotes the device type (Ethernet)\n # and is not a part of the MAC address\n if boot_mac.startswith('01-'):\n boot_mac = boot_mac[3:].replace('-', ':')\n\n # FIXME(pas-ha) discover all MACs\n node = lookup(api_url, [boot_mac])\n uuid = node['node']['uuid']\n timeout = node['config']['heartbeat_timeout']\n\n heartbeat_url = '{api_url}/v1/heartbeat/{uuid}'.format(api_url=api_url,\n uuid=uuid)\n for n in range(_GET_ADDR_MAX_ITERATION):\n boot_ip = _get_interface_ip(boot_mac)\n if boot_ip is not None:\n break\n time.sleep(_RETRY_INTERVAL)\n else:\n _process_error('Cannot find IP address of boot interface.')\n\n heartbeat(heartbeat_url, boot_ip, timeout)",
"def connect(self):\n try:\n portal = \"%s:%s\" % (self.ip, self.port)\n execute('iscsiadm', mode='discovery', type='sendtargets',\n portal=self.ip)\n execute('iscsiadm', mode='node', targetname=self.iqn,\n portal=self.ip, login=None)\n except ProcessError, e:\n logger.exception(\"iscsi login failed with '%s'\" % e)\n raise ISCSILoginFailed()\n if not self.connected:\n raise ISCSINotConnected(\"ISCSI device doesn't exist\")",
"def setup_dhcp_env(device):\n raise NotImplementedError",
"def __init__(self, busRestriction=0, devAddressRestriction=0, serialNumber=\"\"):\n self.handle = libcaer.caerDeviceOpen(1, libcaer.CAER_DEVICE_DAVIS, busRestriction, devAddressRestriction, serialNumber)\n self.info = libcaer.caerDavisInfoGet(self.handle)\n\n print(\"device ID: \" + str(libcaer.caer_davis_info_deviceID_get(self.info)))\n\n if (libcaer.caer_davis_info_deviceIsMaster_get(self.info)):\n print(\"device is Master\")\n else:\n print(\"device is Slave\")\n\n print(\"device Serial Number: \" + str(libcaer.caer_davis_info_deviceSerialNumber_get(self.info)))\n print(libcaer.caer_davis_info_deviceString_get(self.info))\n\n self.dvsSizeX = libcaer.caer_davis_info_dvsSizeX_get(self.info)\n self.dvsSizeY = libcaer.caer_davis_info_dvsSizeY_get(self.info)\n\n self.apsSizeX = libcaer.caer_davis_info_apsSizeX_get(self.info)\n self.apsSizeY = libcaer.caer_davis_info_apsSizeY_get(self.info)\n\n # init default biases\n ret = libcaer.caerDeviceSendDefaultConfig(self.handle)\n if(ret == True):\n print(\"Default biases loaded\")\n else:\n print(\"Error while loading default biases\")\n raise Exception\n\n # set blocking data exchange\n ret = libcaer.caerDeviceConfigSet(self.handle, libcaer.CAER_HOST_CONFIG_DATAEXCHANGE, libcaer.CAER_HOST_CONFIG_DATAEXCHANGE_BLOCKING, True)\n if(ret == True):\n print(\"Data exchange set to blocking mode\")\n else:\n print(\"Error in communicating with the device, please check your setup\")\n raise Exception\n\n # start data transfer from device\n ret = libcaer.caerDeviceDataStart(self.handle, None, None, None, None, None)\n if(ret == True):\n print(\"Data transfer started\")\n else:\n print(\"Error in starting data transfer\")\n raise Exception",
"def spoof(self, mac, air=False):\n\t\t\n\t\tif air:\n\t\t\tos.system(\n\t\t\t\t'sudo '\n\t\t\t\t'/System/Library/PrivateFrameworks'\n\t\t\t\t'/Apple80211.framework/Versions'\n\t\t\t\t'/Current/Resources/airport -z'\n\t\t\t)\n\t\t\n\t\t_status = os.system('sudo ifconfig %s ether %s' % (self.id, mac))\n\t\t\n\t\treturn 'Interface %s (%s) => (%s)' % (self.id, self.mac, mac)",
"def mac(self, mac):\n\n self._mac = mac",
"def boot(self, **kwargs):\n\n cloud = kwargs.get('cloud', Default.cloud)\n name = kwargs.get('name', Vm.generate_vm_name())\n image = kwargs.get('image', Default.image)\n flavor = kwargs.get('flavor', Default.flavor)\n key = kwargs.get('key', Default.key)\n secgroup = kwargs.get('secgroup', Default.secgroup)\n group = kwargs.get('group', Default.group)\n username = kwargs.get('username', Image.guess_username(image))\n cluster = kwargs.get('cluster', None)\n\n # shorthand for getting a dict of all the vm details\n #\n # IMPORTANT: anything declared prior to the call to `locals()`\n # may be passed to `Vm.boot`, so make sure that only parameters are\n # defined above this comment.\n details = locals()\n details.pop('kwargs')\n\n # currently, Vm.boot returns the instance UUID from the provider for openstack images\n # 2016/12/12\n uuid = Vm.boot(**details)\n\n\n # helper function: the Vm.boot only returns a UUID, but we\n # need to use the VM model instead. Additionally, we'll need\n # to poll the VM to wait until it is active.\n #\n # The kwargs are used to select the item from the DB:\n # eg: uuid=???, cm_id=???, etc\n def get_vm(**kwargs):\n \"\"\"Selects the VM based on the given properties\"\"\"\n model = self.db.vm_table_from_provider('openstack')\n vm = self.db.select(model, **kwargs).all()\n assert len(vm) == 1, vm\n vm = vm[0]\n return vm\n\n # get the VM from the UUID\n vm = get_vm(uuid=uuid)\n cm_id = vm.cm_id\n\n def is_active():\n Vm.refresh(cloud=cloud)\n vm = get_vm(cm_id=cm_id)\n return vm.status == 'ACTIVE'\n\n if not exponential_backoff(is_active):\n Console.error('Failed to get ACTIVE vm within timeframe')\n raise ValueError\n\n assert is_active()\n vm = get_vm(cm_id=cm_id)\n assert isinstance(vm, VM_OPENSTACK), vm.__class__\n\n return OpenstackNode(model=vm, provider=self)",
"def __init__(self):\n self.dhcp_client_state = store.MacToIP() # mac => DHCP_State",
"def bootup(debug_port, lines):\n lines.skip_until(\"Booting...\")\n lines.skip_until(\"Loading blocks...\")\n lines.skip_until(\"Starting user space\")\n authenticate(debug_port, lines)\n lines.expect_next(\"Enter command\")",
"def Bootimage(self, kernel, init, *args):\n\n # FIXME: I don't at all like the way this currently works. It is very kludgy\n # If you find a bug in here, you probably really want to talk to me (Benno)\n # because it is a very evil bit of code\n others = []\n for arg in args:\n if type(arg) == TupleType:\n others.append(arg)\n else:\n others.append((arg, \"\"))\n\n depends = [kernel]\n for binary in [init] + others:\n last = depends[-1]\n flags = ''\n if type(binary) == TupleType:\n flags = binary[1]\n binary = binary[0]\n if flags != \"raw\":\n def buildreloc_str(targets, source, env):\n return \"=> Relocating %s\" % targets[0]\n new_binary = self.Command(str(binary) + \".reloc\", [binary, last], Action(buildreloc(flags), buildreloc_str))\n # SCons changed its API :(\n if scons_version <= (0, 95):\n new_binary = [new_binary]\n\n new_binary = new_binary[0]\n else:\n new_binary = binary\n last = new_binary\n depends.append(new_binary)\n\n def simulate(target, source, env):\n cmdline = env.machine.sim(target, source, env)\n os.system(cmdline)\n\n def simulatestr(target, source, env):\n return \"=> Simulating %s\" % source[0]\n\n def test(target, source, env):\n\t def print_log(log):\n\t print '--- SIMULATOR LOG ' + '-'*60\n\t print log.getvalue()\n\t print '-'*78\n test_data = env[\"EXPECT_TEST_DATA\"]\n if test_data is None:\n raise UserError, \"No expected test output supplied for this build\"\n try:\n import pexpect\n except ImportError:\n raise UserError, \"There was a problem importing the pexpect library.\\n\" \\\n \"This is required for running the simulate_test target.\"\n\t if pexpect.__version__ >= '2.0':\n\t\tlog = StringIO()\n\t\tx = pexpect.spawn(env.machine.sim(target, source, env), timeout=float(timeout), logfile=log)\n\t else:\n\t\tx = pexpect.spawn(env.machine.sim(target, source, env), timeout=float(timeout))\n\t\tlog = StringIO()\n\t\tx.setlog(log)\n for in_, out_ in test_data:\n try:\n x.expect(in_)\n except pexpect.ExceptionPexpect, e:\n\t\t if isinstance(e, pexpect.TIMEOUT):\n\t\t\tprint \"Timed out waiting for: <%s>\" % in_\n\t\t elif isinstance(e, pexpect.EOF):\n\t\t\tprint \"Simulator exited while waiting for: <%s>\" % in_\n\t\t print_log(log)\n raise UserError, \"Failed test.\"\n if not out_ is None:\n x.sendline(out_)\n\t if print_sim_log:\n\t print_log(log)\n def teststr(target, source, env):\n return \"=> Testing %s\" % source[0]\n\n if self.arch in [\"arm\", \"mips64\", \"alpha\", \"powerpc\", \"powerpc64\"]:\n self.dite = \"tools/build/dite/dite\"\n SConscript(\"tools/dite/src/SConstruct\", build_dir=\"tools/build/dite\", duplicate=0)\n cmd = self.Command(\"%s/bootimg.dite\" % self.builddir, depends,\n BootImageBuilder(self.arch, kernel,\n [(init, \"i\")] + others, dite_cmd=self.dite))\n self.Depends(cmd, self.dite)\n sim_cmd = cmd\n\n if self.machine.elfloader:\n # I don't like this hack at all, but seems to work.\n Export(\"cmd\")\n if self.arch in [\"arm\"]:\n self[\"CPPFLAGS\"] = []\n elf_loader_env = self.Copy(\"elf-loader\")\n elf_loader_env.AddLibrary(\"l4\")\n elf_loader_env.AddLibrary(\"c\", system = \"baremetal\")\n elf_loader_env.AddLibrary(\"elf\")\n elf_loader = elf_loader_env.Application(\"loaders/elf-loader\")\n cmd = elf_loader\n\n installed = []\n installed.append(self.InstallAs(\"%s/%s\" % (self.install_dir, self.install_name), cmd))\n self.Alias(\"install\", installed)\n elif self.arch == \"ia32\":\n # On ia32 we need kickstart\n kickstart_env = self.Copy(\"kickstart\")\n kickstart_env.AddLibrary(\"l4\")\n kickstart_env.AddLibrary(\"c\", 
system=\"baremetal\")\n kickstart_env.AddLibrary(\"elf\")\n kickstart = kickstart_env.Application(\"loaders/kickstart\")\n\n \n install_menulst = self.Command(\"%s/inst/menu.lst\" % self.builddir, depends,\n BuildMenuLst, ROOT=boot_path)\n env.Depends(install_menulst, Value(boot_path))\n # Build a menu.lst\n installed = []\n installed.append(self.Install(self.install_dir, kickstart))\n installed.append(self.Install(self.install_dir, kernel))\n installed.append(self.Install(self.install_dir, install_menulst))\n for binary in [init] + others:\n flags = \"\"\n if type(binary) == TupleType:\n flags = binary[1]\n binary = binary[0]\n if flags != \"raw\":\n installed.append(self.InstallAs(\"%s/%s\" % (self.install_dir, os.path.basename(str(binary))), \"%s.reloc\" % binary))\n else:\n installed.append(self.InstallAs(\"%s/%s\" % (self.install_dir, os.path.basename(str(binary))), binary)) \n self.Alias(\"install\", installed)\n\n # Generate an image that can be used for the simulator...\n sim_menulst = self.Command(\"%s/sim/menu.lst\" % self.builddir, depends,\n BuildMenuLst, ROOT=\"/boot/grub\")\n simdeps = copy.copy(depends)\n simdeps.append(sim_menulst)\n simdeps.append(kickstart)\n mtools = SConscript(\"tools/mtools/SConstruct\", build_dir=\"#tools/build/mtools\",\n duplicate=0, exports=[\"tool_prefix\"])\n build_dir = \"tools/build/grub\"\n Export(\"build_dir\")\n grub = SConscript(\"tools/grub/SConstruct\", build_dir=\"#tools/build/grub\",\n duplicate=0, exports=[\"tool_prefix\"])\n cmd = self.Command(\"%s/c.img\" % self.builddir, simdeps, GrubBootImage)\n self.Depends(cmd, mtools)\n self.Depends(cmd, grub)\n sim_cmd = cmd\n\n # Generate an image that can be used for the usb\n usb_menulst = self.Command(\"%s/usbfloppy/menu.lst\" % self.builddir, depends,\n BuildMenuLst, ROOT=\"/boot/grub\")\n usbdeps = copy.copy(depends)\n usbdeps.append(usb_menulst)\n usbdeps.append(kickstart)\n build_dir = \"tools/build/grub\"\n Export(\"build_dir\")\n grub = SConscript(\"tools/grub/SConstruct\", build_dir=\"#tools/build/grub\",\n duplicate=0, exports=[\"tool_prefix\"])\n cmd = self.Command(\"%s/usb.img\" % self.builddir, usbdeps, GrubFloppyImage)\n self.Depends(cmd, mtools)\n self.Depends(cmd, grub)\n self.Alias(\"usbimage\", cmd)\n\n self.Command(\"simulate\", sim_cmd, Action(simulate, simulatestr))\n self.Command(\"simulate_test\", sim_cmd, Action(test, teststr))\n return cmd",
"def create(self, spec, force_cache):\n\n instance_id = self.get_instance_id(spec)\n instance_dir = os.path.join(self.directory, instance_id)\n # create the directory to hold all the bits\n logger.info(\"Creating directory %s\" % (instance_dir, ))\n os.mkdir(instance_dir)\n\n logger.info(\"Creating virtual machine\")\n self.vboxmanage(\"createvm\", name=instance_id, directory=self.directory, ostype=self.ostype[spec.image.distro])\n self.vboxmanage(\"configurevm\", name=instance_id, memsize=spec.hardware.memory)\n network = self.guess_network()\n network.configurevm(instance_id)\n\n logger.info(\"Creating disk image from %s\" % (spec.image, ))\n # create the disk image and attach it\n disk = os.path.join(instance_dir, instance_id + \"_disk1.vdi\")\n self.qemu_img(\"convert\", source=spec.image.fetch(self.image_dir, force_cache), destination=disk, format=\"vdi\")\n self.vboxmanage(\"create_sata\", name=instance_id)\n self.vboxmanage(\"attach_disk\", name=instance_id, disk=disk)\n\n # create the seed ISO\n logger.info(\"Creating cloudinit seed\")\n config_class = self.configs[spec.image.distro]\n cloud_config = config_class(spec)\n meta_data = MetaData(spec.name)\n seed = Seed(instance_dir, cloud_config=cloud_config, meta_data=meta_data)\n seed.write()\n\n logger.info(\"Attaching devices\")\n # connect the seed ISO and the tools ISO\n self.vboxmanage(\"create_ide\", name=instance_id)\n self.vboxmanage(\"attach_ide\", name=instance_id, port=\"0\", device=\"0\", filename=seed.pathname)\n self.vboxmanage(\"attach_ide\", name=instance_id, port=\"0\", device=\"1\", filename=\"/usr/share/virtualbox/VBoxGuestAdditions.iso\")\n logger.info(\"Machine created\")\n\n logger.info(\"Mounting host drive\")\n hostpath = os.path.expanduser(\"~\")\n self.vboxmanage(\"mount\", name=instance_id, hostpath=hostpath)\n return self.load(instance_id)",
"def set_macaddr(self):\n if not self.macaddr:\n return\n try:\n cmd = [rcEnv.syspaths.nsenter, \"--net=\"+self.netns, \"ip\", \"link\", \"set\", self.final_guest_dev, \"address\", self.macaddr]\n ret, out, err = self.vcall(cmd)\n except ex.excError:\n pass\n if ret != 0:\n return ret, out, err",
"def test_update_bios_unit(self):\n pass",
"def __init__(self, hdw=['Soundcard'], devicename='dev1'):\n self.debugFlag = False\n self.task = None # NI Task\n self.required_hardware = hdw # Require specific hardware \n self.hardware = [] # list of hardware actually found on this system\n self.find_hardware(device_info={'devicename': devicename}) # population the self.hardware list",
"def iscsi_target_num(self, iscsi_target_num):\n\n self._iscsi_target_num = iscsi_target_num",
"def lxd_init(self, iface):\n lxd_init_cmds = [\n self.set_lxd_init_auto,\n self.set_lxc_config,\n self.set_lxd_storage,\n partial(self.setup_bridge_network, iface),\n self.setup_unused_bridge_network,\n self.set_default_profile\n ]\n\n for cmd in lxd_init_cmds:\n app.log.debug(\"LXD Init: {}\".format(cmd))\n cmd()",
"def set_provisioning_state(self, arch, subarch, initrd_desc, kernel_desc,\n kernel_opts=\"\", preseed_name=None, netboot=None):\n if arch == '' or subarch == '' or initrd_desc == '' or kernel_desc == '':\n raise ClientError(\"Missing arguments for setting machine's state\")\n\n image_controller = ImageControl(self.urlhandler)\n url = \"/api/v1/machine/{}\".format(self.machine_id)\n\n if initrd_desc and kernel_desc:\n initrd_id = image_controller.get_image_id(\n initrd_desc, \"Initrd\", arch)\n\n kernel_id = image_controller.get_image_id(kernel_desc,\n \"Kernel\", arch)\n else:\n raise ClientError(\"Invalid Initrd and Kernel description\")\n\n parameters = dict()\n\n if preseed_name is not None:\n preseed_controller = PreseedControl(self.urlhandler)\n preseed_id = preseed_controller.get_preseed_id(preseed_name)\n if preseed_name and preseed_id is None:\n raise ProvisionerError(\"Preseed '{0}' unknown\".format(preseed_name))\n parameters['preseed_id'] = preseed_id\n\n if kernel_opts:\n parameters['kernel_opts'] = kernel_opts\n\n parameters['kernel_id'] = kernel_id\n parameters['initrd_id'] = initrd_id\n parameters['subarch'] = subarch\n\n if netboot is not None:\n parameters['netboot_enabled'] = netboot\n\n data = json.dumps(parameters)\n\n return self.urlhandler.put(url, data)",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def load_ethernet_aux(self, _type, _path, ethernet_data):\n\n support_ipv6 = True\n dhcpv4curr = dhcpv4conf = oem_dhcpv4curr = oem_dhcpv4conf = dict()\n dhcpv6curr = dhcpv6conf = oem_dhcpv6curr = oem_dhcpv6conf = dict()\n errors = []\n\n ident_eth = False\n if \"EthernetInterface\" in _type:\n for curr_sel in self.rdmc.app.select(\n _type.split(\".\")[0] + \".\",\n (\n self.rdmc.app.typepath.defs.hrefstring,\n self.rdmc.app.typepath.defs.managerpath + \"*\",\n ),\n path_refresh=True,\n ):\n if curr_sel.path == _path:\n ident_eth = True\n break\n # 'links/self/href' required when using iLO 4 (rest).\n elif \"EthernetNetworkInterface\" in _type:\n for curr_sel in self.rdmc.app.select(\n _type.split(\".\")[0] + \".\",\n (\n \"links/self/\" + self.rdmc.app.typepath.defs.hrefstring,\n self.rdmc.app.typepath.defs.managerpath + \"*\",\n ),\n path_refresh=True,\n ):\n if curr_sel.path == _path:\n ident_eth = True\n break\n else:\n raise Exception(\"Invalid type in management NIC load operation: '%s'\" % _type)\n\n if not ident_eth:\n raise InvalidPathError(\n \"Path: '%s' is invalid/not identified on this server.\\n\" % _path\n )\n\n ident_name = curr_sel.dict.get(\"Name\")\n ident_id = curr_sel.dict.get(\"Id\")\n # ENABLING ETHERNET INTERFACE SECTION\n try:\n # Enable the Interface if called for and not already enabled\n if ethernet_data.get(\"InterfaceEnabled\") and not curr_sel.dict.get(\n \"InterfaceEnabled\"\n ):\n self.rdmc.app.patch_handler(\n _path, {\"InterfaceEnabled\": True}, silent=True\n )\n self.rdmc.ui.printer(\"NIC Interface Enabled.\\n\")\n # Disable the Interface if called for and not disabled already\n # No need to do anything else, just return\n elif not ethernet_data.get(\"InterfaceEnabled\") and not curr_sel.dict.get(\n \"InterfaceEnabled\"\n ):\n self.rdmc.app.patch_handler(\n _path, {\"InterfaceEnabled\": False}, silent=True\n )\n self.rdmc.ui.warn(\n \"NIC Interface Disabled. All additional configurations \" \"omitted.\"\n )\n return\n except (KeyError, NameError, TypeError, AttributeError):\n # check OEM for NICEnabled instead\n if (\n not curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"NICEnabled\"]\n and ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"NICEnabled\"]\n ):\n self.rdmc.app.patch_handler(\n _path,\n {\"Oem\": {self.rdmc.app.typepath.defs.oemhp: {\"NICEnabled\": True}}},\n silent=True,\n )\n self.rdmc.ui.printer(\"NIC Interface Enabled.\\n\")\n elif (\n not curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"NICEnabled\"]\n and not ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\n \"NICEnabled\"\n ]\n ):\n self.rdmc.app.patch_handler(\n _path,\n {\"Oem\": {self.rdmc.app.typepath.defs.oemhp: {\"NICEnabled\": False}}},\n silent=True,\n )\n self.rdmc.ui.printer(\"NIC Interface Disabled.\\n\")\n return\n # except IloResponseError should just be raised and captured by decorator. 
No point in\n # performing any other operations if the interface cannot be set.\n\n # END ENABLING ETHERNET INTERFACE SECTION\n # ---------------------------------------\n # DETERMINE DHCPv4 and DHCPv6 States and associated flags\n\n if \"NICSupportsIPv6\" in list(\n curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp].keys()\n ):\n support_ipv6 = curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\n \"NICSupportsIPv6\"\n ]\n\n # obtain DHCPv4 Config and OEM\n try:\n if \"DHCPv4\" in list(curr_sel.dict.keys()) and \"DHCPv4\" in list(\n ethernet_data.keys()\n ):\n dhcpv4curr = copy.deepcopy(curr_sel.dict[\"DHCPv4\"])\n dhcpv4conf = copy.deepcopy(ethernet_data[\"DHCPv4\"])\n except (KeyError, NameError, TypeError, AttributeError):\n errors.append(\"Unable to find Redfish DHCPv4 Settings.\\n\")\n finally:\n try:\n oem_dhcpv4curr = copy.deepcopy(\n curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"DHCPv4\"]\n )\n oem_dhcpv4conf = copy.deepcopy(\n ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"DHCPv4\"]\n )\n ipv4curr = copy.deepcopy(\n curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"IPv4\"]\n )\n ipv4conf = copy.deepcopy(\n ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"IPv4\"]\n )\n except (KeyError, NameError):\n errors.append(\"Unable to find OEM Keys for DHCPv4 or IPv4\")\n\n try:\n if support_ipv6:\n if \"DHCPv6\" in list(curr_sel.dict.keys()) and \"DHCPv6\" in list(\n ethernet_data.keys()\n ):\n dhcpv6curr = copy.deepcopy(curr_sel.dict[\"DHCPv6\"])\n dhcpv6conf = copy.deepcopy(ethernet_data[\"DHCPv6\"])\n else:\n self.rdmc.ui.warn(\"NIC Does not support IPv6.\")\n except (KeyError, NameError, TypeError, AttributeError):\n errors.append(\"Unable to find Redfish DHCPv6 Settings.\\n\")\n finally:\n try:\n oem_dhcpv6curr = copy.deepcopy(\n curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"DHCPv6\"]\n )\n oem_dhcpv6conf = copy.deepcopy(\n ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"DHCPv6\"]\n )\n ipv6curr = copy.deepcopy(\n curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"IPv6\"]\n )\n ipv6conf = copy.deepcopy(\n ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"IPv6\"]\n )\n except (KeyError, NameError):\n errors.append(\"Unable to find OEM Keys for DHCPv6 or IPv6\")\n\n try:\n # if DHCP Enable request but not currently enabled\n if (\n dhcpv4conf.get(\"DHCPEnabled\")\n and not curr_sel.dict[\"DHCPv4\"][\"DHCPEnabled\"]\n ):\n self.rdmc.app.patch_handler(\n _path, {\"DHCPv4\": {\"DHCPEnabled\": True}}, silent=True\n )\n self.rdmc.ui.printer(\"DHCP Enabled.\\n\")\n # if DHCP Disable request but currently enabled\n elif not dhcpv4conf[\"DHCPEnabled\"] and curr_sel.dict[\"DHCPv4\"][\"DHCPEnabled\"]:\n self.rdmc.app.patch_handler(\n _path, {\"DHCPv4\": {\"DHCPEnabled\": False}}, silent=True\n )\n dhcpv4conf[\"UseDNSServers\"] = False\n dhcpv4conf[\"UseNTPServers\"] = False\n dhcpv4conf[\"UseGateway\"] = False\n dhcpv4conf[\"UseDomainName\"] = False\n self.rdmc.ui.printer(\"DHCP Disabled.\\n\")\n except (KeyError, NameError, TypeError, AttributeError):\n # try with OEM\n try:\n if (\n oem_dhcpv4conf.get(\"Enabled\")\n and not curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\n \"DHCPv4\"\n ][\"Enabled\"]\n ):\n self.rdmc.app.patch_handler(\n _path,\n {\n \"Oem\": {\n self.rdmc.app.typepath.defs.oemhp: {\n \"DHCPv4\": {\"DHCPEnabled\": True}\n }\n }\n },\n silent=True,\n )\n self.rdmc.ui.printer(\"DHCP Enabled.\\n\")\n if \"IPv4Addresses\" in ethernet_data:\n del 
ethernet_data[\"IPv4Addresses\"]\n elif (\n not oem_dhcpv4conf.get(\"Enabled\")\n and curr_sel.dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"DHCPv4\"][\n \"Enabled\"\n ]\n ):\n oem_dhcpv4conf[\"UseDNSServers\"] = False\n oem_dhcpv4conf[\"UseNTPServers\"] = False\n oem_dhcpv4conf[\"UseGateway\"] = False\n oem_dhcpv4conf[\"UseDomainName\"] = False\n self.rdmc.ui.printer(\"DHCP Disabled.\\n\")\n except (KeyError, NameError) as exp:\n errors.append(\n \"Failure in parsing or removing data in OEM DHCPv4: %s.\\n\" % exp\n )\n\n try:\n # if the ClientIDType is custom and we are missing the ClientID then this property can\n # not be set.\n if \"ClientIdType\" in list(dhcpv4conf.keys()):\n if dhcpv4conf[\"ClientIdType\"] == \"Custom\" and \"ClientID\" not in list(\n dhcpv4conf.keys()\n ):\n del ethernet_data[\"DHCPv4\"][\"ClientIdType\"]\n elif \"ClientIdType\" in list(oem_dhcpv4conf.keys()):\n if oem_dhcpv4conf[\"ClientIdType\"] == \"Custom\" and \"ClientID\" not in list(\n oem_dhcpv4conf.keys()\n ):\n del ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\"DHCPv4\"][\n \"ClientIdType\"\n ]\n except (KeyError, NameError, TypeError, AttributeError):\n try:\n if \"ClientIdType\" in list(oem_dhcpv4conf.keys()):\n if oem_dhcpv4conf[\n \"ClientIdType\"\n ] == \"Custom\" and \"ClientID\" not in list(oem_dhcpv4conf.keys()):\n del ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\n \"DHCPv4\"\n ][\"ClientIdType\"]\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove property %s.\\n\" % exp)\n\n # special considerations go here for things that need to stay despite diffdict\n # EX: IPv4 addresses (aka bug). Changing only one property within the\n # IPv4StaticAddresses or IPv4Addresses causes an issue during load. Must include IP,\n # subnet mask and gateway (they can not be patched individually).\n # spec_dict = {'Oem': {self.rdmc.app.typepath.defs.oemhp: {}}}\n spec_dict = dict()\n if \"IPv4Addresses\" in ethernet_data:\n spec_dict[\"IPv4Addresses\"] = copy.deepcopy(ethernet_data[\"IPv4Addresses\"])\n try:\n if \"IPv4Addresses\" in ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp]:\n spec_dict[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\n \"IPv4Addresses\"\n ] = copy.deepcopy(\n ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\n \"IPv4StaticAddresses\"\n ]\n )\n except (KeyError, NameError, TypeError, AttributeError):\n pass\n\n # diff and overwrite the original payload\n ethernet_data = diffdict(ethernet_data, curr_sel.dict)\n ethernet_data.update(spec_dict)\n\n # verify dependencies on those flags which are to be applied are eliminated\n try:\n # delete Domain name and FQDN if UseDomainName for DHCPv4 or DHCPv6\n # is present. 
can wait to apply at the end\n if dhcpv4conf.get(\"UseDomainName\"): # or dhcpv6conf['UseDomainName']:\n if (\n \"DomainName\"\n in ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp]\n ):\n del ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\n \"DomainName\"\n ]\n if \"FQDN\" in ethernet_data:\n del ethernet_data[\"FQDN\"]\n except (KeyError, NameError, TypeError, AttributeError):\n # try again with OEM\n try:\n if oem_dhcpv4conf.get(\"UseDomainName\") or oem_dhcpv6conf.get(\n \"UseDomainName\"\n ):\n if (\n \"DomainName\"\n in ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp]\n ):\n del ethernet_data[\"Oem\"][self.rdmc.app.typepath.defs.oemhp][\n \"DomainName\"\n ]\n if \"FQDN\" in ethernet_data:\n del ethernet_data[\"FQDN\"]\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove property %s.\\n\" % exp)\n\n try:\n # delete DHCP4 DNSServers from IPV4 dict if UseDNSServers Enabled\n # can wait to apply at the end\n if dhcpv4conf.get(\"UseDNSServers\"): # and ethernet_data.get('NameServers'):\n json_traversal_delete_empty(\n data=ethernet_data, remove_list=[\"NameServers\"]\n )\n except (KeyError, NameError, TypeError, AttributeError):\n pass\n finally:\n try:\n if oem_dhcpv4conf.get(\"UseDNSServers\"):\n # del_sections('DNSServers', ethernet_data)\n json_traversal_delete_empty(\n data=ethernet_data, remove_list=[\"DNSServers\"]\n )\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove property %s.\\n\" % exp)\n try:\n if dhcpv4conf.get(\"UseWINSServers\"):\n json_traversal_delete_empty(\n data=ethernet_data, remove_list=[\"WINServers\"]\n )\n except (KeyError, NameError, TypeError, AttributeError):\n pass\n finally:\n try:\n if oem_dhcpv4conf.get(\"UseWINSServers\"):\n json_traversal_delete_empty(\n data=ethernet_data,\n remove_list=[\"WINServers\", \"WINSRegistration\"],\n )\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove property %s.\\n\" % exp)\n\n try:\n if dhcpv4conf.get(\"UseStaticRoutes\"):\n json_traversal_delete_empty(\n data=ethernet_data, remove_list=[\"StaticRoutes\"]\n )\n except (KeyError, NameError, TypeError, AttributeError):\n pass\n finally:\n try:\n if oem_dhcpv4conf.get(\"UseStaticRoutes\"):\n json_traversal_delete_empty(\n data=ethernet_data, remove_list=[\"StaticRoutes\"]\n )\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove property %s.\\n\" % exp)\n\n try:\n # if using DHCPv4, remove static addresses\n if dhcpv4conf.get(\"DHCPEnabled\"):\n json_traversal_delete_empty(\n data=ethernet_data,\n remove_list=[\"IPv4Addresses\", \"IPv4StaticAddresses\"],\n )\n except (KeyError, NameError, TypeError, AttributeError):\n pass\n finally:\n try:\n if oem_dhcpv4conf.get(\"Enabled\"):\n json_traversal_delete_empty(\n data=ethernet_data,\n remove_list=[\"IPv4Addresses\", \"IPv4StaticAddresses\"],\n )\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove property %s.\\n\" % exp)\n\n try:\n # if not using DHCPv6, remove static addresses from payload\n if dhcpv6conf.get(\"OperatingMode\") == \"Disabled\":\n json_traversal_delete_empty(\n data=ethernet_data,\n remove_list=[\"IPv6Addresses\", \"IPv6StaticAddresses\"],\n )\n except (KeyError, NameError, TypeError, AttributeError):\n pass\n finally:\n try:\n if not oem_dhcpv6conf.get(\"StatefulModeEnabled\"):\n json_traversal_delete_empty(\n data=ethernet_data,\n remove_list=[\"IPv6Addresses\", \"IPv6StaticAddresses\"],\n )\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove 
property %s.\\n\" % exp)\n\n flags = dict()\n if dhcpv4conf:\n flags[\"DHCPv4\"] = dhcpv4conf\n if dhcpv6conf:\n flags[\"DHCPv6\"] = dhcpv6conf\n if oem_dhcpv4conf:\n flags[\"Oem\"] = {self.rdmc.app.typepath.defs.oemhp: {\"DHCPv4\": oem_dhcpv4conf}}\n if oem_dhcpv6conf:\n flags[\"Oem\"] = {self.rdmc.app.typepath.defs.oemhp: {\"DHCPv6\": oem_dhcpv6conf}}\n\n # verify dependencies on those flags which are to be applied are eliminated\n\n try:\n self.rdmc.app.patch_handler(_path, flags, silent=True)\n except IloResponseError as excp:\n errors.append(\n \"iLO Responded with the following errors setting DHCP: %s.\\n\" % excp\n )\n\n try:\n if \"AutoNeg\" not in list(ethernet_data.keys()):\n json_traversal_delete_empty(\n data=ethernet_data, remove_list=[\"FullDuplex\", \"SpeedMbps\"]\n )\n\n # if Full Duplex exists, check if FullDuplexing enabled. If so,\n # remove Speed setting.\n elif \"FullDuplex\" in list(ethernet_data.keys()):\n json_traversal_delete_empty(\n data=ethernet_data, remove_list=[\"FullDuplex\", \"SpeedMbps\"]\n )\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove property %s.\\n\" % exp)\n\n try:\n if \"FrameSize\" in list(ethernet_data.keys()):\n json_traversal_delete_empty(data=ethernet_data, remove_list=[\"FrameSize\"])\n except (KeyError, NameError) as exp:\n errors.append(\"Unable to remove property %s.\\n\" % exp)\n\n self.patch_eth(_path, ethernet_data, errors)\n\n if errors and \"Virtual\" not in ident_name:\n raise RdmcError(\n \"Ethernet configuration errors were found collectively on adapter: \"\n \"'%s, %s'\\ntype: %s\\nerrors: %s\" % (ident_name, ident_id, _type, errors)\n )",
"def test_configured_mac(self, mock_discover, *args):\n mock_discover.return_value = [self.mock_insight_2, self.mock_insight]\n discover_event = Event()\n blk = EventWeMoDiscovery(discover_event)\n # Looking for the 2nd insight in the list only\n self.configure_block(blk, {'device_mac': 'mac'})\n blk.start()\n self.assertTrue(discover_event.wait(1))\n self.assertEqual(blk.device, self.mock_insight)\n blk.stop()\n\n # if the specified MAC isn't found discovery continues\n discover_event.clear()\n blk = EventWeMoDiscovery(discover_event)\n self.configure_block(blk, {'device_mac': 'other'})\n blk.start()\n # Wait some time but don't expect discover to actually finish\n self.assertFalse(discover_event.wait(0.2))\n self.assertIsNone(blk.device)\n self.assertTrue(blk._discovering)\n blk.stop()",
"def _setup_io_devices(self) -> None:\n # Add PCI\n self.platform.pci_host.pio = self.iobus.mem_side_ports\n\n # Add Ethernet card\n self.ethernet = IGbE_e1000(\n pci_bus=0, pci_dev=0, pci_func=0, InterruptLine=1, InterruptPin=1\n )\n\n self.ethernet.host = self.platform.pci_host\n self.ethernet.pio = self.iobus.mem_side_ports\n self.ethernet.dma = self.iobus.cpu_side_ports\n\n if self.get_cache_hierarchy().is_ruby():\n for device in self._off_chip_devices + self._on_chip_devices:\n device.pio = self.iobus.mem_side_ports\n\n else:\n for device in self._off_chip_devices:\n device.pio = self.iobus.mem_side_ports\n for device in self._on_chip_devices:\n device.pio = self.get_cache_hierarchy().get_mem_side_port()\n\n self.bridge = Bridge(delay=\"10ns\")\n self.bridge.mem_side_port = self.iobus.cpu_side_ports\n self.bridge.cpu_side_port = (\n self.get_cache_hierarchy().get_mem_side_port()\n )\n self.bridge.ranges = [\n AddrRange(dev.pio_addr, size=dev.pio_size)\n for dev in self._off_chip_devices\n ]\n\n # PCI\n self.bridge.ranges.append(AddrRange(0x2F000000, size=\"16MB\"))\n self.bridge.ranges.append(AddrRange(0x30000000, size=\"256MB\"))\n self.bridge.ranges.append(AddrRange(0x40000000, size=\"512MB\"))",
"def bootstrap_spin(self):\n\n if self.spins > 300:\n # too many spins with no result -> give up\n self.stop()\n self.start()\n return\n\n (ridx, match, res) = self.tn.expect([b\"Performing automatic\"], 1)\n if match: # got a match!\n if ridx == 0: # login\n self.logger.debug(\"VM started\")\n\n self.wait_write(\"\", wait=\"(qemu)\", con=self.qm)\n\n # To allow access to aux0 serial console\n self.logger.debug(\"Writing to QEMU Monitor\")\n\n # Cred to @plajjan for this one\n commands = \"\"\"\\x04\n\n\nsystem-view\nuser-interface aux 0\nauthentication-mode none\nuser-role network-admin\nquit\n\n\"\"\"\n\n key_map = {\n '\\x04': 'ctrl-d',\n ' ': 'spc',\n '-': 'minus',\n '\\n': 'kp_enter'\n }\n\n qemu_commands = [ \"sendkey {}\".format(key_map.get(c) or c) for c in commands ]\n\n for c in qemu_commands:\n self.wait_write(c, wait=\"(qemu)\", con=self.qm)\n # Pace the characters sent via QEMU Monitor\n time.sleep(0.1)\n\n self.logger.debug(\"Done writing to QEMU Monitor\")\n self.logger.debug(\"Switching to line aux0\")\n\n self.tn = telnetlib.Telnet(\"127.0.0.1\", 5000 + self.num)\n\n # run main config!\n self.bootstrap_config()\n # close telnet connection\n self.tn.close()\n # startup time?\n startup_time = datetime.datetime.now() - self.start_time\n self.logger.info(\"Startup complete in: %s\" % startup_time)\n # mark as running\n self.running = True\n return\n\n # no match, if we saw some output from the router it's probably\n # booting, so let's give it some more time\n if res != b'':\n self.logger.trace(\"OUTPUT: %s\" % res.decode())\n # reset spins if we saw some output\n self.spins = 0\n\n self.spins += 1\n\n return",
"def setup(self, tenant, inside_vlan_arg, outside_vlan_arg,\n inside_ip, inside_mask, inside_gw, inside_sec_gw,\n outside_ip, outside_mask, outside_gw, outside_sec_gw,\n interface_in, interface_out):\n LOG.debug(\"asa_setup: %s %d %d %s %s %s %s\",\n tenant, inside_vlan_arg, outside_vlan_arg,\n inside_ip, inside_mask, outside_ip, outside_mask)\n inside_vlan = str(inside_vlan_arg)\n outside_vlan = str(outside_vlan_arg)\n context = tenant\n cmds = [\"conf t\", \"changeto system\"]\n inside_int = interface_in + '.' + inside_vlan\n cmds.append(\"int \" + inside_int)\n cmds.append(\"vlan \" + inside_vlan)\n outside_int = interface_out + '.' + outside_vlan\n cmds.append(\"int \" + outside_int)\n cmds.append(\"vlan \" + outside_vlan)\n cmds.append(\"context \" + context)\n cmds.append(\"allocate-interface \" + inside_int)\n cmds.append(\"allocate-interface \" + outside_int)\n cmds.append(\"config-url disk0:/\" + context + \".cfg\")\n cmds.append(\"write memory\")\n cmds.append(\"changeto context \" + context)\n cmds.append(\"int \" + inside_int)\n cmds.append(\"nameif Inside\")\n cmds.append(\"security-level 100\")\n cmds.append(\"ip address \" + inside_ip + \" \" + inside_mask)\n cmds.append(\"int \" + outside_int)\n cmds.append(\"nameif Outside\")\n cmds.append(\"security-level 0\")\n cmds.append(\"ip address \" + outside_ip + \" \" + outside_mask)\n\n cmds.append(\"router ospf 1\")\n cmds.append(\"network \" + inside_ip + \" \" + inside_mask + \" area 0\")\n cmds.append(\"network \" + outside_ip + \" \" + outside_mask + \" area 0\")\n cmds.append(\"area 0\")\n cmds.append(\"route Outside 0.0.0.0 0.0.0.0 \" + outside_gw + \" 1\")\n cmds.append(\"route Outside 0.0.0.0 0.0.0.0 \" + outside_sec_gw + \" 1\")\n cmds.append(\"end\")\n cmds.append(\"write memory\")\n\n if tenant not in self.tenant_rule:\n self.tenant_rule[tenant] = dict()\n self.tenant_rule[tenant]['rule_lst'] = []\n\n data = {\"commands\": cmds}\n return self.rest_send_cli(data)",
"def set_console_xen(self):\n print \"\"\n self.exec_cmd(\"echo \\\"xvc0\\\" >> %s/etc/securetty\" % self.rep_vhosts_vm) \n if os.path.isfile(\"%s/etc/inittab\" % self.rep_vhosts_vm):\n self.exec_cmd(\"echo \\\"7:2345:respawn:/sbin/getty 38400 xvc0\\\" >> %s/etc/inittab\" % self.rep_vhosts_vm) \n\n if os.path.isfile(\"%s/etc/event.d/tty1\" % self.rep_vhosts_vm):\n self.exec_cmd(\"cp %s/etc/event.d/tty1 %s/etc/event.d/xvc0\" % (self.rep_vhosts_vm,self.rep_vhosts_vm))\n self.exec_cmd(\"sed -i \\\"s@tty1@xvc0@\\\" %s/etc/event.d/xvc0\" % self.rep_vhosts_vm)\n \n if os.path.isfile(\"%s/etc/init/tty1.conf\" % self.rep_vhosts_vm):\n self.exec_cmd(\"cp %s/etc/init/tty1.conf %s/etc/init/xvc0.conf\" % (self.rep_vhosts_vm,self.rep_vhosts_vm))\n self.exec_cmd(\"sed -i \\\"s@tty1@xvc0@\\\" %s/etc/init/xvc0.conf\" % self.rep_vhosts_vm)",
"def setup_scaleio(self, ipaddr, args):\n\n # get the network interface name\n command = 'ip -o link show | grep -v \"lo:\" | grep \"UP\" | awk \\'{print $2}\\''\n rc, interface, error = self.node_execute_command(ipaddr, args.USERNAME, args.PASSWORD, command)\n interface = interface.split(':')[0]\n\n # get the device we will add to scaleio\n # this is the user specified value (on the command line)\n # - if that device is specified in the /etc/raw-devices file\n # or\n # - if the /etc/raw-devices file does not exist\n # otherwise, it is the first device in /etc/raw-devices\n #\n # NOTE: another way to detect not partioned disks is to run\n # partprobe -d -s <device>\n # Unpartitioned devices will return NO output\n siodevice = \"\"\n command = 'grep {} /etc/raw-devices'.format(args.scaleio_device)\n rc, siodevice, error = self.node_execute_command(ipaddr, args.USERNAME, args.PASSWORD, command)\n siodevice=self._get_first_token(siodevice)\n if (siodevice is None or siodevice == \"\"):\n command = 'head -n 1 /etc/raw-devices 2>/dev/null || echo {}'.format(args.scaleio_device)\n rc, siodevice, error = self.node_execute_command(ipaddr, args.USERNAME, args.PASSWORD, command)\n siodevice=self._get_first_token(siodevice)\n if (siodevice is None or siodevice == \"\"):\n print(\"Unable to determine which device to add to scaleio\")\n raise Exception()\n\n\t print(\"Will add {} to ScaleIO\".format(siodevice))\n\n _commands = []\n # install some pre-reqs\n _commands.append(self.ubuntu_only_command(\"apt-add-repository -y -u ppa:ansible/ansible\"))\n _commands.append(self.ubuntu_only_command('apt-get install -y ansible git wget'))\n _commands.append(self.centos_only_command('yum install -y ansible git wget'))\n _commands.append(self.redhat_only_command('yum install -y git wget'))\n _commands.append(self.redhat_only_command('(curl https://bootstrap.pypa.io/get-pip.py | python) && pip install ansible'))\n _commands.append(self.sles_only_command(\"zypper install -y python-setuptools && easy_install pip && pip install paramiko ansible\"))\n # clone the ansible-scaleio playbooks and customize them\n _commands.append('cd /; mkdir git; chmod -R 777 /git')\n _commands.append(\"cd /git && git clone https://github.com/eric-young/ansible-scaleio.git\")\n _commands.append(\"mkdir -p /git/files && mkdir -p /git/temp\")\n _commands.append(\"cd /git/temp && \"\n \"wget -r --no-parent -A '*.tar' {} || true\".format(args.PACKAGE_URL))\n _commands.append(\"cd /git/temp && \"\n \"wget -r --no-parent -A '*.deb' {} || true\".format(args.PACKAGE_URL))\n _commands.append(\"cd /git/temp && \"\n \"wget -r --no-parent -A '*.rpm' {} || true\".format(args.PACKAGE_URL))\n _commands.append(\"cd /git/temp && find . 
-type f -exec mv {} /git/files \\;\")\n _commands.append(\"rm -rf /git/temp\")\n _commands.append(\"cd /git/ansible-scaleio && cp hosts-3_node hosts\")\n _commands.append(\"cd /git/ansible-scaleio && sed -i 's|NODE0|{}|g' hosts\".format(args.IP[0]))\n _commands.append(\"cd /git/ansible-scaleio && sed -i 's|NODE1|{}|g' hosts\".format(args.IP[1]))\n _commands.append(\"cd /git/ansible-scaleio && sed -i 's|NODE2|{}|g' hosts\".format(args.IP[2]))\n _commands.append(\"cd /git/ansible-scaleio && sed -i 's|PASSWORD|{}|g' hosts\".format(args.PASSWORD))\n _commands.append(\"cd /git/ansible-scaleio/group_vars && sed -i 's|80|{}|g' all\".format(args.gateway_http_port))\n _commands.append(\"cd /git/ansible-scaleio/group_vars && sed -i 's|443|{}|g' all\".format(args.gateway_ssl_port))\n _commands.append(\"cd /git/ansible-scaleio/group_vars && sed -i 's|domain1|{}|g' all\".format(args.domain))\n _commands.append(\"cd /git/ansible-scaleio/group_vars && sed -i 's|pool1|{}|g' all\".format(args.pool))\n _commands.append(\"cd /git/ansible-scaleio/group_vars && sed -i 's|/dev/sdb|{}|g' all\".format(siodevice))\n _commands.append(\"cd /git/ansible-scaleio/group_vars && sed -i 's|eth1|{}|g' all\".format(interface))\n _commands.append(\"cd /git/ansible-scaleio/group_vars && sed -i 's|5_node|3_node|g' all\")\n\n self.node_execute_multiple(ipaddr, args.USERNAME, args.PASSWORD, _commands)\n\n if not args.preponly:\n self.node_execute_command(ipaddr,\n args.USERNAME,\n args.PASSWORD,\n \"cd /git/ansible-scaleio && ansible-playbook -f 1 -i hosts site-no-gui-no-sdc.yml\")\n else:\n print(\"To setup ScaleIO, log onto {} as root and run:\".format(args.IP[0]))\n print(\" \\\"cd /git/ansible-scaleio && ansible-playbook -f 1 -i hosts site-no-gui-no-sdc.yml\\\"\")",
"def initialize_dpdk_framework(node, if1, if2, nic_driver):\n if node[u\"type\"] == NodeType.DUT:\n pci_address1 = Topology.get_interface_pci_addr(node, if1)\n pci_address2 = Topology.get_interface_pci_addr(node, if2)\n\n command = f\"{Constants.REMOTE_FW_DIR}/{Constants.RESOURCES_LIB_SH}\"\\\n f\"/entry/init_dpdk.sh \" \\\n f\"{nic_driver} {pci_address1} {pci_address2}\"\n message = u\"Initialize the DPDK failed!\"\n exec_cmd_no_error(node, command, timeout=600, message=message)",
"def set_interface(self, ifname):\n \n if not self._slave_dhcp_process is None:\n raise Exception('DhcpClientAlreadyStarted')\n \n self._ifname = ifname",
"def __init__(__self__, *,\n mac_address: str,\n name: str):\n pulumi.set(__self__, \"mac_address\", mac_address)\n pulumi.set(__self__, \"name\", name)",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"cat /proc/uptime\", \"hostnamectl\",\n \"cat /etc/os-release\"], None, 'text')",
"def mpii_setup():\n base_command = (\n 'python mpii.py '\n '--train_file %train_file% '\n '--test_file %test_file% '\n '--arch %arch% '\n '--feat_keys %feat_keys% '\n '--out_dir %out_dir% '\n '--learning_rate %learning_rate% '\n '--lstm_hidden_dim %lstm_hidden_dim% '\n '--image_dir /localhome/kwaki/frames '\n '--cuda_device 0 '\n '--hantman_mini_batch=10 '\n '--hantman_perframeloss=WEIGHTED_MSE '\n '--seq_len=5000 '\n '--total_epochs=100 '\n '--hantman_perframe_weight=100.0 '\n '--hantman_struct_weight=1.0 '\n '--hantman_tp=10.0 '\n '--hantman_fp=0.25 '\n '--hantman_fn=20.0 '\n '--reweight --normalize'\n )\n\n # main parameters\n # 'val_file': '/nrs/branson/kwaki/data/20180328_mpiicooking2',\n main_params = {\n 'train_file': '/nrs/branson/kwaki/data/20180328_mpiicooking2/temp_data/hdf5/train.hdf5',\n 'test_file': '/nrs/branson/kwaki/data/20180328_mpiicooking2/temp_data/hdf5/test.hdf5',\n 'arch': 'bidirconcat',\n 'feat_keys': 'vgg',\n 'out_dir': '',\n 'learning_rate': '',\n 'lstm_hidden_dim': ''\n }\n\n # learning_rates = [0.01, 0.001, 0.0001]\n # hidden_dims = [64, 128, 256, 512]\n learning_rates = [0.0001]\n hidden_dims = [256]\n\n output_dir = '/nrs/branson/kwaki/outputs/20180403_mpii_sweep_test'\n # output_dir = '/nrs/branson/kwaki/outputs/20180411_mpii_tests'\n\n return base_command, main_params, learning_rates, hidden_dims, output_dir",
"def bootstrap():\n validate_configurator_version()\n\n # put new mkinitcpio.conf in place\n run(\"mv /etc/mkinitcpio.conf.pacnew /etc/mkinitcpio.conf\")\n sed(\"/etc/mkinitcpio.conf\",\n 'MODULES=\"\"',\n 'MODULES=\"xen-blkfront xen-fbfront xen-kbdfront xen-netfront xen-pcifront xenbus_probe_frontend xenfs\"') # nopep8\n sed(\"/etc/mkinitcpio.conf\",\n 'HOOKS=\"base udev autodetect modconf block filesystems keyboard fsck',\n 'HOOKS=\"base udev block filesystems shutdown autodetect\"')\n\n # upgrade pacakges\n run(\"pacman --noconfirm -Syu\")\n\n # put new pacman.conf in place\n run(\"mv /etc/pacman.conf.pacnew /etc/pacman.conf\")\n\n # install essential packages\n run(\"pacman --noconfirm -S base-devel\")\n run(\"pacman --noconfirm -S curl git rsync\")\n\n # create a user, named 'aur', to safely install AUR packages under fakeroot\n # uid and gid values auto increment from 1000\n # to prevent conficts set the 'aur' user's gid and uid to 902\n run(\"groupadd -g 902 aur && useradd -m -u 902 -g 902 -G wheel aur\")\n\n # allow users in the wheel group to sudo without a password\n uncomment(\"/etc/sudoers\", \"wheel.*NOPASSWD\")\n\n # install yaourt and upgrade non-pacman rackspace installed packages\n sudo(\"rm -rf /home/aur/.builds && mkdir /home/aur/.builds/\", user=\"aur\")\n with cd(\"/home/aur/.builds/\"):\n sudo(\"bash <(curl aur.sh) -si --noconfirm package-query yaourt\", user=\"aur\")\n sudo(\"yaourt --noconfirm -S xe-guest-utilities\", user=\"aur\")\n\n # allow fabric to sftp with contrib.files.put\n # http://stackoverflow.com/questions/10221839/cant-use-fabric-put-is-there-any-server-configuration-needed # nopep8\n # change before reboot because then the sshd config will be reloaded\n # sed(\"/etc/ssh/sshd_config\", \"Subsystem sftp /usr/lib/openssh/sftp-server\",\n # \"Subsystem sftp internal-sftp\")\n\n # systemd\n sed(\"/boot/grub/menu.lst\",\n \"kernel /boot/vmlinuz-linux root=/dev/xvda1 ro console=hvc0\",\n \"kernel /boot/vmlinuz-linux root=/dev/xvda1 ro console=hvc0 init=/usr/lib/systemd/systemd\")\n reboot()\n if not contains(\"/proc/1/comm\", \"systemd\"):\n abort(\"systemd is not installed properly\")\n server = [s for s in env.bootmachine_servers if s.public_ip == env.host][0]\n run(\"hostnamectl set-hostname {0}\".format(server.name))\n run(\"mv /etc/locale.gen.pacnew /etc/locale.gen.conf\")\n uncomment(\"/etc/locale.gen\", \"en_US.UTF-8 UTF-8\")\n uncomment(\"/etc/locale.gen\", \"en_US ISO-8859-1\")\n run(\"locale-gen\")\n run(\"localectl set-locale LANG='en_US.utf8'\")\n run(\"timedatectl set-timezone US/Central\")",
"def __init__(self, pci_fid: str, **kwargs):\n super().__init__(**kwargs)\n self.fid = pci_fid",
"def setup_salt():\n server = [s for s in env.bootmachine_servers if s.public_ip == env.host][0]\n\n if env.host == env.master_server.public_ip:\n run(\"touch /etc/salt/master\")\n append(\"/etc/salt/master\", \"file_roots:\\n base:\\n - {0}\".format(\n settings.REMOTE_STATES_DIR))\n append(\"/etc/salt/master\", \"pillar_roots:\\n base:\\n - {0}\".format(\n settings.REMOTE_PILLARS_DIR))\n run(\"systemctl enable salt-master\")\n run(\"touch /etc/salt/minion\")\n append(\"/etc/salt/minion\", \"master: {0}\".format(env.master_server.private_ip))\n append(\"/etc/salt/minion\", \"id: {0}\".format(server.name))\n append(\"/etc/salt/minion\", \"grains:\\n roles:\")\n for role in server.roles:\n append(\"/etc/salt/minion\", \" - {0}\".format(role))\n run(\"systemctl enable salt-minion\")",
"def configure(args):\n\n emu = Emulator(args,\n cpu='68030',\n frequency=24 * 1000 * 1000)\n # initially only the EEPROM exists; aliased at 0 all the way up to 0xfe000000\n # we only map the low and high aliases, as the intermediates aren't interesting\n emu.add_memory(base=0, size=512 * 1024, writable=False, from_file=args.rom)\n emu.add_memory(base=0xfe000000, size=512 * 1024, writable=False, from_file=args.rom)\n\n emu.add_device(args,\n MC68681,\n address=0xfffff000,\n interrupt=m68k.IRQ_2,\n register_arrangement='16-bit-doubled')\n emu.add_device(args,\n CompactFlash,\n address=0xffffe000,\n register_arrangement='8-bit' if args.cf_width == 8 else '16-bit')\n emu.add_device(args,\n CB030Remap,\n address=0xffff8000)\n emu.add_device(args,\n CB030Ticker,\n address=0xffff9000,\n interrupt=m68k.IRQ_6)\n return emu",
"def init(self):\n self.reset()\n\n self.__interface.send_command('POWER_SETTING')\n self.__interface.send_data(0x37)\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('PANEL_SETTING')\n self.__interface.send_data(0xCF)\n self.__interface.send_data(0x08)\n\n self.__interface.send_command('BOOSTER_SOFT_START')\n self.__interface.send_data(0xc7)\n self.__interface.send_data(0xcc)\n self.__interface.send_data(0x28)\n\n self.__interface.send_command('POWER_ON')\n self.wait_until_idle()\n\n self.__interface.send_command('PLL_CONTROL')\n self.__interface.send_data(0x3c)\n\n self.__interface.send_command('TEMPERATURE_CALIBRATION')\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('VCOM_AND_DATA_INTERVAL_SETTING')\n self.__interface.send_data(0x77)\n\n self.__interface.send_command('TCON_SETTING')\n self.__interface.send_data(0x22)\n\n self.__interface.send_command('TCON_RESOLUTION')\n self.__interface.send_data(0x02) #source 640\n self.__interface.send_data(0x80)\n self.__interface.send_data(0x01) #gate 384\n self.__interface.send_data(0x80)\n\n self.__interface.send_command('VCM_DC_SETTING')\n self.__interface.send_data(0x1E) #decide by LUT file\n\n self.__interface.send_command(0xe5, False) #FLASH MODE\n self.__interface.send_data(0x03)",
"def doInitializeDevice(self):\n try:\n\n if self.serialNumber == \"*\" or self.serialNumber == \".*\":\n self.device = OISpectrometer.matchUniqueUSBDevice( idProduct=self.idProduct)\n else:\n self.device = OISpectrometer.matchUniqueUSBDevice( idProduct=self.idProduct,\n serialNumber=self.serialNumber)\n\n \"\"\" Below are all the USB protocol details. This requires reading\n the USB documentation, the Spectrometer documentation and many other \n details. What follows may sound like gibberish.\n\n There is a single USB Configuration (default) with a single USB Interface \n without alternate settings, so we can use (0,0).\n \"\"\"\n self.device.set_configuration()\n self.configuration = self.device.get_active_configuration()\n self.interface = self.configuration[(0,0)]\n\n \"\"\"\n We are working on the reasonable assumption from the documentation\n that the first input and output endpoints are the main endpoints and the\n second input is the data endpoint. If that is not the case, the subclass can\n simply reassign the endpoints properly in its __init__ function. \n \"\"\"\n for endpoint in self.interface:\n \"\"\" The endpoint address has the 8th bit set to 1 when it is an input.\n We can check with the bitwise operator & (and) 0x80. It will be zero\n if an output and non-zero if an input. \"\"\"\n if endpoint.bEndpointAddress & 0x80 != 0:\n self.inputEndpoints.append(endpoint)\n else:\n self.outputEndpoints.append(endpoint)\n\n\n if len(self.inputEndpoints) >= 2 or len(self.outputEndpoints) > 0:\n \"\"\" We have at least 2 input endpoints and 1 output. We assign the\n endpoints according to the documentation, otherwise\n the subclass will need to assign them.\"\"\"\n self.epCommandOut = self.outputEndpoints[self.epCommandOutIdx]\n self.epMainIn = self.inputEndpoints[self.epMainInIdx]\n self.epSecondaryIn = self.inputEndpoints[self.epSecondaryInIdx]\n self.epParameters = self.inputEndpoints[self.epParametersIdx]\n self.epStatus = self.inputEndpoints[self.epStatusIdx]\n\n self.flushEndpoints()\n self.sendCommand(b'0x01')\n time.sleep(0.1)\n self.getCalibration()\n except Exception as err:\n raise UnableToInitialize(\"Error when initializing device: {0}\".format(err))",
"def ifaces_init(*ifnames):\n for ifname in ifnames:\n _set_eth_admin_state(ifname, schema.InterfaceState.ABSENT)",
"def __init__(self):\n self.hw = dev_hwinfo.device()\n self.ethKey=\"Ethernet\"\n self.ethAllInterfaceName=[]\n dir_path = os.path.dirname(os.path.realpath(__file__))\n self.myDefine = init_define.main()\n self.mPlatform=self.hw.getPlatform()",
"def init_ip(list_of_ips):\n pms = db['phy_mach']\n\n data = []\n id_counter = 1\n\n for ip in list_of_ips:\n info = {}\n\n info['pmid'] = id_counter\n info['vm_count'] = 0\n info['vm_id'] = []\n info['ip'] = ip\n info['uri'] = 'qemu+ssh://'+str(ip)+'/system'\n if ip == '127.0.0.1':\n info['uri'] = 'qemu:///system'\n available = info['available'] = {}\n free = info['free'] = {}\n # TODO: Sun Aug 30 03:32:25 IST 2015 Error Handling.\n conn = libvirt.open(info['uri'])\n available['vcpu'] = free['vcpu'] = conn.getMaxVcpus(None)\n \n mem = conn.getMemoryStats(0) # Returns memory in KiB\n available['memory'] = mem['total']/1024\n \n data.append(info)\n conn.close()\n id_counter += 1\n \n pms.insert(data)",
"def bios_uuid(self, bios_uuid):\n\n self._bios_uuid = bios_uuid",
"def _Install(vm):\n if vm.OS_TYPE not in MOFED_OS_MAPPING:\n raise ValueError('OS type {} not in {}'.format(vm.OS_TYPE,\n sorted(MOFED_OS_MAPPING)))\n driver = MOFED_DRIVER.format(version=FLAGS.mofed_version,\n os=MOFED_OS_MAPPING[vm.OS_TYPE])\n vm.InstallPackages('libdapl2 libmlx4-1')\n try:\n vm.RemoteCommand('curl -fSsL {} | tar -zxpf -'.format(driver))\n except:\n raise errors.Setup.InvalidSetupError('Failed to download {}'.format(driver))\n stdout, _ = vm.RemoteCommand('cd MLNX_OFED_LINUX-* && sudo ./mlnxofedinstall '\n '--force')\n if not regex_util.ExtractExactlyOneMatch(r'Installation passed successfully',\n stdout):\n raise errors.Benchmarks.PrepareException(\n 'Mellanox OpenFabrics driver isn\\'t installed successfully.')\n vm.RemoteCommand('sudo /etc/init.d/openibd restart')\n vm.RemoteCommand(\"sudo sed -i -e 's/# OS.EnableRDMA=y/\"\n \"OS.EnableRDMA=y/g' /etc/waagent.conf\")\n vm.RemoteCommand(\"sudo sed -i -e 's/# OS.UpdateRdmaDriver=y/\"\n \"OS.UpdateRdmaDriver=y/g' /etc/waagent.conf\")\n # https://docs.microsoft.com/en-us/azure/virtual-machines/linux/sizes-hpc#rdma-capable-instances\n vm.RemoteCommand('cat << EOF | sudo tee -a /etc/security/limits.conf\\n'\n '* hard memlock unlimited\\n'\n '* soft memlock unlimited\\n'\n '* hard nofile 65535\\n'\n '* soft nofile 65535\\n'\n 'EOF')",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def setup_with_endpoint(self, mac='00:11:22:33:33:33'):\n args = self.get_args()\n self.write_config_file(self.create_config_file(), args)\n\n execute_tool(args, test_mode=True)\n\n ip = '3.4.3.4'\n self.assertFalse(self.verify_remote_site_has_entry(mac, ip, 'intersite-testsuite', 'l3out', 'intersite-testsuite-app-epg'))\n\n time.sleep(2)\n self.add_endpoint(mac, ip, 'intersite-testsuite', 'app', 'epg')\n return mac, ip",
"def get_boot_device(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).get_boot_device(task)\n else:\n return super(\n ipmitool.IPMIManagement, self).get_boot_device(task)",
"def setUp(self, mock_paramiko):\n self.installer = ODFEInstaller(\n \"ec2.amazon.com\", \"ec2\", \"ubuntu\", \"./ODFEAMIInstanceKey.pem\", \"1.4.0\", \"7.4.2\"\n )",
"def setup_vm_env(self, driver='igb_uio'):\n if self.env_done is False:\n self.bind_nic_driver(self.dut_ports[:1], driver=\"igb_uio\")\n self.used_dut_port = self.dut_ports[0]\n tester_port = self.tester.get_local_port(self.used_dut_port)\n self.tester_intf = self.tester.get_interface(tester_port)\n self.dut.generate_sriov_vfs_by_port(\n self.used_dut_port, 1, driver=driver)\n self.sriov_vfs_port = self.dut.ports_info[\n self.used_dut_port]['vfs_port']\n for port in self.sriov_vfs_port:\n port.bind_driver(self.vf_driver)\n time.sleep(1)\n self.dut_testpmd = PmdOutput(self.dut)\n time.sleep(1)\n vf0_prop = {'opt_host': self.sriov_vfs_port[0].pci}\n # set up VM0 ENV\n self.vm0 = QEMUKvm(self.dut, 'vm0', 'ddp_gtp')\n self.vm0.set_vm_device(driver=self.vf_assign_method, **vf0_prop)\n try:\n self.vm0_dut = self.vm0.start()\n if self.vm0_dut is None:\n raise Exception(\"Set up VM0 ENV failed!\")\n except Exception as e:\n self.destroy_vm_env()\n raise Exception(e)\n self.vm0_dut_ports = self.vm0_dut.get_ports('any')\n self.vm0_testpmd = PmdOutput(self.vm0_dut)\n self.env_done = True",
"def configure_ipu_system(config, device=\"cpu\"):\n if not (isinstance(config, IpuOptions)):\n raise Exception(\"`config` must be an IpuOptions instance\")\n\n g = ops.Graph()\n with g.as_default():\n with ops.device(device):\n cfg_op = gen_ipu_ops.ipu_configure_hardware(config.SerializeToString())\n\n with session_lib.Session(graph=g) as sess:\n sess.run(cfg_op)",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version|json\", \"show hostname\"], None,\n 'mixed')",
"def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901\n\n hass.data[DOMAIN] = {}\n\n # Parse configuration into a dict of device name to physical address\n # represented as a list of four elements.\n device_aliases = {}\n devices = base_config[DOMAIN].get(CONF_DEVICES, {})\n _LOGGER.debug(\"Parsing config %s\", devices)\n device_aliases.update(parse_mapping(devices))\n _LOGGER.debug(\"Parsed devices: %s\", device_aliases)\n\n platform = base_config[DOMAIN].get(CONF_PLATFORM, SWITCH)\n\n loop = (\n # Create own thread if more than 1 CPU\n hass.loop\n if multiprocessing.cpu_count() < 2\n else None\n )\n host = base_config[DOMAIN].get(CONF_HOST)\n display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)\n if host:\n adapter = TcpAdapter(host, name=display_name, activate_source=False)\n else:\n adapter = CecAdapter(name=display_name[:12], activate_source=False)\n hdmi_network = HDMINetwork(adapter, loop=loop)\n\n def _adapter_watchdog(now=None):\n _LOGGER.debug(\"Reached _adapter_watchdog\")\n event.call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n if not adapter.initialized:\n _LOGGER.info(\"Adapter not initialized; Trying to restart\")\n hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE)\n adapter.init()\n\n _adapter_watchdog_job = HassJob(_adapter_watchdog, cancel_on_shutdown=True)\n\n @callback\n def _async_initialized_callback(*_: Any):\n \"\"\"Add watchdog on initialization.\"\"\"\n return event.async_call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n\n hdmi_network.set_initialized_callback(_async_initialized_callback)\n\n def _volume(call: ServiceCall) -> None:\n \"\"\"Increase/decrease volume and mute/unmute system.\"\"\"\n mute_key_mapping = {\n ATTR_TOGGLE: KEY_MUTE_TOGGLE,\n ATTR_ON: KEY_MUTE_ON,\n ATTR_OFF: KEY_MUTE_OFF,\n }\n for cmd, att in call.data.items():\n if cmd == CMD_UP:\n _process_volume(KEY_VOLUME_UP, att)\n elif cmd == CMD_DOWN:\n _process_volume(KEY_VOLUME_DOWN, att)\n elif cmd == CMD_MUTE:\n hdmi_network.send_command(\n KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)\n )\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n _LOGGER.info(\"Audio muted\")\n else:\n _LOGGER.warning(\"Unknown command %s\", cmd)\n\n def _process_volume(cmd, att):\n if isinstance(att, (str,)):\n att = att.strip()\n if att == CMD_PRESS:\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n elif att == CMD_RELEASE:\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n else:\n att = 1 if att == \"\" else int(att)\n for _ in range(0, att):\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n\n def _tx(call: ServiceCall) -> None:\n \"\"\"Send CEC command.\"\"\"\n data = call.data\n if ATTR_RAW in data:\n command = CecCommand(data[ATTR_RAW])\n else:\n src = data.get(ATTR_SRC, ADDR_UNREGISTERED)\n dst = data.get(ATTR_DST, ADDR_BROADCAST)\n if ATTR_CMD in data:\n cmd = data[ATTR_CMD]\n else:\n _LOGGER.error(\"Attribute 'cmd' is missing\")\n return\n if ATTR_ATT in data:\n if isinstance(data[ATTR_ATT], (list,)):\n att = data[ATTR_ATT]\n else:\n att = reduce(lambda x, y: f\"{x}:{y:x}\", data[ATTR_ATT])\n else:\n att = \"\"\n command = CecCommand(cmd, dst, src, att)\n hdmi_network.send_command(command)\n\n def _standby(call: ServiceCall) -> None:\n hdmi_network.standby()\n\n def _power_on(call: ServiceCall) -> None:\n hdmi_network.power_on()\n\n def _select_device(call: ServiceCall) -> 
None:\n \"\"\"Select the active device.\"\"\"\n if not (addr := call.data[ATTR_DEVICE]):\n _LOGGER.error(\"Device not found: %s\", call.data[ATTR_DEVICE])\n return\n if addr in device_aliases:\n addr = device_aliases[addr]\n else:\n entity = hass.states.get(addr)\n _LOGGER.debug(\"Selecting entity %s\", entity)\n if entity is not None:\n addr = entity.attributes[\"physical_address\"]\n _LOGGER.debug(\"Address acquired: %s\", addr)\n if addr is None:\n _LOGGER.error(\n \"Device %s has not physical address\", call.data[ATTR_DEVICE]\n )\n return\n if not isinstance(addr, (PhysicalAddress,)):\n addr = PhysicalAddress(addr)\n hdmi_network.active_source(addr)\n _LOGGER.info(\"Selected %s (%s)\", call.data[ATTR_DEVICE], addr)\n\n def _update(call: ServiceCall) -> None:\n \"\"\"Update if device update is needed.\n\n Called by service, requests CEC network to update data.\n \"\"\"\n hdmi_network.scan()\n\n def _new_device(device):\n \"\"\"Handle new devices which are detected by HDMI network.\"\"\"\n key = f\"{DOMAIN}.{device.name}\"\n hass.data[DOMAIN][key] = device\n ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)\n discovery.load_platform(\n hass,\n ent_platform,\n DOMAIN,\n discovered={ATTR_NEW: [key]},\n hass_config=base_config,\n )\n\n def _shutdown(call):\n hdmi_network.stop()\n\n def _start_cec(callback_event):\n \"\"\"Register services and start HDMI network to watch for devices.\"\"\"\n hass.services.register(\n DOMAIN, SERVICE_SEND_COMMAND, _tx, SERVICE_SEND_COMMAND_SCHEMA\n )\n hass.services.register(\n DOMAIN, SERVICE_VOLUME, _volume, schema=SERVICE_VOLUME_SCHEMA\n )\n hass.services.register(\n DOMAIN,\n SERVICE_UPDATE_DEVICES,\n _update,\n schema=SERVICE_UPDATE_DEVICES_SCHEMA,\n )\n hass.services.register(DOMAIN, SERVICE_POWER_ON, _power_on)\n hass.services.register(DOMAIN, SERVICE_STANDBY, _standby)\n hass.services.register(DOMAIN, SERVICE_SELECT_DEVICE, _select_device)\n\n hdmi_network.set_new_device_callback(_new_device)\n hdmi_network.start()\n\n hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec)\n hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)\n return True",
"def _set_mac(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"mac\", rest_name=\"mac\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"mac must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"mac\", rest_name=\"mac\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__mac = t\n if hasattr(self, '_set'):\n self._set()",
"def _use_existing_configuration(self):\n HW_Init(self.ftdi, None)",
"def setUp(self):\n super().setUp()\n for intf in self.send_ifs:\n self.vapi.ip_reassembly_enable_disable(\n sw_if_index=intf.sw_if_index, enable_ip4=True\n )\n self.vapi.ip_reassembly_set(\n timeout_ms=0,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n expire_walk_interval_ms=10,\n )\n self.virtual_sleep(0.25)\n self.vapi.ip_reassembly_set(\n timeout_ms=1000000,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n expire_walk_interval_ms=10000,\n )",
"def test_register_insufficient_sapsf_metadata(self):\n self._configure_testshib_provider(\n identity_provider_type='sap_success_factors',\n metadata_source=TESTSHIB_METADATA_URL,\n other_settings='{\"key_i_dont_need\":\"value_i_also_dont_need\"}',\n )\n # Because we're getting details from the assertion, fall back to the initial set of details.\n self.USER_EMAIL = \"[email protected]\"\n self.USER_NAME = \"Me Myself And I\"\n self.USER_USERNAME = \"myself\"\n self._test_register()",
"def _init_hardware(self):\n return",
"def do_configure_partition(cls, part, source_params, creator, cr_workdir,\n oe_builddir, bootimg_dir, kernel_dir,\n native_sysroot):\n if not cls.__imgBiosObj:\n cls.__instanciateBIOSClass()\n\n bootloader = creator.ks.bootloader\n\n if not bootloader.configfile:\n splash = os.path.join(cr_workdir, \"/hdd/boot/splash.jpg\")\n if os.path.exists(splash):\n splashline = \"menu background splash.jpg\"\n else:\n splashline = \"\"\n\n syslinux_conf = \"\"\n syslinux_conf += \"PROMPT 0\\n\"\n syslinux_conf += \"TIMEOUT \" + str(bootloader.timeout) + \"\\n\"\n syslinux_conf += \"\\n\"\n syslinux_conf += \"ALLOWOPTIONS 1\\n\"\n syslinux_conf += \"\\n\"\n if splashline:\n syslinux_conf += \"%s\\n\" % splashline\n\n syslinux_conf += \"DEFAULT boot\\n\"\n syslinux_conf += \"LABEL boot\\n\"\n syslinux_conf += \" KERNEL mboot.c32\\n\"\n\n # Split the bootloader args at '---' to separate the Xen args\n # from the Linux kernel args.\n # The Xen args here are defaults; overridden by bootloader append.\n xen_args = \"console=com1,vga com1=115200,8n1\"\n kernel_append = \"\"\n if bootloader.append:\n separator_pos = bootloader.append.find('---')\n if separator_pos != -1:\n xen_args = bootloader.append[:separator_pos]\n kernel_append = bootloader.append[separator_pos+3:]\n else:\n kernel_append = bootloader.append\n\n kernel_args = \"label=boot root=%s %s\" % \\\n (creator.rootdev, kernel_append)\n\n syslinux_conf += \" APPEND /xen.gz %s --- /vmlinuz %s\" % \\\n (xen_args, kernel_args)\n\n initrd = source_params.get('initrd')\n if initrd:\n initrds = initrd.split(';')\n for initrd_file in initrds:\n syslinux_conf += \" --- /%s\" % os.path.basename(initrd_file)\n syslinux_conf += \"\\n\"\n\n logger.debug(\"Writing syslinux config %s/hdd/boot/syslinux.cfg\",\n cr_workdir)\n\n hdddir = \"%s/hdd/boot\" % cr_workdir\n install_cmd = \"install -d %s\" % hdddir\n exec_cmd(install_cmd)\n\n cfg = open(\"%s/hdd/boot/syslinux.cfg\" % cr_workdir, \"w\")\n cfg.write(syslinux_conf)\n cfg.close()\n\n else:\n cls.__imgBiosObj.do_configure_partition(part, source_params,\n creator, cr_workdir,\n oe_builddir, bootimg_dir,\n kernel_dir, native_sysroot)",
"def set_mac_address(self, iface):\n if os.path.exists(\"/sys/class/net/%s\" % iface):\n return open(\"/sys/class/net/%s/address\" % iface).read().strip()\n return \"none\"",
"def set_mac_address(self, iface):\n if os.path.exists(\"/sys/class/net/%s\" % iface):\n return open(\"/sys/class/net/%s/address\" % iface).read().strip()\n return \"none\"",
"async def init(self):\n logger.info(\"Init device: %s\", self._serial)\n self._callback(STATUS_INIT)\n\n self._init_binaries()\n self._init_apks()\n await self._init_forwards()\n\n await adb.shell(self._serial, \"/data/local/tmp/atx-agent server --stop\")\n await adb.shell(self._serial, \"/data/local/tmp/atx-agent server --nouia -d\")",
"def setUp(self):\n super().setUp()\n self.vapi.ip_reassembly_enable_disable(\n sw_if_index=self.src_if.sw_if_index,\n enable_ip6=True,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n )\n self.vapi.ip_reassembly_set(\n timeout_ms=0,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n expire_walk_interval_ms=10,\n is_ip6=1,\n )\n self.virtual_sleep(0.25)\n self.vapi.ip_reassembly_set(\n timeout_ms=1000000,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n type=VppEnum.vl_api_ip_reass_type_t.IP_REASS_TYPE_SHALLOW_VIRTUAL,\n expire_walk_interval_ms=10000,\n is_ip6=1,\n )",
"def setup_platform(hass, config, add_devices, discovery_info=None) -> None:\n friendly_name = config.get(CONF_FRIENDLY_NAME)\n mac_addr = config.get(CONF_MAC)\n add_devices([Switchmate(mac_addr, friendly_name)], True)",
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def setUp(self):\n super().setUp()\n self.vapi.ip_reassembly_enable_disable(\n sw_if_index=self.src_if.sw_if_index, enable_ip6=True\n )\n self.vapi.ip_reassembly_set(\n timeout_ms=0,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n expire_walk_interval_ms=10,\n is_ip6=1,\n )\n self.virtual_sleep(0.25)\n self.vapi.ip_reassembly_set(\n timeout_ms=1000000,\n max_reassemblies=1000,\n max_reassembly_length=1000,\n expire_walk_interval_ms=10000,\n is_ip6=1,\n )\n self.logger.debug(self.vapi.ppcli(\"show ip6-full-reassembly details\"))\n self.logger.debug(self.vapi.ppcli(\"show buffers\"))",
"def Execute(self, mac_address, device_name, release, version, pxe_server='192.168.100.4', pxe_user='root', pxe_pass='SolidF1r3', bash=False, csv=False, debug=False):\n self.ValidateArgs(locals())\n if debug:\n mylog.showDebug()\n else:\n mylog.hideDebug()\n if bash or csv:\n mylog.silence = True\n\n # Content and location for the PXE boot file\n pxe_file_name = '/tftpboot/pxelinux.cfg/01-{}'.format(mac_address.replace(':', '-'))\n pxe_file_contents = textwrap.dedent(\n '''\\\n DEFAULT BootImage\n TIMEOUT 1\n ONTIMEOUT BootImage\n PROMPT 0\n LABEL BootImage\n KERNEL images/fdva/solidfire-fdva-{release}-{version}/casper/vmlinuz\n INITRD images/fdva/solidfire-fdva-{release}-{version}/casper/initrd.lz\n APPEND console=tty0 ip=:::::eth0:dhcp boot=casper vga=791 fetch=ftp://{pxeServer}/images/fdva/solidfire-fdva-{release}-{version}/casper/filesystem.squashfs sf_start_rtfi=1 sf_test_hardware=0 --\n LABEL BootLocal\n localboot 0\n ''').format(release=release, version=version, pxeServer=pxe_server)\n\n # Connect to the PXE server\n try:\n client = libsf.ConnectSsh(pxe_server, pxe_user, pxe_pass)\n except libsf.SfError as e:\n mylog.error(str(e))\n return False\n\n # Write the content to a local temp file and upload that file to the PXE server\n with tempfile.NamedTemporaryFile() as temp:\n mylog.info('PXE config file: {}'.format(pxe_file_name))\n mylog.info('PXE file contents:')\n for line in pxe_file_contents.split('\\n'):\n mylog.raw(line)\n temp.write(pxe_file_contents)\n temp.flush()\n sftp = client.open_sftp()\n sftp.put(temp.name, pxe_file_name)\n sftp.close()\n\n client.close()\n mylog.passed('Successfully wrote PXE config file to PXE server')\n return True",
"def configure_nccl():\n os.environ[\"NCCL_SOCKET_IFNAME\"] = \"ib0\"\n os.environ[\"NCCL_IB_DISABLE\"] = \"1\"\n\n os.environ[\"NCCL_LAUNCH_MODE\"] = \"PARALLEL\"\n os.environ[\"NCCL_IB_HCA\"] = subprocess.getoutput(\n \"cd /sys/class/infiniband/ > /dev/null; for i in mlx5_*; \"\n \"do cat $i/ports/1/gid_attrs/types/* 2>/dev/null \"\n \"| grep v >/dev/null && echo $i ; done; > /dev/null\"\n )\n os.environ[\"NCCL_IB_GID_INDEX\"] = \"3\"\n os.environ[\"NCCL_IB_TC\"] = \"106\"",
"def boot():\r\n print \"\"\"\r\n ###### ## ## ### ## ## ## ## ######## ########\r\n ## ## ## ## ## ## ### ## ## ## ## ## ##\r\n ## #### ## ## #### ## ## ## ## ## ##\r\n ## ## ## ## ## ## ## ## ## ######## ######\r\n ## ## ######### ## #### ## ## ## ## ##\r\n ## ## ## ## ## ## ### ## ## ## ## ##\r\n ###### ## ## ## ## ## ####### ## ## ########\r\n\r\n Version %s-%s\r\n\r\n Multi Purpose Artificial Inelegance Program\r\n Copyright (c) Alexandre Gauthier 2010-2011\r\n All Rights Reserved\r\n \"\"\" % ( constants.VERSION, constants.TAGNAME )\r\n\r\n # Initialize log\r\n # TODO: The values should be read from config file.\r\n log.init_log('cyanure.log', 'DEBUG')\r\n\r\n logger.info(\"Cyanure system init: Version %s (%s)\" % (\r\n constants.VERSION, constants.TAGNAME ))",
"def boot(self):\n\n pass",
"def __init__(__self__, *,\n device_name: Optional[str] = None,\n ebs: Optional['outputs.ImageRecipeEbsInstanceBlockDeviceSpecification'] = None,\n no_device: Optional[str] = None,\n virtual_name: Optional[str] = None):\n if device_name is not None:\n pulumi.set(__self__, \"device_name\", device_name)\n if ebs is not None:\n pulumi.set(__self__, \"ebs\", ebs)\n if no_device is not None:\n pulumi.set(__self__, \"no_device\", no_device)\n if virtual_name is not None:\n pulumi.set(__self__, \"virtual_name\", virtual_name)",
"def Init(self, factory_reset=True):\n # Create a new serial device every time since the serial driver\n # on chameleon board is not very stable.\n result = self.CreateSerialDevice()\n\n if factory_reset:\n # Enter command mode to issue commands.\n # This must happen first, so that other commands work\n result = self.EnterCommandMode() and result\n\n # Do a factory reset to make sure it is in a known initial state.\n # Do the factory reset before proceeding to set parameters below.\n result = self.FactoryReset() and result\n\n # Set HID as the service profile.\n result = self.SetServiceProfileHID() and result\n\n # Set the HID device type.\n result = self.SetHIDType(self.device_type) and result\n\n # Set the default class of service.\n result = self.SetDefaultClassOfService() and result\n\n # Set the class of device (CoD) according to the hid device type.\n result = self.SetClassOfDevice(self.device_type) and result\n\n # Set authentication to the specified mode.\n if self.authentication_mode != PeripheralKit.OPEN_MODE:\n result = self.SetAuthenticationMode(self.authentication_mode)\\\n and result\n\n # Set RN-42 to work as a slave.\n result = self.SetSlaveMode() and result\n\n # Set a temporary pin code for testing purpose.\n # Only do this when we want to use a pin code.\n if self.authentication_mode == PeripheralKit.PIN_CODE_MODE:\n result = self.SetPinCode(self.TMP_PIN_CODE) and result\n\n # Enable the connection status message so that we could get the message\n # of connection/disconnection status.\n result = self.EnableConnectionStatusMessage() and result\n\n if not isinstance(self._kit, nRF52):\n # Reboot so that the configurations above take effect.\n result = self.Reboot() and result\n\n # Enter command mode again after reboot.\n result = self.EnterCommandMode() and result\n time.sleep(self.INIT_SLEEP_SECS)\n\n logging.info('A bluetooth HID \"%s\" device is connected.', self.device_type)\n return result",
"def _setup_device(self):\n if nfc.nfc_device_set_property_bool(self.__device, nfc.NP_ACTIVATE_CRYPTO1, True) < 0:\n raise Exception(\"Error setting Crypto1 enabled\")\n if nfc.nfc_device_set_property_bool(self.__device, nfc.NP_INFINITE_SELECT, False) < 0:\n raise Exception(\"Error setting Single Select option\")\n if nfc.nfc_device_set_property_bool(self.__device, nfc.NP_AUTO_ISO14443_4, False) < 0:\n raise Exception(\"Error setting No Auto ISO14443-A jiggery pokery\")\n if nfc.nfc_device_set_property_bool(self.__device, nfc.NP_HANDLE_PARITY, True) < 0:\n raise Exception(\"Error setting Easy Framing property\")",
"def _generate_machine_id(self):\r\n mach_id = \"machine_\"\r\n try:\r\n gws = netifaces.gateways() # get all gateways\r\n default = gws['default'] # get the default gw\r\n adapter = default[2][1] # get the adapter identifier\r\n real_adapter = netifaces.ifaddresses(adapter) # get the adapter\r\n link_info = real_adapter[netifaces.AF_LINK]\r\n mac = link_info[0]['addr']\r\n mac = re.sub('[:]', '', mac)\r\n except:\r\n mac = \"unsup\"\r\n self.logger.error(\"Getting mac of internet card is not supported, needs netifaces >= 0.10\")\r\n self.machine_id = mach_id + mac",
"def set_boot_device(self, device, persistent=False):\n\n operation = \"set_boot_device\"\n try:\n self.sp_manager.create_boot_policy()\n self.sp_manager.set_boot_device(device)\n\n except UcsException as ex:\n raise exception.UcsOperationError(operation=operation, error=ex)"
] | [
"0.6869196",
"0.6160965",
"0.5724841",
"0.56476253",
"0.5574767",
"0.5545499",
"0.54519165",
"0.5236378",
"0.51754254",
"0.5034209",
"0.5028963",
"0.49785176",
"0.49782223",
"0.49486035",
"0.49466297",
"0.49353293",
"0.4927631",
"0.49271697",
"0.4922526",
"0.49185145",
"0.4895232",
"0.48668995",
"0.48619774",
"0.48566747",
"0.4855276",
"0.4855276",
"0.4834752",
"0.48255107",
"0.48132867",
"0.48129967",
"0.48055586",
"0.48052055",
"0.47544578",
"0.47432265",
"0.47405043",
"0.47383633",
"0.47185886",
"0.4713861",
"0.47047207",
"0.4700002",
"0.4688584",
"0.46689627",
"0.46649447",
"0.4663028",
"0.4660648",
"0.46494806",
"0.46476027",
"0.46352345",
"0.46328792",
"0.46325648",
"0.46124828",
"0.46107125",
"0.46088594",
"0.46058255",
"0.4595942",
"0.4591314",
"0.4590285",
"0.45813885",
"0.45770967",
"0.45745593",
"0.45730808",
"0.45708767",
"0.45669776",
"0.4563916",
"0.45632243",
"0.45517132",
"0.45504782",
"0.45501205",
"0.45454",
"0.45418882",
"0.45380667",
"0.45373803",
"0.45346928",
"0.4534514",
"0.45271906",
"0.4522563",
"0.45206702",
"0.4515095",
"0.45121568",
"0.45083743",
"0.45071343",
"0.4505022",
"0.4496312",
"0.44940913",
"0.44855195",
"0.44855195",
"0.4484607",
"0.4484602",
"0.44845355",
"0.44805557",
"0.44801196",
"0.44722018",
"0.44703436",
"0.44667107",
"0.44597998",
"0.4459744",
"0.4459307",
"0.44568774",
"0.4453226",
"0.4451042"
] | 0.79104954 | 0 |
Disable iSCSI boot option in UEFI boot mode. | def unset_iscsi_boot_info(self, mac):
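        # iSCSI boot settings are exposed per NIC only when the system boots in UEFI mode.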
        if self._is_boot_mode_uefi():
iscsi_info = {'iSCSIBootEnable': 'Disabled'}
self._change_iscsi_settings(mac.upper(), iscsi_info)
else:
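            # Legacy BIOS mode exposes no iSCSI boot settings, so report the operation as unsupported.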
            msg = 'iSCSI boot is not supported in the BIOS boot mode'
raise exception.IloCommandNotSupportedInBiosError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def test_patch_bios_boot_mode(self):\n pass",
"def test_update_bios_boot_mode(self):\n pass",
"def safe_boot_disabled(self, safe_boot_disabled):\n\n self._safe_boot_disabled = safe_boot_disabled",
"def set_boot_device(self, task, device, persistent=False):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified.\") % device)\n\n uefi_mode = (\n boot_mode_utils.get_boot_mode(task.node) == 'uefi')\n\n # disable 60 secs timer\n timeout_disable = \"0x00 0x08 0x03 0x08\"\n ipmitool.send_raw(task, timeout_disable)\n\n # note(naohirot):\n # Set System Boot Options : ipmi cmd '0x08', bootparam '0x05'\n #\n # $ ipmitool raw 0x00 0x08 0x05 data1 data2 0x00 0x00 0x00\n #\n # data1 : '0xe0' persistent + uefi\n # '0xc0' persistent + bios\n # '0xa0' next only + uefi\n # '0x80' next only + bios\n # data2 : boot device defined in the dict _BOOTPARAM5_DATA2\n\n bootparam5 = '0x00 0x08 0x05 %s %s 0x00 0x00 0x00'\n if persistent:\n data1 = '0xe0' if uefi_mode else '0xc0'\n else:\n data1 = '0xa0' if uefi_mode else '0x80'\n data2 = _BOOTPARAM5_DATA2[device]\n\n cmd8 = bootparam5 % (data1, data2)\n ipmitool.send_raw(task, cmd8)\n else:\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified. \"\n \"Current iRMC firmware condition doesn't support IPMI \"\n \"but Redfish.\") % device)\n super(ipmitool.IPMIManagement, self).set_boot_device(\n task, device, persistent)",
"def unconfigure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Enables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'no dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Disable global stackwise-virtual dual-active recovery-reload')\n return output",
"def safe_boot_disabled(self):\n return self._safe_boot_disabled",
"def configure_global_dual_active_recovery_reload_disable(device):\n # build a list of commands to send\n # Add stackwise-virtual as first element in the list\n # Disables dual-active recovery-reload\n command_list = ['stackwise-virtual']\n command_list.append(f'dual-active recovery-reload-disable')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to Enable global stackwise-virtual dual-active recovery-reload')\n return output",
"def _DisableRootFsVerification(self):\n # 2 and 4 are the kernel partitions.\n for partition in [2, 4]:\n self.RunCmdOnDevice(['/usr/share/vboot/bin/make_dev_ssd.sh',\n '--partitions', str(partition),\n '--remove_rootfs_verification', '--force'])\n\n # Restart, wait a bit, and re-establish the SSH master connection.\n # We need to close the connection gracefully, then run the shutdown command\n # without using a master connection. port_forward=True bypasses the master\n # connection.\n self.CloseConnection()\n self.RunCmdOnDevice(['reboot'], port_forward=True)\n time.sleep(30)\n self.OpenConnection()",
"def set_boot_mode(self, task, mode):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='set_boot_mode')",
"def setOff(self, command):\r\n self.setDriver('ST', 0)",
"def disable_irq() -> int:",
"def bdev_nvme_disable_controller(client, name, cntlid):\n\n params = {'name': name}\n\n if cntlid is not None:\n params['cntlid'] = cntlid\n\n return client.call('bdev_nvme_disable_controller', params)",
"def systemOff():\n # Updated 11/19/16\n I2C.write_byte_data(Valve_bus, pinOut_O, 0x00 )\n I2C.write_byte_data(Pump_Mag_bus, pinOut_O, 0x00)",
"def _remove_bios_config(task, reboot_flag=False):\n task.node.del_driver_internal_info('irmc_bios_config')\n # NOTE(tiendc): If reboot flag is raised, then the BM will\n # reboot and cause a bug if the next clean step is in-band.\n # See https://storyboard.openstack.org/#!/story/2002731\n if reboot_flag:\n task.node.set_driver_internal_info('cleaning_reboot', True)\n task.node.save()",
"def _nixie_disable():\n # type: () -> None\n GPIO.output(NIXIE_nOE, GPIO.HIGH)",
"def disable():\n if _status_apf():\n return __apf_cmd(\"-f\")",
"def supported_boot_interfaces(self):\n return [fake.FakeBoot] + super().supported_boot_interfaces",
"def soft_shutdown(self, wait_for_board_off=False):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def switch_off(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def set_secure_boot_mode(self, secure_boot_enable):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('SecureBootEnable',\n secure_boot_enable)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def set_pending_boot_mode(self, boot_mode):\n boot_mode = boot_mode.lower()\n if boot_mode not in ['uefi', 'legacy']:\n msg = 'Invalid Boot mode specified'\n raise exception.IloInvalidInputError(msg)\n\n boot_properties = {'BootMode': boot_mode}\n\n if boot_mode == 'legacy':\n boot_properties['BootMode'] = 'LegacyBios'\n else:\n # If Boot Mode is 'Uefi' set the UEFIOptimizedBoot first.\n boot_properties['UefiOptimizedBoot'] = \"Enabled\"\n\n # Change the Boot Mode\n self._change_bios_setting(boot_properties)",
"def turn_off(self):\n self._state = False\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":0 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'): \n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":0 }', 5)",
"def powerOff(self):\n self._sendCommand(self.SONY_CMD_ExtBackupCommunicator_ForcePowerOff, bufferSize=0)",
"def test_get_bios_boot_mode_by_moid(self):\n pass",
"async def async_turn_off(self):\n path = \"/interface\"\n param = \"default-name\"\n if \"-\" in self._data[\"port-mac-address\"]:\n param = \"name\"\n value = self._data[param]\n mod_param = \"disabled\"\n mod_value = True\n self._ctrl.set_value(path, param, value, mod_param, mod_value)\n\n if self._data[\"poe-out\"] == \"auto-on\":\n path = \"/interface/ethernet\"\n self._ctrl.set_value(path, param, value, \"poe-out\", \"off\")\n\n await self._ctrl.async_update()",
"def get_boot_device(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).get_boot_device(task)\n else:\n return super(\n ipmitool.IPMIManagement, self).get_boot_device(task)",
"def _doDisableRegulation(self):\n self._cmdRegulOff()",
"def get_boot_mode(self, task):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='get_boot_mode')",
"def disable_weapon(self, weapon):\n if weapon == \"nothing\":\n weapon = 0\n elif weapon == \"main\":\n weapon = 1\n elif weapon == \"secondary\":\n weapon = 2\n elif weapon == \"everything\":\n weapon = 3\n cmd = '{}testDisableWeaponMode {}'.format(self.console, weapon)\n self.write_command(cmd)",
"def disable_pki(client, mount_point=\"pki\"):\n client.sys.disable_secrets_engine(mount_point)",
"def get_boot_mode(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def bootloader() -> NoReturn:",
"def unconfigure_global_stackwise_virtual(device):\n # Single command 'no stackwise-virtual' will remove configuration\n command = 'no stackwise-virtual'\n try:\n output = device.configure(command)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to remove global stackwise-virtual')\n return output",
"def DisableByRunIf(self):\n self.run_if = 'False'",
"def unload_kernel_module(params) -> None:\n print(\"Unloading kernel module...\")\n if os.system(\"modprobe -r v4l2loopback >/dev/null 2>&1\") == 0:\n print(\" Success !\")\n else:\n print(\" Failure !\")",
"def request_shutdown(self, kernel_id, restart=False):",
"def disable_idle_states(self):\n # Disable C1 (cluster shutdown).\n self.set_sysfile_value('/sys/devices/system/cpu/cpu0/cpuidle/state1/disable', 1, verify=False)\n # Disable C0.\n self.set_sysfile_value('/sys/kernel/debug/idle_debug/enable_idle', 0xFF, verify=False)",
"def soft_shutdown_cmd(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def is_allow_select_boot_device(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsAllowSelectBootDevice', self.handle))",
"def turn_test_mode_off_by_default(test_mode_off):",
"def allOff():\n # Get/set special slice IDs\n root_xid = bwlimit.get_xid(\"root\")\n default_xid = bwlimit.get_xid(\"default\")\n kernelhtbs = gethtbs(root_xid, default_xid)\n if len(kernelhtbs):\n logger.log(\"bwmon: Disabling all running HTBs.\")\n for htb in kernelhtbs.keys(): bwlimit.off(htb, dev = dev_default)",
"def get_supported_boot_devices(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).get_supported_boot_devices(task)\n else:\n return super(ipmitool.IPMIManagement,\n self).get_supported_boot_devices(task)",
"def deactivate(self):\n if self.parents[0].type == 'dm-multipath':\n devmap = block.getMap(major=self.major, minor=self.minor)\n if devmap:\n try:\n block.removeDeviceMap(devmap)\n except Exception as e:\n raise errors.DeviceTeardownError(\"failed to tear down device-mapper partition %s: %s\" % (self.name, e))\n udev.settle()",
"def test_configure_lvm_storage_unforced_remove_default(self, reduce_lvm):\n devices = ['/dev/fakevbd']\n cinder_utils.configure_lvm_storage(devices, 'test', False, True)\n reduce_lvm.assert_called_with('test')",
"def turn_aux_heat_off(self):\n self.set_operation_mode(STATE_HEAT)",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def scp_disable(task):\n cmd = \"no ip scp server enable\"\n task.run(task=netmiko_send_config, config_commands=cmd)\n task.run(task=netmiko_save_config)\n c_print(f\"*** {task.host}: SCP has been disabled ***\")",
"def unselect_and_select_boot_order():\n # Unselect and select the \"Manage boot order\" option\n selenium2lib = ui_lib.get_s2l()\n status = True\n logger._log_to_console_and_log_file(\"\")\n logger._log_to_console_and_log_file(\"Unselecting and selecting the 'Manage boot order' checkbox\")\n ui_lib.wait_for_checkbox_and_unselect(FusionServerProfilesPage.ID_CHKBOX_MANAGE_BOOT_ORDER)\n if not ui_lib.wait_for_element_visible(\"name=%s\" % \"CD\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"USB\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"HardDisk\"):\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were correctly hidden\")\n else:\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items are still being displayed\")\n selenium2lib.capture_page_screenshot()\n status = False\n ui_lib.wait_for_checkbox_and_select(FusionServerProfilesPage.ID_CHKBOX_MANAGE_BOOT_ORDER)\n if ui_lib.wait_for_element_visible(\"name=%s\" % \"CD\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"USB\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"HardDisk\"):\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were correctly displayed\")\n else:\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were NOT displayed\")\n selenium2lib.capture_page_screenshot()\n status = False\n return status",
"def hard_shutdown(self, wait_for_board_off=False):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def disable_setup(self):\n self.high_ver_entry.config(state=\"disabled\")\n self.low_ver_entry.config(state=\"disabled\")\n self.left_hor_entry.config(state=\"disabled\")\n self.right_hor_entry.config(state=\"disabled\")",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def set_bios_bootmode_uefi(ip, login_account, login_password, system_id):\n result = {}\n login_host = \"https://\" + ip\n try:\n # Connect using the BMC address, account name, and password\n # Create a REDFISH object\n REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,\n password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)\n # Login into the server and create a session\n REDFISH_OBJ.login(auth=utils.g_AUTH)\n except:\n traceback.print_exc()\n result = {'ret': False, 'msg': \"Please check the username, password, IP is correct\"}\n return result\n\n # GET the ComputerSystem resource\n system = utils.get_system_url(\"/redfish/v1\", system_id, REDFISH_OBJ)\n if not system:\n result = {'ret': False, 'msg': \"This system id is not exist or system member is None\"}\n REDFISH_OBJ.logout()\n return result\n for i in range(len(system)):\n system_url = system[i]\n response_system_url = REDFISH_OBJ.get(system_url, None)\n if response_system_url.status != 200:\n error_message = utils.get_extended_error(response_system_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (system_url, response_system_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else:\n # Get the bios resource\n bios_url = response_system_url.dict['Bios']['@odata.id']\n response_bios_url = REDFISH_OBJ.get(bios_url, None)\n if response_bios_url.status != 200:\n error_message = utils.get_extended_error(response_bios_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (bios_url, response_bios_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else: # Get bios success\n # Seek boot mode from bios attributes\n attribute_bootmode = None\n attributes = response_bios_url.dict['Attributes']\n for attribute in attributes:\n if attribute == \"BootMode\" or attribute == \"SystemBootMode\":\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"SystemBootMode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"Boot\" in attribute and \"Mode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n result = {'ret': False, 'msg': \"Can not found BootMode attribute in response of url %s\" %(bios_url)}\n REDFISH_OBJ.logout()\n return result\n\n # Get boot mode setting guide from bios registry\n WarningText = None\n ValueName = None\n bios_registry_url = \"/redfish/v1/Registries/\" + response_bios_url.dict['AttributeRegistry']\n response_bios_registry_url = REDFISH_OBJ.get(bios_registry_url, None)\n if response_bios_registry_url.status == 200:\n locations = response_bios_registry_url.dict['Location']\n bios_regjson_url = None\n for location in locations:\n if 'en' in location['Language']:\n bios_regjson_url = location['Uri']\n break\n if bios_regjson_url:\n response_bios_regjson_url = REDFISH_OBJ.get(bios_regjson_url, None)\n if response_bios_regjson_url.status == 200:\n regattributes = response_bios_regjson_url.dict['RegistryEntries']['Attributes']\n for regattribute in regattributes:\n if regattribute['AttributeName'] == attribute_bootmode:\n if 'WarningText' in regattribute:\n WarningText = regattribute['WarningText']\n for value in regattribute['Value']:\n if 'legacy' in value['ValueName'].lower():\n continue\n if 'uefi' in value['ValueName'].lower():\n ValueName = 
value['ValueName']\n break\n ValueName = value['ValueName']\n break\n \n # Perform patch to set\n if ValueName == None:\n ValueName = \"UEFIMode\"\n pending_url = response_bios_url.dict['@Redfish.Settings']['SettingsObject']['@odata.id']\n parameter = {attribute_bootmode: ValueName}\n attribute = {\"Attributes\": parameter}\n headers = {\"If-Match\": '*'}\n response_pending_url = REDFISH_OBJ.patch(pending_url, body=attribute, headers=headers)\n if response_pending_url.status in [200,204]:\n if WarningText:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful. WarningText: %s'% (WarningText) }\n else:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful'}\n elif response_pending_url.status == 405:\n result = {'ret': False, 'msg': \"Resource not supported\"}\n else:\n error_message = utils.get_extended_error(response_pending_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (\n pending_url, response_pending_url.status, error_message)}\n\n # Logout of the current session\n try:\n REDFISH_OBJ.logout()\n except:\n pass\n return result",
"def set_nic_legacy_boot_protocol_none(self, nic_id):\n return self._nic_cfg.set_nic_legacy_boot_protocol(nic_id, 'NONE')",
"def interrupt_kernel(self, kernel_id):",
"def disable_output(self):\n\n self.__rtcconfig = self.__helper.updatebyte(self.__rtcconfig, 7, 0)\n self.__rtcconfig = self.__helper.updatebyte(self.__rtcconfig, 4, 0)\n self.__bus.write_byte_data(\n self.__rtcaddress, self.CONTROL, self.__rtcconfig)\n return",
"def unconfigure_service_password_encryption(device):\n\n try:\n device.configure(\"no service password-encryption\")\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not unconfigure service password encryption\"\n )",
"def set_boot_options(self, image_name, **vendor_specifics):\n current_boot = self.show(\"show running-config | inc ^boot system \")\n file_system = vendor_specifics.get(\"file_system\")\n if file_system is None:\n file_system = self._get_file_system()\n\n file_system_files = self.show(f\"dir {file_system}\")\n if re.search(image_name, file_system_files) is None:\n log.error(\"Host %s: File not found error for image %s.\", self.host, image_name)\n raise NTCFileNotFoundError(\n # TODO: Update to use hostname\n hostname=self.host,\n file=image_name,\n directory=file_system,\n )\n\n current_images = current_boot.splitlines()\n commands_to_exec = [f\"no {image}\" for image in current_images]\n commands_to_exec.append(f\"boot system {file_system}/{image_name}\")\n self.config(commands_to_exec)\n\n self.save()\n if self.boot_options[\"sys\"] != image_name:\n log.error(\"Host %s: Setting boot command did not yield expected results\", self.host)\n raise CommandError(\n command=f\"boot system {file_system}/{image_name}\",\n message=\"Setting boot command did not yield expected results\",\n )\n\n log.info(\"Host %s: boot options have been set to %s\", self.host, image_name)",
"def power_off(self, default=False):\n if default:\n return self.exec_command('SupplyPowerDefault = 0')\n return self.exec_command('SupplyPower = 0')",
"def disable_cl2(self):\n self.write_versa5(0x31,0x80) ## Disable divider output for clock2\n self.write_versa5(0x63,0x00) ## Disable clock2 output",
"def _disable_wifi_ap(self):\n call(['systemctl', 'disable', 'hostapd', ])\n call(['systemctl', 'disable', 'dnsmasq', ])\n\n context = self._get_ap_context()\n self._write_system_template('/etc/network/interfaces', 'interfaces.conf', context)\n self._write_system_template('/etc/dhcpcd.conf', 'dhcpcd.conf', context)",
"def get_boot_mode():\n boot_mode = 'Legacy'\n try:\n reg_key = winreg.OpenKey(\n winreg.HKEY_LOCAL_MACHINE, r'System\\CurrentControlSet\\Control')\n reg_value = winreg.QueryValueEx(reg_key, 'PEFirmwareType')[0]\n if reg_value == 2:\n boot_mode = 'UEFI'\n except:\n boot_mode = 'Unknown'\n\n return boot_mode",
"def set_secure_boot_state(self, task, state):\n return irmc_common.set_secure_boot_mode(task.node, state)",
"def shutdown_kernel(self, kernel_id, now=False, restart=False):",
"def disable():\n request = dict(id='gbn')\n _gbn_disable(request)",
"async def test_set_aux_heat_off(opp):\n await common.async_set_aux_heat(opp, False, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF",
"def test_get_bios_boot_mode_list(self):\n pass",
"def unconfigure_stackwise_virtual_dual_active_interfaces(device, dad_links):\n # build a list of commands to send\n command_list = []\n output = ''\n for interface in dad_links:\n command_list.append(f'interface {interface}')\n command_list.append(f'no stackwise-virtual dual-active-detection')\n try:\n output = device.configure(command_list)\n except SubCommandFailure:\n raise SubCommandFailure('Failed to unconfigure stackwise-virtual dual-active-detection interfaces')\n return output",
"def enable_secure_boot(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_secure_boot\")",
"def _is_boot_mode_uefi(self):\n boot_mode = self.get_current_boot_mode()\n if boot_mode == 'UEFI':\n return True\n else:\n return False",
"def create_boot_dev(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_CreateBootDev', self.handle))",
"def nfvi_disable_compute_host_services(host_uuid, host_name, host_personality,\n callback):\n cmd_id = _compute_plugin.invoke_plugin('disable_host_services',\n host_uuid, host_name,\n host_personality,\n callback=callback)\n return cmd_id",
"def exit_sleep_mode(self):\n self.execute(SdpI2cCmdExitSleepMode())",
"def disable_auto_os_update_for_packagekit(self):\n self.composite_logger.log(\"Disabling auto OS updates using packagekit\")\n self.__init_auto_update_for_packagekit()\n\n self.backup_image_default_patch_configuration_if_not_exists()\n\n if not self.is_auto_update_service_installed(self.packagekit_install_check_cmd):\n self.composite_logger.log_debug(\"Cannot disable as packagekit is not installed on the machine\")\n return\n\n self.composite_logger.log_debug(\"Preemptively disabling auto OS updates using packagekit\")\n #todo: uncomment after finding the correct value\n # self.update_os_patch_configuration_sub_setting(self.download_updates_identifier_text, \"false\", self.packagekit_config_pattern_match_text)\n self.update_os_patch_configuration_sub_setting(self.apply_updates_identifier_text, \"false\", self.packagekit_config_pattern_match_text)\n self.disable_auto_update_on_reboot(self.packagekit_disable_on_reboot_cmd)\n\n self.composite_logger.log(\"Successfully disabled auto OS updates using packagekit\")",
"def disable(self, subsystem=False):\n self.__dict__[\"enabled\"] = False\n\n if subsystem:\n self.subsystem.disable()",
"async def async_turn_off(self, **kwargs):\n self._wrap_device.device.set_duct_zone(self._zone, False)",
"def power_off(self):\n LOG.info('Powering off system')\n self._run_shutdown_command('poweroff')",
"def ethernet_off(self):\n if not self.healthy:\n self.health_check()\n if not self._ethernet_switch:\n raise errors.CapabilityNotReadyError(\n device_name=self._device_name,\n msg=\"Not set up for ethernet switching.\")\n self._ethernet_switch.switch_power.power_off(self.ethernet_port_number)",
"def stop_salt():\n with fabric_settings(warn_only=True):\n if env.host == env.master_server.public_ip:\n sudo(\"systemctl stop salt-master\")\n sudo(\"systemctl stop salt-minion\")",
"def off(config: dict):\n switch_device(config, config[\"inching\"], \"off\")",
"def turn_off(self, **kwargs: Any) -> None:\n if (\n DPCODE_LIGHT in self.tuya_device.status\n and DPCODE_SWITCH not in self.tuya_device.status\n ):\n commands = [{\"code\": DPCODE_LIGHT, \"value\": False}]\n else:\n commands = [{\"code\": DPCODE_SWITCH, \"value\": False}]\n self._send_command(commands)",
"def flashUboot(self):\n\t\tif self.settings.getKeyValue('flash.uboot?') == 'y':\n\t\t\tloadAddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\t\tcmd = self.settings.getKeyValue('u-boot.load.command')\n\t\t\tcmd = cmd.replace('<u-boot>', 'u-boot.bin.12x.2430')\n\t\t\tself.socket.send(cmd, 5)\n\t\t\t#self.socket.send('protect off 1:0-1\\r', 2)\n\t\t\t#self.socket.send('erase 1:0-1\\r', 2)\n\t\t\t#self.socket.send('cp.b 80000000 %s 2ffff\\r' % loadAddress)\n\t\t\treturn None\n\t\t\t#cmd = cmd.replace('<u-bootloadadress>', self.u-bootloadaddress)",
"def disable(ctx):\n config_db = ConfigDBConnector()\n config_db.connect()\n config_db.mod_entry(\"NAT_GLOBAL\", \"Values\", {\"admin_mode\": \"disabled\"})",
"def set_allow_select_boot_device(self, bAllowed):\n\t\tcall_sdk_function('PrlVmCfg_SetAllowSelectBootDevice', self.handle, bAllowed)",
"def disable_discovery(self):",
"def off(self):\n status = self.dev.ctrl_transfer(0x40, 0x01, 0x0001, 0x20, [])\n if status == 0:\n self.ev.clear()\n return (status == 0)",
"def disable_auto_os_update(self):\n try:\n self.composite_logger.log(\"Disabling auto OS updates in all identified services...\")\n self.disable_auto_os_update_for_yum_cron()\n self.disable_auto_os_update_for_dnf_automatic()\n self.disable_auto_os_update_for_packagekit()\n self.composite_logger.log_debug(\"Successfully disabled auto OS updates\")\n\n except Exception as error:\n self.composite_logger.log_error(\"Could not disable auto OS updates. [Error={0}]\".format(repr(error)))\n raise",
"def test_no_overprovision(self):\n command_line = (\n self._MENU + [self._POOLNAME] + self._DEVICES + [\"--no-overprovision\"]\n )\n TEST_RUNNER(command_line)",
"def load_kernel_module(params) -> None:\n print(\"Loading kernel module...\")\n os.system(\"modprobe -r v4l2loopback >/dev/null 2>&1\")\n cmd = \"modprobe v4l2loopback devices=1 video_nr=\" + params['loopback_nr'] + \\\n \" card_label=\" + params['loopback_name'] + \\\n \" exclusive_caps=\" + params['loopback_exclusive'] + \" >/dev/null 2>&1\"\n if os.system(cmd) == 0:\n print(\" Success !\")\n else:\n print(\" Failure !\")",
"def off(self):\n print(f\"RF {self.name} off\")\n self.status(False)",
"def unconfigure_enable_password(device,secret=True,privilege=None):\n cmd=\"no enable\"\n if secret :\n cmd+=\" secret\"\n else :\n cmd+=\" password\"\n if privilege :\n cmd+=f\" level {privilege}\"\n\n try:\n device.configure(cmd)\n except SubCommandFailure as e:\n raise SubCommandFailure(\n f'Could not unconfigure enable password or secret:\\n{e}'\n )",
"def _disable(self):\n self.enabled = False",
"def test_no_adapter_opts_ignore_service_type(self):\n self.oslo_config_dict['heat'] = None\n self.assert_service_disabled(\n 'orchestration',\n \"Not in the list of requested service_types.\",\n # 'orchestration' absent from this list\n service_types=['compute'],\n )",
"def shutdown(self):\n self.disable_modulation()\n self.disable()\n super().shutdown()",
"def invalidateBoot (self):\n if self.isBootValid(): \n self.mountBootPartition()\n installFilePath = self._getBootInstallationFilePath()\n if os.path.exists(installFilePath):\n os.remove(installFilePath)\n\n #self._runCommandRaiseIfFail(\"rm -rf %s\" % (self._getBootInstallationFilePath()))\n self._log(\"invalidate-boot\").notice(\"boot partition is invalidated\")\n else:\n self._log(\"invalidate-boot\").notice(\"boot partition is already invalid\")",
"def disable(self):\n logging.debug(\"Disabling switch %s\" % self.name)\n self.disabled = True",
"def test_no_adapter_opts(self):\n self.oslo_config_dict['heat'] = None\n self.assert_service_disabled(\n 'orchestration',\n \"Encountered an exception attempting to process config for \"\n \"project 'heat' (service type 'orchestration'): no such option\",\n )",
"def disable_cloud_dataset(self):\n self._boto3 = None\n self._botocore = None",
"def disable_aaa_password_restriction(device):\n cmd=\"no aaa password restriction\"\n try:\n device.configure(cmd)\n except SubCommandFailure as e:\n raise SubCommandFailure(\n f'Could not configure no aaa password restriction:\\n{e}'\n )",
"async def async_turn_off(self, **kwargs: Any) -> None:\n await self.entity_description.set_command(self, False)"
] | [
"0.608217",
"0.60647833",
"0.60542953",
"0.60381645",
"0.6015318",
"0.59813994",
"0.5816549",
"0.57793945",
"0.5776712",
"0.5750608",
"0.5560355",
"0.5523266",
"0.55037075",
"0.5491119",
"0.5489613",
"0.54749763",
"0.54649585",
"0.5449469",
"0.54409015",
"0.539503",
"0.53881",
"0.53875077",
"0.53786755",
"0.5376367",
"0.5360541",
"0.5359437",
"0.53442544",
"0.53436774",
"0.53394485",
"0.53392154",
"0.5317519",
"0.5312851",
"0.53107",
"0.5303603",
"0.53005034",
"0.52833325",
"0.5281864",
"0.52771574",
"0.5227419",
"0.52215695",
"0.5213003",
"0.52111024",
"0.52066535",
"0.5203341",
"0.51959115",
"0.5193034",
"0.5191826",
"0.5164627",
"0.5143272",
"0.51432425",
"0.51410055",
"0.5126064",
"0.51244694",
"0.51224214",
"0.5121303",
"0.5116436",
"0.5100679",
"0.50940824",
"0.50892884",
"0.50860494",
"0.50797045",
"0.5074826",
"0.50720197",
"0.5066169",
"0.5065622",
"0.50636023",
"0.50613075",
"0.5060732",
"0.5060401",
"0.5044103",
"0.5037419",
"0.50249994",
"0.50160867",
"0.50119275",
"0.50115967",
"0.50106907",
"0.5008531",
"0.499895",
"0.49956763",
"0.4994895",
"0.49854672",
"0.49745694",
"0.4974232",
"0.49684516",
"0.49664193",
"0.4963811",
"0.4952995",
"0.49370277",
"0.49337775",
"0.49290082",
"0.492701",
"0.4925761",
"0.49241498",
"0.49225414",
"0.491399",
"0.4910079",
"0.49093038",
"0.4905356",
"0.48974502",
"0.48948267"
] | 0.77110964 | 0 |
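The first negative in the row above (set_bios_bootmode_uefi) walks the full Redfish sequence: create a client, log in, resolve the ComputerSystem and Bios resources, discover the boot-mode attribute and its legal values from the attribute registry, then PATCH the pending-settings object. Below is a minimal sketch of just that final PATCH step; the host, credentials, pending-settings URI, and attribute/value names are illustrative placeholders (the sample derives them at runtime), while the client calls mirror the python redfish library usage shown in the sample.

    import redfish

    # Placeholder BMC endpoint and credentials; substitute real values.
    client = redfish.redfish_client(base_url="https://10.0.0.1",
                                    username="USERID", password="PASSW0RD",
                                    default_prefix="/redfish/v1")
    client.login(auth="session")
    try:
        # PATCH the pending BIOS settings object with the new boot mode.
        # The URI and attribute/value names here are assumptions; the sample
        # reads them from @Redfish.Settings and the BIOS attribute registry.
        body = {"Attributes": {"BootMode": "UEFIMode"}}
        headers = {"If-Match": "*"}  # some BMCs require an ETag on BIOS writes
        response = client.patch("/redfish/v1/Systems/1/Bios/Pending",
                                body=body, headers=headers)
        if response.status not in (200, 204):
            raise RuntimeError("PATCH failed with status %s" % response.status)
    finally:
        client.logout()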
Retrieves the current boot mode of the server. | def get_current_boot_mode(self):
boot_mode = self._get_bios_setting('BootMode')
if boot_mode == 'LegacyBios':
boot_mode = 'legacy'
return boot_mode.upper() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_boot_mode():\n boot_mode = 'Legacy'\n try:\n reg_key = winreg.OpenKey(\n winreg.HKEY_LOCAL_MACHINE, r'System\\CurrentControlSet\\Control')\n reg_value = winreg.QueryValueEx(reg_key, 'PEFirmwareType')[0]\n if reg_value == 2:\n boot_mode = 'UEFI'\n except:\n boot_mode = 'Unknown'\n\n return boot_mode",
"def get_pending_boot_mode(self):\n headers, uri, bios_settings = self._check_bios_resource(['BootMode'])\n _, _, settings = self._get_bios_settings_resource(bios_settings)\n boot_mode = settings.get('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n return boot_mode.upper()",
"def get_boot_mode(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def get_secure_boot_mode(self):\n system = self._get_host_details()\n\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = ('\"SecureBoot\" resource or feature is not supported'\n ' on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # get the Secure Boot object\n status, headers, secure_boot_settings = self._rest_get(secure_boot_uri)\n\n if status >= 300:\n msg = self._get_extended_error(secure_boot_settings)\n raise exception.IloError(msg)\n\n return secure_boot_settings['SecureBootCurrentState']",
"def get_boot_mode(self, task):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='get_boot_mode')",
"def get_secure_boot_mode(self):\n sushy_system = self._get_sushy_system()\n try:\n secure_boot_enabled = GET_SECUREBOOT_CURRENT_BOOT_MAP.get(\n sushy_system.secure_boot.current_boot)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to provide '\n 'information about secure boot on the server. '\n 'Error: %(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexCommandNotSupportedError(msg)\n\n if secure_boot_enabled:\n LOG.debug(self._(\"Secure boot is Enabled\"))\n else:\n LOG.debug(self._(\"Secure boot is Disabled\"))\n return secure_boot_enabled",
"def get_mode(self) -> str:\n\n return self.send(self.cmd.GET_MODE)",
"def get_mode(self):\r\n return self._api.get_mode()",
"def get_boot_driver(self):\n return self._boot_driver",
"def set_bootloader_mode(self, mode):\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 'B')",
"def set_bootloader_mode(self, mode):\n self.check_validity()\n\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')",
"def test_get_bios_boot_mode_by_moid(self):\n pass",
"def test_get_bios_boot_mode_list(self):\n pass",
"def get_secure_boot_state(self, task):\n return irmc_common.get_secure_boot_mode(task.node)",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def is_bootable(self):\n return self.bootable_flag == 0x80",
"def getmode(self):\n return self.mode",
"def get_mode(self):\r\n return self.mode",
"def get_mode(self, ):\n return self.get_parameter('mode')",
"def get_current_mode(self):\n return self.read(0xa2)",
"def get_mode(self, port):\n port = int(port)\n self._validate_port(\"get_mode\", port)\n flags = self._regex_shell_fn(\n self._command_dict[\"GET_MODE\"].format(port),\n self._regex_dict[\"GET_MODE_REGEX\"],\n tries=5)\n\n if \"O\" in flags:\n mode = OFF\n elif \"S\" in flags:\n mode = SYNC\n else:\n mode = CHARGE\n return mode",
"def mode(self):\n return self._data.get('mode', None)",
"def get_app_mode(self):\n\t\treturn call_sdk_function('PrlApi_GetAppMode')",
"def system(self, mode=None):\n if mode == System.AUTO:\n self.change_request[\"SystemSwitch\"] = System.AUTO\n elif mode == System.COOL:\n self.change_request[\"SystemSwitch\"] = System.COOL\n elif mode == System.HEAT:\n self.change_request[\"SystemSwitch\"] = System.HEAT\n elif mode == System.OFF:\n self.change_request[\"SystemSwitch\"] = System.OFF\n else:\n return False\n return self.change_request[\"SystemSwitch\"]",
"def getMode(self):\n return self._mode",
"def hvac_mode(self):\n if self.ac.status is None:\n _LOGGER.debug(f\"hvac_mode: status is None, returning None\")\n return None\n if self.ac.status.is_on:\n ac_mode = self.ac.status.ac_mode\n value = self.HVAC_MODE_MAPPING[ac_mode]\n _LOGGER.debug(f\"hvac_mode: returning {value} (derived from {ac_mode})\")\n return value\n else:\n _LOGGER.debug(f\"hvac_mode: returning HVAC_MODE_OFF - device is off\")\n return HVAC_MODE_OFF",
"def dev_mode(self):\r\n return self._dev_mode",
"def mode(self) -> Optional[str]:\n return pulumi.get(self, \"mode\")",
"def mode(self) -> Optional[str]:\n return pulumi.get(self, \"mode\")",
"def mode(self):\n if self._vsanobj.id is None:\n raise VsanNotPresent(\"Vsan \" + str(self._vsanobj._id) + \" is not present on the switch.\")\n out = self.__show_zone_status()\n return out[get_key(zonekeys.MODE, self._SW_VER)]",
"def mode(self) -> str:\n return pulumi.get(self, \"mode\")",
"def mode(self) -> str:\n return pulumi.get(self, \"mode\")",
"def get_supported_boot_modes(self, task):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='get_supported_boot_modes')",
"def power_mode(self) -> str:\n return self._device_info[\"PowerMode\"]",
"def mode(self):\n return self._lift(\"mode\")",
"def get_mode_parameter(mode):\n if mode == 'job':\n return 'cli'\n elif mode == 'serve':\n return 'serving'\n else:\n return mode",
"def game_mode(self):\n return self._get(\"game_mode\")",
"def _get_mode():\n return context.get_context('mode')",
"def hvac_mode(self) -> str:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"hvac_mode\"))\r\n return self._hvac_mode",
"def hvac_mode(self) -> str | None:\n\n if self._device.system_mode is None:\n return # unable to determine\n if self._device.system_mode[CONF_SYSTEM_MODE] == SystemMode.HEAT_OFF:\n return HVACMode.OFF\n if self._device.system_mode[CONF_SYSTEM_MODE] == SystemMode.AWAY:\n return HVACMode.AUTO # users can't adjust setpoints in away mode\n return HVACMode.HEAT",
"def getMode(self):\n with self.lock:\n mode = self.mode\n return mode",
"def hvac_mode(self) -> str | None:\n\n if self._device.tcs.system_mode is None:\n return # unable to determine\n if self._device.tcs.system_mode[CONF_SYSTEM_MODE] == SystemMode.AWAY:\n return HVACMode.AUTO\n if self._device.tcs.system_mode[CONF_SYSTEM_MODE] == SystemMode.HEAT_OFF:\n return HVACMode.OFF\n\n if self._device.mode is None or self._device.mode[ATTR_SETPOINT] is None:\n return # unable to determine\n if (\n self._device.config\n and self._device.mode[ATTR_SETPOINT] <= self._device.config[\"min_temp\"]\n ):\n return HVACMode.OFF\n return HVACMode.HEAT",
"def get_bootarch(self):\n return self._bootarch",
"def mode(self):\n return self._mode",
"def mode(self):\n return self._mode",
"def mode(self):\n return self._mode",
"def boot_configuration(self):\n bootconfs = self.get_logical_configuration(gdef.BOOT_LOG_CONF)\n if not bootconfs:\n return bootconfs\n assert len(bootconfs) == 1 # Only one boot configuration can exist for each device instance.\n return bootconfs[0]",
"def preset_mode(self) -> str | None:\n\n if self._device.system_mode is None:\n return # unable to determine\n return PRESET_TCS_TO_HA[self._device.system_mode[CONF_SYSTEM_MODE]]",
"def mode(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"mode\")",
"def mode(self) -> Optional[pulumi.Input['WorkloadMetadataConfigMode']]:\n return pulumi.get(self, \"mode\")",
"def mode(self):\r\n return self._mode",
"def mode(self):\r\n return self._mode",
"def mode(self):\r\n return self._mode",
"def _is_boot_mode_uefi(self):\n boot_mode = self.get_current_boot_mode()\n if boot_mode == 'UEFI':\n return True\n else:\n return False",
"def mode(self) -> Optional[str]:\n for mode in self._modes:\n if mode.active:\n return mode.name\n return None",
"def _get_mode(self):\n raise NotImplementedError",
"def currentMode(self):\n logger.debug(\"Func: currentMode/getter\")\n\n return self._currentsDict[\"currentMode\"]",
"def get_socket_mode(self):\n\t\treturn call_sdk_function('PrlVmDevSerial_GetSocketMode', self.handle)",
"def mode(self):\n\n return self._mode",
"def get_all_servers_modes():\n return _get_list(\n lambda server: server.mode,\n lambda server: server.mode_name_long\n )",
"def get_mode(guild_id: int):\n key = _mode_key(guild_id)\n if key not in db:\n return fixtures.chat\n return db[key]",
"def drive_mode(self):\n return self._read(MX_DRIVE_MODE)",
"def _get_mode(self):\n self._validate_mode()\n return deepcopy(self.mode)",
"def mode(self):\n return self.__mode",
"def wait_boot(self) -> int:\n return self._data[ATTR_WAIT_BOOT]",
"def getbootinfo(self):\n self.mount()\n kernel = None\n inits = []\n for line in self.xlist(\"get-bootinfo\", IBASE)[1]:\n if line.startswith('+++'):\n kernel = line.split()[1]\n else:\n inits.append(line)\n self.unmount()\n if not inits:\n run_error(_(\"No initramfs found\"))\n return None\n if not kernel:\n run_error(_(\"GRUB problem:\\n\") + inits[0])\n return None\n return (kernel, inits)",
"def mode(self) -> Optional[pulumi.Input[Union[str, 'Mode']]]:\n return pulumi.get(self, \"mode\")",
"def test_update_bios_boot_mode(self):\n pass",
"def device_mode(self) -> str:\n raw_mode = self._device_info[\"SensorMode\"]\n if raw_mode not in DEVICE_MODE_MAP:\n LOGGER.debug(\"Unknown device mode value: %s\", raw_mode)\n return DEVICE_MODE_UNKNOWN\n return DEVICE_MODE_MAP[raw_mode]",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def get_type(self):\n\t\treturn call_sdk_function('PrlBootDev_GetType', self.handle)",
"def _get_bios_boot_resource(self, data):\n try:\n boot_uri = data['links']['Boot']['href']\n except KeyError:\n msg = ('Boot resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, boot_settings = self._rest_get(boot_uri)\n\n if status != 200:\n msg = self._get_extended_error(boot_settings)\n raise exception.IloError(msg)\n\n return boot_settings",
"def preset_mode(self) -> str | None:\n\n if self._device.tcs.system_mode is None:\n return # unable to determine\n # if self._device.tcs.system_mode[CONF_SYSTEM_MODE] in MODE_TCS_TO_HA:\n if self._device.tcs.system_mode[CONF_SYSTEM_MODE] in (\n SystemMode.AWAY,\n SystemMode.HEAT_OFF,\n ):\n return PRESET_TCS_TO_HA[self._device.tcs.system_mode[CONF_SYSTEM_MODE]]\n\n if self._device.mode is None:\n return # unable to determine\n if self._device.mode[CONF_MODE] == ZoneMode.SCHEDULE:\n return PRESET_TCS_TO_HA[self._device.tcs.system_mode[CONF_SYSTEM_MODE]]\n return PRESET_ZONE_TO_HA.get(self._device.mode[CONF_MODE])",
"def get_pump_mode(self):\n return self.__pump_mode",
"def get_deploy_mode():\n inventory = Inventory.load()\n remote_mode = inventory.remote_mode\n deploy_mode = 'remote' if remote_mode else 'local'\n return deploy_mode",
"def power_mode(self):\n if not self.eve_type.is_upwell_structure:\n return None\n\n if self.fuel_expires_at and self.fuel_expires_at > now():\n return self.PowerMode.FULL_POWER\n\n elif self.last_online_at:\n if self.last_online_at >= now() - timedelta(days=7):\n return self.PowerMode.LOW_POWER\n else:\n return self.PowerMode.ABANDONED\n\n elif self.state in {self.State.ANCHORING, self.State.ANCHOR_VULNERABLE}:\n return self.PowerMode.LOW_POWER\n\n else:\n return self.PowerMode.LOW_ABANDONED",
"def mode(self) -> str:\r\n return self._mode",
"def _get_modes(self):\n return self.__modes",
"def is_http_boot_requested(node):\n http_boot_requested = (\n str(node.driver_info.get('enable_uefi_httpboot', 'false')).lower())\n return http_boot_requested == 'true'",
"def mode(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"mode\")",
"def mode(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"mode\")",
"def tflite_mode(self):\n return getattr(self, \"_tflite_mode\", False)",
"def deploy_mode(self):\n return self.nodes[0].get('infos').get('system_info').get('deploy_mode')",
"def auto_mode(self):\n return self._auto_mode",
"def gateway_slb_mode(self) -> str:\n return pulumi.get(self, \"gateway_slb_mode\")",
"def get_boot_device(self):\n operation = 'get_boot_device'\n try:\n boot_device = self.sp_manager.get_boot_device()\n return boot_device\n except UcsException as ex:\n print(_(\"Cisco client exception: %(msg)s.\"), {'msg': ex})\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def get_window_mode(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetWindowMode', self.handle)",
"def get_cpu_mode(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetCpuMode', self.handle)",
"def mode(self):\n return self._mode_func",
"def mode(self) -> Mode:\n return self._mode",
"def set_boot_mode(self, task, mode):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='set_boot_mode')",
"def get_persistent_boot_device(self):\n system = self._get_host_details()\n try:\n # Return boot device if it is persistent.\n if system['Boot']['BootSourceOverrideEnabled'] == 'Continuous':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n # Check if we are in BIOS boot mode.\n # There is no resource to fetch boot device order for BIOS boot mode\n if not self._is_boot_mode_uefi():\n return None\n\n # Get persistent boot device order for UEFI\n boot_sources, boot_devices = self._get_persistent_boot_devices()\n\n boot_string = \"\"\n try:\n for source in boot_sources:\n if (source[\"StructuredBootString\"] == boot_devices[0]):\n boot_string = source[\"BootString\"]\n break\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n if 'HP iLO Virtual USB CD' in boot_string:\n return 'CDROM'\n\n elif ('NIC' in boot_string or\n 'PXE' in boot_string or\n \"iSCSI\" in boot_string):\n return 'NETWORK'\n\n elif common.isDisk(boot_string):\n return 'HDD'\n\n else:\n return None",
"def login_mode(self) -> str:\n return pulumi.get(self, \"login_mode\")",
"def carpet_mode(self):\n return CarpetModeStatus(self.send(\"get_carpet_mode\")[0])",
"def mode_remote(self):\n self.send(\"!MR\")\n # time.sleep(2.0)\n # No feedback, so query to verify set\n got = self.get_mode()\n assert got == \"R\", got",
"def mode(self) -> int:\n return self._mode",
"def server_type(self):\n return self._server_type",
"def usb_mode() -> str:",
"def _get_state(self):\n fw_wp_en = (self._interface.get('fw_wp_en') == 'on')\n fw_wp = (self._interface.get('fw_wp') == 'on')\n if fw_wp_en:\n return self._STATE_FORCE_ON if fw_wp else self._STATE_FORCE_OFF\n else:\n return self._STATE_ON if fw_wp else self._STATE_OFF"
] | [
"0.77743053",
"0.75218076",
"0.7441925",
"0.7247707",
"0.7066327",
"0.6706312",
"0.6605948",
"0.6499532",
"0.64143753",
"0.6414196",
"0.64062566",
"0.6375331",
"0.6338292",
"0.63119465",
"0.6295608",
"0.6240478",
"0.6215727",
"0.61063474",
"0.60902596",
"0.60828465",
"0.6080002",
"0.6025562",
"0.60031265",
"0.59920686",
"0.59810793",
"0.5972829",
"0.59713215",
"0.5922564",
"0.5886268",
"0.5886268",
"0.588506",
"0.58533984",
"0.58533984",
"0.5848123",
"0.58303976",
"0.5826041",
"0.58028436",
"0.57799166",
"0.5779678",
"0.57574403",
"0.5754634",
"0.57531023",
"0.57397366",
"0.57169247",
"0.56939006",
"0.56939006",
"0.56939006",
"0.56846845",
"0.5682197",
"0.56721187",
"0.56711376",
"0.56667817",
"0.56667817",
"0.56667817",
"0.564777",
"0.5645513",
"0.5640453",
"0.56314147",
"0.56292033",
"0.5628172",
"0.562225",
"0.5619066",
"0.5603542",
"0.5603066",
"0.55970824",
"0.5595717",
"0.5591724",
"0.55735695",
"0.55657345",
"0.5556417",
"0.5553107",
"0.55529565",
"0.5522982",
"0.5513491",
"0.5503157",
"0.54970336",
"0.54897696",
"0.5484133",
"0.54739815",
"0.54589313",
"0.5447729",
"0.5447729",
"0.5421502",
"0.54141176",
"0.5401253",
"0.54004043",
"0.53924435",
"0.53896654",
"0.5383479",
"0.5379834",
"0.5375683",
"0.5366163",
"0.5363878",
"0.5348241",
"0.53288496",
"0.53271157",
"0.53199184",
"0.5319152",
"0.53177047",
"0.52984667"
] | 0.78080875 | 0 |
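Because get_current_boot_mode() maps the vendor value 'LegacyBios' to 'legacy' and upper-cases the result, callers only ever see the two canonical strings 'UEFI' and 'LEGACY'. A minimal sketch of the consuming pattern, assuming a client object that exposes the method above (several negatives in this row, such as _is_boot_mode_uefi, use exactly this check):

    def is_boot_mode_uefi(client):
        # get_current_boot_mode() returns 'UEFI' or 'LEGACY', so a plain
        # string comparison is sufficient here.
        return client.get_current_boot_mode() == 'UEFI'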
Retrieves the pending boot mode of the server. Gets the boot mode to be set on next reset. | def get_pending_boot_mode(self):
headers, uri, bios_settings = self._check_bios_resource(['BootMode'])
_, _, settings = self._get_bios_settings_resource(bios_settings)
boot_mode = settings.get('BootMode')
if boot_mode == 'LegacyBios':
boot_mode = 'legacy'
return boot_mode.upper() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_boot_mode():\n boot_mode = 'Legacy'\n try:\n reg_key = winreg.OpenKey(\n winreg.HKEY_LOCAL_MACHINE, r'System\\CurrentControlSet\\Control')\n reg_value = winreg.QueryValueEx(reg_key, 'PEFirmwareType')[0]\n if reg_value == 2:\n boot_mode = 'UEFI'\n except:\n boot_mode = 'Unknown'\n\n return boot_mode",
"def get_current_boot_mode(self):\n boot_mode = self._get_bios_setting('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n\n return boot_mode.upper()",
"def get_boot_mode(self, task):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='get_boot_mode')",
"def get_boot_mode(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def get_secure_boot_mode(self):\n system = self._get_host_details()\n\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = ('\"SecureBoot\" resource or feature is not supported'\n ' on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # get the Secure Boot object\n status, headers, secure_boot_settings = self._rest_get(secure_boot_uri)\n\n if status >= 300:\n msg = self._get_extended_error(secure_boot_settings)\n raise exception.IloError(msg)\n\n return secure_boot_settings['SecureBootCurrentState']",
"def get_secure_boot_state(self, task):\n return irmc_common.get_secure_boot_mode(task.node)",
"def get_secure_boot_mode(self):\n sushy_system = self._get_sushy_system()\n try:\n secure_boot_enabled = GET_SECUREBOOT_CURRENT_BOOT_MAP.get(\n sushy_system.secure_boot.current_boot)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to provide '\n 'information about secure boot on the server. '\n 'Error: %(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexCommandNotSupportedError(msg)\n\n if secure_boot_enabled:\n LOG.debug(self._(\"Secure boot is Enabled\"))\n else:\n LOG.debug(self._(\"Secure boot is Disabled\"))\n return secure_boot_enabled",
"def set_bootloader_mode(self, mode):\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 'B')",
"def set_bootloader_mode(self, mode):\n self.check_validity()\n\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')",
"def get_mode(self) -> str:\n\n return self.send(self.cmd.GET_MODE)",
"def set_pending_boot_mode(self, boot_mode):\n boot_mode = boot_mode.lower()\n if boot_mode not in ['uefi', 'legacy']:\n msg = 'Invalid Boot mode specified'\n raise exception.IloInvalidInputError(msg)\n\n boot_properties = {'BootMode': boot_mode}\n\n if boot_mode == 'legacy':\n boot_properties['BootMode'] = 'LegacyBios'\n else:\n # If Boot Mode is 'Uefi' set the UEFIOptimizedBoot first.\n boot_properties['UefiOptimizedBoot'] = \"Enabled\"\n\n # Change the Boot Mode\n self._change_bios_setting(boot_properties)",
"def get_mode(self, port):\n port = int(port)\n self._validate_port(\"get_mode\", port)\n flags = self._regex_shell_fn(\n self._command_dict[\"GET_MODE\"].format(port),\n self._regex_dict[\"GET_MODE_REGEX\"],\n tries=5)\n\n if \"O\" in flags:\n mode = OFF\n elif \"S\" in flags:\n mode = SYNC\n else:\n mode = CHARGE\n return mode",
"def get_mode(self):\r\n return self._api.get_mode()",
"def get_boot_driver(self):\n return self._boot_driver",
"def test_get_bios_boot_mode_by_moid(self):\n pass",
"def test_get_bios_boot_mode_list(self):\n pass",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def wait_boot(self) -> int:\n return self._data[ATTR_WAIT_BOOT]",
"def getMode(self):\n with self.lock:\n mode = self.mode\n return mode",
"def get_supported_boot_modes(self, task):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='get_supported_boot_modes')",
"def get_preferred_mode(self):\n ret = self._transfer(TVGetModes())\n return ret.data if isinstance(ret, RaspiAckMsg) and ret.ack else None",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def get_current_mode(self):\n return self.read(0xa2)",
"def getMode(self):\n return self._mode",
"def system(self, mode=None):\n if mode == System.AUTO:\n self.change_request[\"SystemSwitch\"] = System.AUTO\n elif mode == System.COOL:\n self.change_request[\"SystemSwitch\"] = System.COOL\n elif mode == System.HEAT:\n self.change_request[\"SystemSwitch\"] = System.HEAT\n elif mode == System.OFF:\n self.change_request[\"SystemSwitch\"] = System.OFF\n else:\n return False\n return self.change_request[\"SystemSwitch\"]",
"def getmode(self):\n return self.mode",
"def preset_mode(self) -> str | None:\n\n if self._device.tcs.system_mode is None:\n return # unable to determine\n # if self._device.tcs.system_mode[CONF_SYSTEM_MODE] in MODE_TCS_TO_HA:\n if self._device.tcs.system_mode[CONF_SYSTEM_MODE] in (\n SystemMode.AWAY,\n SystemMode.HEAT_OFF,\n ):\n return PRESET_TCS_TO_HA[self._device.tcs.system_mode[CONF_SYSTEM_MODE]]\n\n if self._device.mode is None:\n return # unable to determine\n if self._device.mode[CONF_MODE] == ZoneMode.SCHEDULE:\n return PRESET_TCS_TO_HA[self._device.tcs.system_mode[CONF_SYSTEM_MODE]]\n return PRESET_ZONE_TO_HA.get(self._device.mode[CONF_MODE])",
"def get_mode(self):\r\n return self.mode",
"def mode_remote(self):\n self.send(\"!MR\")\n # time.sleep(2.0)\n # No feedback, so query to verify set\n got = self.get_mode()\n assert got == \"R\", got",
"def _get_state(self):\n fw_wp_en = (self._interface.get('fw_wp_en') == 'on')\n fw_wp = (self._interface.get('fw_wp') == 'on')\n if fw_wp_en:\n return self._STATE_FORCE_ON if fw_wp else self._STATE_FORCE_OFF\n else:\n return self._STATE_ON if fw_wp else self._STATE_OFF",
"def get_app_mode(self):\n\t\treturn call_sdk_function('PrlApi_GetAppMode')",
"def set_boot_mode(self, task, mode):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='set_boot_mode')",
"def preset_mode(self) -> str | None:\n\n if self._device.system_mode is None:\n return # unable to determine\n return PRESET_TCS_TO_HA[self._device.system_mode[CONF_SYSTEM_MODE]]",
"def mode(self):\n return self._data.get('mode', None)",
"def is_bootable(self):\n return self.bootable_flag == 0x80",
"def _get_mode(self):\n self._validate_mode()\n return deepcopy(self.mode)",
"def get_mode(self, ):\n return self.get_parameter('mode')",
"def get_sync_mode():\n return sync_mode",
"def get_socket_mode(self):\n\t\treturn call_sdk_function('PrlVmDevSerial_GetSocketMode', self.handle)",
"def hvac_mode(self):\n if self.ac.status is None:\n _LOGGER.debug(f\"hvac_mode: status is None, returning None\")\n return None\n if self.ac.status.is_on:\n ac_mode = self.ac.status.ac_mode\n value = self.HVAC_MODE_MAPPING[ac_mode]\n _LOGGER.debug(f\"hvac_mode: returning {value} (derived from {ac_mode})\")\n return value\n else:\n _LOGGER.debug(f\"hvac_mode: returning HVAC_MODE_OFF - device is off\")\n return HVAC_MODE_OFF",
"def get_pump_mode(self):\n return self.__pump_mode",
"def power_mode(self) -> str:\n return self._device_info[\"PowerMode\"]",
"def status(self):\n ret = self.dev.ctrl_transfer(0xc0, 0x01, 0x0081, 0x0000, 0x0001)\n if ret[0] == 0xa0:\n return self.POWER_ON\n return self.POWER_OFF",
"def safe_boot_disabled(self):\n return self._safe_boot_disabled",
"def drmode(self):\n data = self._ftdi.spi_read(self.DRMODE_ADDR, len=1, burst='fixed')\n return data[0] & self.DRMODE_MASK",
"def mode(self):\n if self._vsanobj.id is None:\n raise VsanNotPresent(\"Vsan \" + str(self._vsanobj._id) + \" is not present on the switch.\")\n out = self.__show_zone_status()\n return out[get_key(zonekeys.MODE, self._SW_VER)]",
"def get_mode(guild_id: int):\n key = _mode_key(guild_id)\n if key not in db:\n return fixtures.chat\n return db[key]",
"def pull_request_mode(self):\n return self._pull_request_mode",
"def mode(self) -> Optional[pulumi.Input['WorkloadMetadataConfigMode']]:\n return pulumi.get(self, \"mode\")",
"def get_type(self):\n\t\treturn call_sdk_function('PrlBootDev_GetType', self.handle)",
"def test_update_bios_boot_mode(self):\n pass",
"def get_status():\n return ('off', 'off')",
"def drive_mode(self):\n return self._read(MX_DRIVE_MODE)",
"def get_mode(self):\r\n _debug('simq03b_api.get_mode')\r\n \r\n s = self.query('FREQ:MODE?')\r\n if s == None: return None\r\n \r\n s = s.strip()\r\n if s == 'CW': return 'Fixed'\r\n elif s == 'LIST': return 'List'\r\n else:\r\n print('ERROR: Unknown mode '+str(s))\r\n return",
"def mode(self):\n return self._mode",
"def mode(self):\n return self._mode",
"def mode(self):\n return self._mode",
"def get_persistent_boot_device(self):\n system = self._get_host_details()\n try:\n # Return boot device if it is persistent.\n if system['Boot']['BootSourceOverrideEnabled'] == 'Continuous':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n # Check if we are in BIOS boot mode.\n # There is no resource to fetch boot device order for BIOS boot mode\n if not self._is_boot_mode_uefi():\n return None\n\n # Get persistent boot device order for UEFI\n boot_sources, boot_devices = self._get_persistent_boot_devices()\n\n boot_string = \"\"\n try:\n for source in boot_sources:\n if (source[\"StructuredBootString\"] == boot_devices[0]):\n boot_string = source[\"BootString\"]\n break\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n if 'HP iLO Virtual USB CD' in boot_string:\n return 'CDROM'\n\n elif ('NIC' in boot_string or\n 'PXE' in boot_string or\n \"iSCSI\" in boot_string):\n return 'NETWORK'\n\n elif common.isDisk(boot_string):\n return 'HDD'\n\n else:\n return None",
"def mode(self):\r\n return self._mode",
"def mode(self):\r\n return self._mode",
"def mode(self):\r\n return self._mode",
"def mode(self) -> Optional[str]:\n return pulumi.get(self, \"mode\")",
"def mode(self) -> Optional[str]:\n return pulumi.get(self, \"mode\")",
"def preset_mode(self):\n return self._preset_mode",
"def mode(self):\n\n return self._mode",
"def mode(self) -> Optional[str]:\n for mode in self._modes:\n if mode.active:\n return mode.name\n return None",
"def supported_modes(self):\n return [OFF, SYNC, CHARGE]",
"def networkMode(self):\n\n response = self.at.sendCommand(\"AT+CEREG?\")\n\n # If we failed to query the network mode, that's a paddlin'\n if not response:\n raise modem.AtError(response, \"Failed to query network mode\")\n\n lines = response.lines\n\n if len(lines) < 1:\n raise modem.AtError(response, \"Invalid network mode response\")\n\n fields = lines[0].split(\",\")\n\n # If there isn't at least the prefix and the current mode, that's a\n # paddlin'\n if len(fields) < 2:\n raise modem.AtError(response, \"Invalid network mode response\")\n\n try:\n return int(fields[1])\n\n except ValueError:\n raise modem.AtError(response, \"Invalid network mode\")",
"def get_state(self):\n ret = self.send(\"?S\", recv=True)\n assert ret in \"WDR\"\n return ret",
"def _get_bios_boot_resource(self, data):\n try:\n boot_uri = data['links']['Boot']['href']\n except KeyError:\n msg = ('Boot resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, boot_settings = self._rest_get(boot_uri)\n\n if status != 200:\n msg = self._get_extended_error(boot_settings)\n raise exception.IloError(msg)\n\n return boot_settings",
"def get_mode_parameter(mode):\n if mode == 'job':\n return 'cli'\n elif mode == 'serve':\n return 'serving'\n else:\n return mode",
"def _get_mode():\n return context.get_context('mode')",
"def get_bootarch(self):\n return self._bootarch",
"def state(self):\n if self._is_standby:\n return STATE_OFF\n else:\n return STATE_PLAYING",
"def currentMode(self):\n logger.debug(\"Func: currentMode/getter\")\n\n return self._currentsDict[\"currentMode\"]",
"def getModemMode(self, unitCode=0):\n resp = self.XAPCommand('MDMODE', unitCode=unitCode)\n return bool(int(resp))",
"def state(self):\n if 'power' in self._status and self._status['power'] == '0':\n return STATE_OFF\n if 'mode' in self._status:\n if self._status['mode'] == 'pause':\n return STATE_PAUSED\n if self._status['mode'] == 'play':\n return STATE_PLAYING\n if self._status['mode'] == 'stop':\n return STATE_IDLE\n return STATE_UNKNOWN",
"def data_pull_mode(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"data_pull_mode\")",
"def get_baud_status(self):\n self.read(\":SYST:BAUD?\")",
"def mode(self) -> Optional[pulumi.Input[Union[str, 'Mode']]]:\n return pulumi.get(self, \"mode\")",
"def mode(self):\n return self.__mode",
"def boot_in_play(self):\n return self._boot_in_play",
"def power_mode(self):\n if not self.eve_type.is_upwell_structure:\n return None\n\n if self.fuel_expires_at and self.fuel_expires_at > now():\n return self.PowerMode.FULL_POWER\n\n elif self.last_online_at:\n if self.last_online_at >= now() - timedelta(days=7):\n return self.PowerMode.LOW_POWER\n else:\n return self.PowerMode.ABANDONED\n\n elif self.state in {self.State.ANCHORING, self.State.ANCHOR_VULNERABLE}:\n return self.PowerMode.LOW_POWER\n\n else:\n return self.PowerMode.LOW_ABANDONED",
"def GetPackageModes(self):\n return self._modes",
"def make_BootModeSetting(manageMode, mode, pxeBootPolicy):\n return {'manageMode': manageMode,\n 'mode': mode,\n 'pxeBootPolicy': pxeBootPolicy\n }",
"def get_all_servers_modes():\n return _get_list(\n lambda server: server.mode,\n lambda server: server.mode_name_long\n )",
"def test_patch_bios_boot_mode(self):\n pass",
"def mode(self):\n return self._lift(\"mode\")",
"def is_http_boot_requested(node):\n http_boot_requested = (\n str(node.driver_info.get('enable_uefi_httpboot', 'false')).lower())\n return http_boot_requested == 'true'",
"def game_mode(self):\n return self._get(\"game_mode\")",
"def _get_mode(self):\n raise NotImplementedError",
"def get_cpu_mode(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetCpuMode', self.handle)",
"def carpet_mode(self):\n return CarpetModeStatus(self.send(\"get_carpet_mode\")[0])",
"def hvac_mode(self) -> str | None:\n\n if self._device.tcs.system_mode is None:\n return # unable to determine\n if self._device.tcs.system_mode[CONF_SYSTEM_MODE] == SystemMode.AWAY:\n return HVACMode.AUTO\n if self._device.tcs.system_mode[CONF_SYSTEM_MODE] == SystemMode.HEAT_OFF:\n return HVACMode.OFF\n\n if self._device.mode is None or self._device.mode[ATTR_SETPOINT] is None:\n return # unable to determine\n if (\n self._device.config\n and self._device.mode[ATTR_SETPOINT] <= self._device.config[\"min_temp\"]\n ):\n return HVACMode.OFF\n return HVACMode.HEAT",
"def get_mode(self):\r\n s = self.query('FREQ:MODE?')\r\n if s == None: return None\r\n \r\n s = s.strip()\r\n if s == 'FIX': return 'Fixed'\r\n elif s == 'LIST': return 'List'\r\n else:\r\n print('ERROR: Unknown mode '+str(s))\r\n return",
"def mode(self) -> Mode:\n return self._mode",
"def dev_mode(self):\r\n return self._dev_mode",
"def get_restart_mode(restart_file):\n if os.path.exists(restart_file):\n with open(restart_file, 'r') as f:\n return f.read()\n return \"shutdown\"",
"def tflite_mode(self):\n return getattr(self, \"_tflite_mode\", False)"
] | [
"0.70903546",
"0.69976276",
"0.6860447",
"0.6815118",
"0.66070074",
"0.62555677",
"0.6129522",
"0.6030498",
"0.6028887",
"0.6026956",
"0.6023533",
"0.5965988",
"0.58770573",
"0.5847371",
"0.5799201",
"0.5777145",
"0.5753953",
"0.57251155",
"0.5688327",
"0.56876516",
"0.5649138",
"0.56454",
"0.56306136",
"0.5603256",
"0.5579972",
"0.5577915",
"0.55605006",
"0.554544",
"0.55343264",
"0.552363",
"0.5511957",
"0.5498863",
"0.5493461",
"0.5480584",
"0.5460996",
"0.5445978",
"0.5443451",
"0.5432479",
"0.5415555",
"0.5384764",
"0.53817475",
"0.5339523",
"0.53013474",
"0.52810615",
"0.5275968",
"0.5275576",
"0.52753764",
"0.5247507",
"0.52413404",
"0.5238233",
"0.5229354",
"0.5213794",
"0.51953185",
"0.5193716",
"0.51934326",
"0.5188454",
"0.5188454",
"0.5188454",
"0.5177945",
"0.5173143",
"0.5173143",
"0.5173143",
"0.5168721",
"0.5168721",
"0.5166359",
"0.5155936",
"0.5141971",
"0.51368797",
"0.5134763",
"0.5129027",
"0.5125951",
"0.5122623",
"0.51204556",
"0.51180166",
"0.5113788",
"0.5106675",
"0.51045245",
"0.5092605",
"0.50906014",
"0.5089864",
"0.50865",
"0.508408",
"0.5081396",
"0.5077801",
"0.5077566",
"0.50713176",
"0.5056086",
"0.5052984",
"0.5052684",
"0.5048458",
"0.50423265",
"0.5037424",
"0.50371146",
"0.50306165",
"0.50292915",
"0.5027459",
"0.502475",
"0.50226575",
"0.50033647",
"0.49920008"
] | 0.8131488 | 0 |
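get_pending_boot_mode() reads the BIOS settings (pending) resource rather than the active one, but applies the same 'LegacyBios' -> 'legacy' -> upper-case normalization, so the pending and current values compare directly. A small sketch, assuming a client that exposes both getters, for detecting a boot-mode change that will only take effect after the next reset:

    def boot_mode_change_pending(client):
        # Both getters return 'UEFI' or 'LEGACY'; a mismatch means the
        # pending BIOS settings differ from the active configuration and
        # a reset is required for the change to apply.
        return client.get_pending_boot_mode() != client.get_current_boot_mode()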
Sets the boot mode of the system for next boot. | def set_pending_boot_mode(self, boot_mode):
boot_mode = boot_mode.lower()
if boot_mode not in ['uefi', 'legacy']:
msg = 'Invalid Boot mode specified'
raise exception.IloInvalidInputError(msg)
boot_properties = {'BootMode': boot_mode}
if boot_mode == 'legacy':
boot_properties['BootMode'] = 'LegacyBios'
else:
# If Boot Mode is 'Uefi' set the UEFIOptimizedBoot first.
boot_properties['UefiOptimizedBoot'] = "Enabled"
# Change the Boot Mode
self._change_bios_setting(boot_properties) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_boot_mode(self, task, mode):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='set_boot_mode')",
"def set_bootloader_mode(self, mode):\n self.check_validity()\n\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')",
"def set_bootloader_mode(self, mode):\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 'B')",
"def boot(self, boot):\n\n self._boot = boot",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def test_update_bios_boot_mode(self):\n pass",
"def test_patch_bios_boot_mode(self):\n pass",
"def boot(self):\n\n pass",
"def get_boot_mode(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def get_boot_mode(self, task):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='get_boot_mode')",
"def set_automatic(self, mode):\n self.slam.controlled = not mode\n if mode:\n self.slam.resume()",
"def set_one_time_boot(self, device, mac=None):\n self._update_persistent_boot([device], persistent=False, mac=mac)",
"def get_current_boot_mode(self):\n boot_mode = self._get_bios_setting('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n\n return boot_mode.upper()",
"def set_drive_mode(mode):",
"def wait_boot(self, value: int) -> None:\n self._data[ATTR_WAIT_BOOT] = value",
"def setmode(self, mode):\n # ueberpruefe, ob der Modus gueltig ist\n if mode in [GPIO.BCM, GPIO.BOARD]:\n self.mode = mode\n print(f\"Modus auf {mode} gesetzt\")\n else:\n raise ValueError(\"An invalid mode was passed to setmode()\")",
"def get_boot_mode():\n boot_mode = 'Legacy'\n try:\n reg_key = winreg.OpenKey(\n winreg.HKEY_LOCAL_MACHINE, r'System\\CurrentControlSet\\Control')\n reg_value = winreg.QueryValueEx(reg_key, 'PEFirmwareType')[0]\n if reg_value == 2:\n boot_mode = 'UEFI'\n except:\n boot_mode = 'Unknown'\n\n return boot_mode",
"def set_secure_boot_mode(self, secure_boot_enable):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('SecureBootEnable',\n secure_boot_enable)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def _select_mode(self):\n self.__check_mode()\n if self.mode[\"auto_mode\"]:\n self.mode_auto()\n elif self.mode[\"auto_mode\"] is None: # Do Nothing\n self.mode_standby()\n else:\n self.mode_manual()",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def get_pending_boot_mode(self):\n headers, uri, bios_settings = self._check_bios_resource(['BootMode'])\n _, _, settings = self._get_bios_settings_resource(bios_settings)\n boot_mode = settings.get('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n return boot_mode.upper()",
"def set_boot_order(profile_obj):\n status = True\n logger._log_to_console_and_log_file(\"\")\n logger._log_to_console_and_log_file(\"### Testing the 'Boot Settings' session ###\")\n logger._log_to_console_and_log_file(\"- Select the 'Legacy BIOS' mode\")\n createprofile_elements = ProfileContainer(ProfileContainerType.ADD)\n __select_value_from_a_profile_combo_box(createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE, createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE_LIST % \"Legacy BIOS\")\n # Set invalid values\n logger._log_to_console_and_log_file(\"Testing using invalid values\")\n for profile in profile_obj:\n items = [[\"CD\", profile.cd], [\"USB\", profile.usb], [\"HardDisk\", profile.harddisk]]\n for data in items:\n ui_lib.wait_for_element_and_input_text(\"name=%s\" % data[0], data[1])\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_CREATE_SERVER_PROFILE_FORM)\n if data[0] == \"HardDisk\":\n data[0] = \"Hard Disk\"\n if ui_lib.wait_for_element_text(FusionServerProfilesPage.ID_BOOT_ORDER_POSITION % data[0], data[1], timeout=1):\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was not cleared to the default value and persisted as '\" + str(data[1]) + \"'\")\n status = False\n else:\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was correctly cleared to the default value\")\n return status",
"def mode(self, mode):\n self.set_mode(mode)",
"def set_mode(self, mode):\n if mode in self.MODES:\n self.mode = self.MODES[mode]",
"def set_mode(self, mode):\n print('set_mode', mode)\n self._mode = int(mode)",
"def set_manual_mode(self):\n self._kernel.set_manual_mode()",
"def setbacklight(self, backlight=True):\n if backlight:\n self._backlight = 0x08\n else:\n self._backlight = 0x00\n\n self.lcd_byte(0x00 ,LCD_CMD)",
"def set_mode(self, mode=0, detection_param=0):\r\n return self._arm.set_mode(mode=mode, detection_param=detection_param)",
"def set_vm_status(self, boot_on_next_reset):\n data = {\n \"Oem\": {\n \"Hpe\": {\n \"BootOnNextServerReset\": boot_on_next_reset\n }\n }\n }\n self._conn.patch(self.path, data=data)",
"def set_preset_mode(self, preset_mode: str | None) -> None:\n self.svc_set_system_mode(PRESET_TO_TCS.get(preset_mode, SystemMode.AUTO))",
"def setMode(cls, mode):\n global CURRENT_MODE\n assert isinstance(mode, cls), \"Invalid mode {}\".format(mode)\n CURRENT_MODE = mode",
"def SetStandbyFPMode(self):\n handler = self.get_command_object(\"SetStandbyFPMode\")\n handler()",
"def set_secure_boot_mode(self, secure_boot_enable):\n sushy_system = self._get_sushy_system()\n try:\n sushy_system.secure_boot.enable_secure_boot(secure_boot_enable)\n except exception.InvalidInputError as e:\n msg = (self._('Invalid input. Error %(error)s')\n % {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to set secure '\n 'boot settings on the server. Error: %(error)s')\n % {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def set_type(self, nDevType):\n\t\tcall_sdk_function('PrlBootDev_SetType', self.handle, nDevType)",
"def set_mode(vehicle, mode):\n util.log_info(\"Setting %s.\" % mode)\n shared.status['manual_mode'] = mode\n vehicle.mode = VehicleMode(mode)\n \n wait_count = 0 \n while True:\n time.sleep(.2)\n wait_count = wait_count + 1\n \n if vehicle.mode.name == mode :\n return True\n \n elif wait_count >= 45:\n util.log_warning(\"Unable to set %s. Assume link lost.\" % mode)\n shared.status['abort'] = True\n return False\n \n elif wait_count % 15 == 0 :\n util.log_warning(\"Retry setting %s\" % mode)\n vehicle.mode = VehicleMode(mode) # resend command",
"def step7(self):\n for indx, mr in enumerate(self.mrs):\n self.log.info(\"Set boot drive on controller:%d\"\n % (mr.ctrl_id))\n for vd in self.mr_vds[indx]:\n if (int(mr.cli.bootdrive_vd_get()) != vd):\n mr.cli.bootdrive_vd_set(vd_id=self.mr_vds[indx][indx],\n setting=\"On\")\n break",
"def make_BootModeSetting(manageMode, mode, pxeBootPolicy):\n return {'manageMode': manageMode,\n 'mode': mode,\n 'pxeBootPolicy': pxeBootPolicy\n }",
"def set_secure_boot_state(self, task, state):\n return irmc_common.set_secure_boot_mode(task.node, state)",
"def set_boot_options(self, image_name, **vendor_specifics):\n current_boot = self.show(\"show running-config | inc ^boot system \")\n file_system = vendor_specifics.get(\"file_system\")\n if file_system is None:\n file_system = self._get_file_system()\n\n file_system_files = self.show(f\"dir {file_system}\")\n if re.search(image_name, file_system_files) is None:\n log.error(\"Host %s: File not found error for image %s.\", self.host, image_name)\n raise NTCFileNotFoundError(\n # TODO: Update to use hostname\n hostname=self.host,\n file=image_name,\n directory=file_system,\n )\n\n current_images = current_boot.splitlines()\n commands_to_exec = [f\"no {image}\" for image in current_images]\n commands_to_exec.append(f\"boot system {file_system}/{image_name}\")\n self.config(commands_to_exec)\n\n self.save()\n if self.boot_options[\"sys\"] != image_name:\n log.error(\"Host %s: Setting boot command did not yield expected results\", self.host)\n raise CommandError(\n command=f\"boot system {file_system}/{image_name}\",\n message=\"Setting boot command did not yield expected results\",\n )\n\n log.info(\"Host %s: boot options have been set to %s\", self.host, image_name)",
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def change_mode(self):\n master.destroy()\n os.system(\"add_mode_run.py\")",
"def set_freerun_mode(self, mode):\n if (mode):\n self._set_register_field(SI5324._FIELD_Free_Run_Mode, 1)\n else:\n self._set_register_field(SI5324._FIELD_Free_Run_Mode, 0)",
"def idle(self) -> None:\n # Like RadioHead library, turn off high power boost if enabled.\n self.set_boost(_TEST_PA1_NORMAL)\n self.operation_mode = STANDBY_MODE",
"def set_boot_device(self, task, device, persistent=False):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified.\") % device)\n\n uefi_mode = (\n boot_mode_utils.get_boot_mode(task.node) == 'uefi')\n\n # disable 60 secs timer\n timeout_disable = \"0x00 0x08 0x03 0x08\"\n ipmitool.send_raw(task, timeout_disable)\n\n # note(naohirot):\n # Set System Boot Options : ipmi cmd '0x08', bootparam '0x05'\n #\n # $ ipmitool raw 0x00 0x08 0x05 data1 data2 0x00 0x00 0x00\n #\n # data1 : '0xe0' persistent + uefi\n # '0xc0' persistent + bios\n # '0xa0' next only + uefi\n # '0x80' next only + bios\n # data2 : boot device defined in the dict _BOOTPARAM5_DATA2\n\n bootparam5 = '0x00 0x08 0x05 %s %s 0x00 0x00 0x00'\n if persistent:\n data1 = '0xe0' if uefi_mode else '0xc0'\n else:\n data1 = '0xa0' if uefi_mode else '0x80'\n data2 = _BOOTPARAM5_DATA2[device]\n\n cmd8 = bootparam5 % (data1, data2)\n ipmitool.send_raw(task, cmd8)\n else:\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified. \"\n \"Current iRMC firmware condition doesn't support IPMI \"\n \"but Redfish.\") % device)\n super(ipmitool.IPMIManagement, self).set_boot_device(\n task, device, persistent)",
"def system(self, mode=None):\n if mode == System.AUTO:\n self.change_request[\"SystemSwitch\"] = System.AUTO\n elif mode == System.COOL:\n self.change_request[\"SystemSwitch\"] = System.COOL\n elif mode == System.HEAT:\n self.change_request[\"SystemSwitch\"] = System.HEAT\n elif mode == System.OFF:\n self.change_request[\"SystemSwitch\"] = System.OFF\n else:\n return False\n return self.change_request[\"SystemSwitch\"]",
"def _boot_using_bootmon(self, target):\n self.logger.debug('Booting using bootmon.')\n\n try:\n self._wait_for_vemsd_mount(target, timeout=20)\n except DeviceError:\n # OK, something's wrong. Reboot the board and try again.\n self.logger.debug('VEMSD not mounted, attempting to power cycle device.')\n target.sendline(' ')\n state = target.expect(['Cmd> ', self.config.bootmon_prompt, self.android_prompt]) # pylint: disable=E1101\n\n if state == 0 or state == 1:\n # Reboot - Bootmon\n target.sendline('reboot')\n target.expect('Powering up system...')\n elif state == 2:\n target.sendline('reboot -n')\n target.expect('Powering up system...')\n else:\n raise DeviceError('Unexpected board state {}; should be 0, 1 or 2'.format(state))\n\n self._wait_for_vemsd_mount(target)\n\n self._setup_before_reboot()\n\n # Reboot - Bootmon\n self.logger.debug('Rebooting into bootloader...')\n open(os.path.join(self.config.root_mount, 'reboot.txt'), 'a').close()\n target.expect('Powering up system...')\n target.expect(self.config.bootmon_prompt)\n\n # Wait for VEMSD to mount\n self._wait_for_vemsd_mount(target)\n\n #Boot Linux - Bootmon\n target.sendline('fl linux fdt ' + self.config.dtb)\n target.expect(self.config.bootmon_prompt)\n target.sendline('fl linux initrd ' + self.config.initrd)\n target.expect(self.config.bootmon_prompt)\n #Workaround TC2 bootmon serial issue for loading large initrd blob\n target.sendline(' ')\n target.expect(self.config.bootmon_prompt)\n target.sendline('fl linux boot ' + self.config.kernel + self.config.kernel_arguments)",
"def mode_auto(self):\n if self.__check_mode_change():\n self.communications.set_status(\"Bot Auto Mode Set\")\n self.patrol()",
"def set_boot_device(self, device, persistent=False):\n\n operation = \"set_boot_device\"\n try:\n self.sp_manager.create_boot_policy()\n self.sp_manager.set_boot_device(device)\n\n except UcsException as ex:\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def set_mode(self, mode, port):\n port = int(port)\n self._validate_port(\"set_mode\", port)\n self._validate_mode(mode)\n logger.debug(\"{} setting power mode to {} for usb port {}\".format(\n self._device_name, mode, port))\n self._shell_fn(self._command_dict[\"SET_MODE\"].format(mode, port))",
"def set_mode(self, mode: str) -> None:\n # Not all programs are fully supported by the current\n # OpenInterface API version. The known restricitons are:\n # - The 'Calibration' and 'TightnessTest' programms cannot\n # be started through the API.\n # - The 'Dry' program does not expose all it's parameters\n # (see github.com/buchi-labortechnik-ag/openinterface_rotavapor/issues/1)\n return self.send(self.cmd.SET_MODE, mode)",
"def set_autofeed_mode(self, mode):\n self._info(\"set_autofeed_mode\")\n self.parent.controller.set_autofeed_mode(mode)",
"def mode(self, value):\n self._set_attr('mode', value)",
"def mode(self, mode):\n\n self._mode = mode",
"def mode(self, mode):\n\n self._mode = mode",
"def mode(self, mode):\n\n self._mode = mode",
"def setScalingMode(mode='down'):\n mdict = {'down':'DOWN','full':'FULL'}\n dislin.sclmod(mode)",
"def change_mode(self, mode):\r\n self.update_enrollment(mode=mode)",
"def mode (self, mode) :\r\n self.mode_ = mode",
"def svc_set_system_mode(self, mode, period=None, days=None) -> None:\n if period is not None:\n until = dt.now() + period\n elif days is not None:\n until = dt.now() + days # TODO: round down\n else:\n until = None\n self._call_client_api(self._device.set_mode, system_mode=mode, until=until)",
"def sleep_mode(self, value):\n if value:\n self._write(ST7789_SLPIN)\n else:\n self._write(ST7789_SLPOUT)",
"def setMode(self, mode):\n self.mode = mode\n if self.mode == 0:\n self.setDrawingMode()\n elif self.mode == 1:\n self.setConstructionMode()\n elif self.mode == 2:\n self.setDisplayMode()\n self.context.text.append(\"mode: \" + self.messages[self.mode])",
"async def async_set_preset_mode(self, preset_mode: str) -> None:\n if self._on != \"1\":\n if preset_mode == PRESET_NONE:\n return\n await self.async_turn_on()\n\n _LOGGER.debug(\"Setting preset mode of %s to %s\", self._unique_id, preset_mode)\n\n if preset_mode == PRESET_ECO:\n await self._device.command(\"energysave_on\")\n self._previous_state = preset_mode\n elif preset_mode == PRESET_BOOST:\n await self._device.command(\"turbo_on\")\n self._previous_state = preset_mode\n elif preset_mode == PRESET_SLEEP:\n await self._device.command(\"sleep_1\")\n self._previous_state = self._attr_hvac_mode\n elif preset_mode == \"sleep_2\":\n await self._device.command(\"sleep_2\")\n self._previous_state = self._attr_hvac_mode\n elif preset_mode == \"sleep_3\":\n await self._device.command(\"sleep_3\")\n self._previous_state = self._attr_hvac_mode\n elif preset_mode == \"sleep_4\":\n await self._device.command(\"sleep_4\")\n self._previous_state = self._attr_hvac_mode\n elif self._previous_state is not None:\n if self._previous_state == PRESET_ECO:\n await self._device.command(\"energysave_off\")\n elif self._previous_state == PRESET_BOOST:\n await self._device.command(\"turbo_off\")\n elif self._previous_state in HA_STATE_TO_AC:\n await self._device.command(HA_STATE_TO_AC[self._previous_state])\n self._previous_state = None",
"def set_mode(self,mode,state=True):\n\t\tprint \"SET_MODE START\"\n\t\tfor key,val in self.ms_all.iteritems():\n\t\t\tif val.index(mode) is not None:\n\t\t\t\tif state:\n\t\t\t\t\tval.activate( val.index(mode) )\n\t\t\t\telse:\n\t\t\t\t\tval.deactivate( val.index(mode) )\n\t\t\"\"\"\n\t\tprint \"SET_MODE DONE -- ALSO DOING EXPERIMENTAL -- \"\n\t\t# DEBUG / EXPERIMENTAL\n\t\tif self.int_encoder is not None:\n\t\t\tif mode == 'volume' and state == True and 'mode_timeout' in self.cfg_gpio and self.int_enabled:\n\t\t\t\tprint \"DEBUG2.. GPIO/VOLUME ({0}:{1}).. disabling our interrupts..\".format(mode,state)\n\t\t\t\tself.gpio.remove_event_detect(13)\n\t\t\t\tself.gpio.remove_event_detect(6)\n\t\t\t\tself.int_enabled = False\n\t\t\telif mode != 'volume' and state == True and 'mode_timeout' in self.cfg_gpio and not self.int_enabled:\n\t\t\t\tprint \"DEBUG2.. GPIO/NOT VOLUME ({0}:{1}).. enabling our interrupts..\".format(mode,state)\n\t\t\t\tself.gpio.setup((13,6), self.gpio.IN, pull_up_down=self.gpio.PUD_DOWN)\n\t\t\t\tself.gpio.add_event_detect(13, self.gpio.RISING, callback=self.int_encoder) # NO bouncetime \n\t\t\t\tself.gpio.add_event_detect(6, self.gpio.RISING, callback=self.int_encoder) # NO bouncetime\n\t\t\t\tself.int_enabled = True\n\t\t\telif mode == 'volume' and state == True and 'mode_timeout' not in self.cfg_gpio and not self.int_enabled:\n\t\t\t\tprint \"DEBUG2.. ECA/VOLUME ({0}:{1}).. enabling our interrupts..\".format(mode,state)\n\t\t\t\tself.gpio.setup((13,6), self.gpio.IN, pull_up_down=self.gpio.PUD_DOWN)\n\t\t\t\tself.gpio.add_event_detect(13, self.gpio.RISING, callback=self.int_encoder) # NO bouncetime \n\t\t\t\tself.gpio.add_event_detect(6, self.gpio.RISING, callback=self.int_encoder) # NO bouncetime\n\t\t\t\tself.int_enabled = True\n\t\t\telif mode != 'volume' and state == True and 'mode_timeout' not in self.cfg_gpio and self.int_enabled:\n\t\t\t\tprint \"DEBUG2.. ECA/NOT VOLUME ({0}:{1}).. disabling our interrupts..\".format(mode,state)\n\t\t\t\tself.gpio.remove_event_detect(13)\n\t\t\t\tself.gpio.remove_event_detect(6)\n\t\t\t\tself.int_enabled = False\n\t\t\tprint \"DEBUG2.. done\"\n\t\t\"\"\"",
"def set_mode(self, mode):\n self.mode = mode\n self.btn_mode.setText(f\"{mode.title()}\\u25BE\")\n self.state_changed()",
"def set_mode(self, mode):\n SetMode_srv = SetModeRequest(0, mode)\n response = self.set_mode_client(SetMode_srv)\n if response.mode_sent:\n rospy.loginfo(CGREEN2 + \"SetMode Was successful\" + CEND)\n return 0\n else:\n rospy.logerr(CRED2 + \"SetMode has failed\" + CEND)\n return -1",
"def _turn_on_dev_mode(self):\n if self._device is not None:\n self._char_write(self._BLE_SERVICE_ANTI_DOS,\n [ord(c) for c in self._ANTI_DOS_MESSAGE])\n self._char_write(self._BLE_SERVICE_TX_POWER,\n [self._TX_POWER_VALUE])\n # Sending 0x01 to the wake service wakes the sphero.\n self._char_write(self._BLE_SERVICE_WAKE, [0x01])",
"def reboot_fpga(self):\n log.info(\"Booting FPGA from SPI prom\")\n self.set(\"FPGA_CTRL\", \"boot_fpga\", 1);",
"def set_mode(self, new_mode):\n\n\t\tself._log.info('Mode changed to: %s' % new_mode.name)\n\t\tself._mode = new_mode\n\t\tself._dump_configuration()\n\t\tself._remove_all_flow_records()",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def SetBootloaderEnv(script, name, val):\n script.AppendExtra('set_bootloader_env(\"%s\", \"%s\");' % (name, val))",
"def set_bootmodules(self, modules):\n raise NotImplementedError",
"def change_system_mode_manual(self, session, schedule_id, period_id, mode):\n payload = [('zoneId', self.zone_id), ('lccId', self.lcc_id),\n ('scheduleId', schedule_id), ('periodId', period_id),\n ('mode', mode)]\n sysmode_url = IC3Session.create_url(IComfort3Zone.SYSMODE_MANUAL)\n resp = session.post_url_json(sysmode_url, post_data=payload, \n referer_url=self.ms_url)\n resp.raise_for_status\n return resp.status_code == 200",
"def svc_reset_system_mode(self) -> None:\n self._call_client_api(self._device.reset_mode)",
"def boot_linux(self, rootfs=None, bootargs=\"\"):\n common.print_bold(\"\\n===== Booting linux for %s =====\" % self.model)\n\n self.sendline('fdt addr $fdt_addr')\n self.expect(self.uprompt)\n self.sendline('fdt get value bcm_bootargs /chosen bootargs')\n self.expect(self.uprompt)\n\n self.sendline('setenv bootargs \"$bcm_bootargs %s\"' % bootargs)\n self.expect(self.uprompt)\n\n self.sendline(\n \"setenv bootcmd 'fatload mmc 0 ${kernel_addr_r} %s; bootm ${kernel_addr_r} - ${fdt_addr}; booti ${kernel_addr_r} - ${fdt_addr}'\"\n % getattr(self, 'kernel_file', 'uImage'))\n self.expect(self.uprompt)\n self.sendline('saveenv')\n self.expect(self.uprompt)\n self.sendline('boot')\n\n # Linux handles serial better ?\n self.delaybetweenchar = None",
"def set_autoreboot_status(self, status: int) -> str:\n return self._req_post(self._URLS['SetAutoreboot'], data={\"autoRebootEn\": status, \"delayRebootEn\": True, \"rebootTime\": \"02: 00\"})",
"def set_auto_pilot_mode(self):\n self._kernel.set_auto_pilot_mode()",
"def setFreedriveMode(self, freedrive_mode):\n if freedrive_mode:\n freedrive_mode = 1\n else:\n freedrive_mode = 0\n\n self.set_register(REG_TYPE, SET_FREE_DRIVE, 'int')\n self.set_register(REG_FREE_DRIVE_ACTIVE, freedrive_mode, 'int')",
"def is_bootable(self):\n return self.bootable_flag == 0x80",
"def boot():\r\n print \"\"\"\r\n ###### ## ## ### ## ## ## ## ######## ########\r\n ## ## ## ## ## ## ### ## ## ## ## ## ##\r\n ## #### ## ## #### ## ## ## ## ## ##\r\n ## ## ## ## ## ## ## ## ## ######## ######\r\n ## ## ######### ## #### ## ## ## ## ##\r\n ## ## ## ## ## ## ### ## ## ## ## ##\r\n ###### ## ## ## ## ## ####### ## ## ########\r\n\r\n Version %s-%s\r\n\r\n Multi Purpose Artificial Inelegance Program\r\n Copyright (c) Alexandre Gauthier 2010-2011\r\n All Rights Reserved\r\n \"\"\" % ( constants.VERSION, constants.TAGNAME )\r\n\r\n # Initialize log\r\n # TODO: The values should be read from config file.\r\n log.init_log('cyanure.log', 'DEBUG')\r\n\r\n logger.info(\"Cyanure system init: Version %s (%s)\" % (\r\n constants.VERSION, constants.TAGNAME ))",
"def handleModeToggle(self):\n self.filesList.changeMode(not self.autoMode)\n if self.autoMode:\n self.modeToggle.setText(\"Auto Mode\")\n self.mainWindow.setWindowTitle(\"CMAT (Manual Mode)\")\n else:\n self.modeToggle.setText(\"Manual Mode\")\n self.mainWindow.setWindowTitle(\"CMAT (Auto Mode)\")\n self.autoMode = not self.autoMode",
"def set_power(self, power: bool):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = power",
"def setMode(self, request, context):\n \n self.vehicle.mode = VehicleMode(str(request.mode))\n self.vehicle.wait_ready('mode')\n \n return droneconnect_pb2.Null()",
"def set_mode(self, mode):\n if mode == 'train':\n self.hidden = self._make_hidden(self.batch_size)\n elif mode == 'generate':\n self.hidden = self._make_hidden(1)",
"def set_mode(self, mode):\n if mode == 'train':\n self.hidden = self._make_hidden(self.batch_size)\n elif mode == 'generate':\n self.hidden = self._make_hidden(1)",
"def set_mode(self, mode='List'):\r\n _debug('simq03b_api.set_mode')\r\n \r\n #If we choose list mode \r\n if mode.lower() == 'list':\r\n #First choose a list if there was no, otherwise SMA100B is mad\r\n #To know the available list, the query is 'SOUR1:LIST:CAT?'\r\n self.write('SOUR1:LIST:SEL \"/var/user/list1.lsw\"') \r\n \r\n self.write('OUTP1:STAT ON') #Somehow the SMA100B wants the RF to be ON for switching into list mode.\r\n self.write('SOUR1:LIST:MODE STEP') #Make Step mode in order to not automatically sweep all the frequencies\r\n self.write('SOURce:FREQuency:MODE LIST')\r\n else:\r\n #CW and FIXed are synonyms for SMA100B\r\n self.write('SOURce:FREQuency:MODE CW')",
"def setMode(self,mode):\n self.mode=mode\n if self.mode==0:\n self.setDrawing()\n elif self.mode==1:\n self.setConstruction()\n elif self.mode==2:\n self.setDisplay()\n self.context.text.append(\"mode: \"+self.messages[self.mode])",
"def setMode(self, targetmode):\n self.resetStream()\n\n if targetmode not in self.prompts.keys():\n raise ValueError(\"Invalid Mode %s\" % targetmode)\n\n initialmode = self.getMode()\n if targetmode == initialmode:\n logger.debug(\"In %s mode\" % targetmode)\n return True\n\n logger.debug(\"Changing mode from '%s' to '%s' on %s\" % (initialmode, targetmode, self))\n\n # Provide all permutations of mode switching\n if targetmode == CLI_MODES.config and initialmode == CLI_MODES.enable:\n self._session.sendline(\"config terminal\")\n elif targetmode == CLI_MODES.config and initialmode == CLI_MODES.shell:\n self._session.sendline(\"cli -m config\")\n elif targetmode == CLI_MODES.config and initialmode == CLI_MODES.pmx:\n self._session.sendline(\"quit\")\n elif targetmode == CLI_MODES.enable and initialmode == CLI_MODES.shell:\n self._session.sendline(\"cli -m enable\")\n elif targetmode == CLI_MODES.enable and initialmode == CLI_MODES.config:\n self._session.sendline(\"exit\")\n elif targetmode == CLI_MODES.shell and initialmode == CLI_MODES.enable:\n self._session.sendline(\"_shell\")\n elif targetmode == CLI_MODES.shell and initialmode == CLI_MODES.config:\n self._session.sendline(\"_shell\")\n elif targetmode == CLI_MODES.shell and initialmode == CLI_MODES.mysql:\n self._session.sendline(\"quit\")\n elif targetmode == CLI_MODES.pmx:\n self.setMode(CLI_MODES.config)\n self._session.sendline(\"pmx\")\n elif targetmode == CLI_MODES.mysql:\n self.setMode(CLI_MODES.shell)\n self._session.sendline(\"idbmysql\")\n elif targetmode != CLI_MODES.config and initialmode == CLI_MODES.pmx:\n # Moving from pmx to other modes. Switch to config and proceed..\n self.setMode(CLI_MODES.config)\n self.setMode(targetmode)\n self._session.sendline(\"\") # Send empty line for guessMode to work\n elif targetmode != CLI_MODES.shell and initialmode == CLI_MODES.mysql:\n # Moving from mysql to other modes. Switch to shell and proceed..\n self.setMode(CLI_MODES.shell)\n self.setMode(targetmode)\n self._session.sendline(\"\") # Send empty line for guessMode to work\n else:\n raise ValueError(\"Invalid Mode combination. Targetmode: %s, Currentmode: %s\" % (targetmode, initialmode))\n\n finalmode = self.guessMode()\n logger.debug(\"Mode changed to %s mode\" % finalmode)\n if targetmode == finalmode:\n if finalmode == CLI_MODES.shell:\n self.initShell()\n return True\n else :\n # A user can be in pmx subshells. So we might need to get back a couple levels\n if finalmode == CLI_MODES.pmx and targetmode == CLI_MODES.config:\n return self.setMode(CLI_MODES.config)\n else:\n logger.warn(\"Unable to set '%s' mode\" % targetmode)\n return False",
"def turnOnSdkMode(self):\n \n command = b\"\\x90\\x01\\x01\"\n #print(\"turnOnSdkMode run, command: \")\n #print(command)\n \n self.sendCommand(command)",
"def set_mode(self, mode='List'):\r\n \r\n #If we choose list mode \r\n if mode.lower() == 'list':\r\n #First choose a list if there was no, otherwise SMA100B is mad\r\n #To know the available list, the query is 'SOUR1:LIST:CAT?'\r\n self.write('SOUR1:LIST:SEL \"/var/user/list1.lsw\"') \r\n \r\n self.write('OUTP1:STAT ON') #Somehow the SMA100B wants the RF to be ON for switching into list mode.\r\n self.write('SOUR1:LIST:MODE STEP') #Make Step mode in order to not automatically sweep all the frequencies\r\n self.write('SOURce1:FREQuency:MODE LIST')\r\n else:\r\n #CW and FIXed are synonyms for SMA100B\r\n self.write('SOURce1:FREQuency:MODE CW')",
"def set_mode(self, mode='List'):\r\n \r\n #If we choose list mode \r\n if mode.lower() == 'list':\r\n #First choose a list if there was no, otherwise SMA100B is mad\r\n #To know the available list, the query is 'SOUR1:LIST:CAT?'\r\n self.write('SOUR1:LIST:SEL \"/var/user/list1.lsw\"') \r\n \r\n self.write('OUTP1:STAT ON') #Somehow the SMA100B wants the RF to be ON for switching into list mode.\r\n self.write('SOUR1:LIST:MODE STEP') #Make Step mode in order to not automatically sweep all the frequencies\r\n self.write('SOURce1:FREQuency:MODE LIST')\r\n else:\r\n #CW and FIXed are synonyms for SMA100B\r\n self.write('SOURce1:FREQuency:MODE CW')",
"def last_boot(self, value: datetime) -> None:\n self._data[ATTR_LAST_BOOT] = value.isoformat()",
"def set_preset_mode(self, preset_mode: str) -> None:\n if self.target_temperature == 0:\n self._data.homestatus.setroomThermpoint(\n self._data.home_id, self._room_id, STATE_NETATMO_HOME,\n )\n\n if (\n preset_mode in [PRESET_BOOST, STATE_NETATMO_MAX]\n and self._module_type == NA_VALVE\n ):\n self._data.homestatus.setroomThermpoint(\n self._data.home_id,\n self._room_id,\n STATE_NETATMO_MANUAL,\n DEFAULT_MAX_TEMP,\n )\n elif preset_mode in [PRESET_BOOST, STATE_NETATMO_MAX]:\n self._data.homestatus.setroomThermpoint(\n self._data.home_id, self._room_id, PRESET_MAP_NETATMO[preset_mode]\n )\n elif preset_mode in [PRESET_SCHEDULE, PRESET_FROST_GUARD, PRESET_AWAY]:\n self._data.homestatus.setThermmode(\n self._data.home_id, PRESET_MAP_NETATMO[preset_mode]\n )\n else:\n _LOGGER.error(\"Preset mode '%s' not available\", preset_mode)\n\n self.update_without_throttle = True\n self.schedule_update_ha_state()",
"def setMode(self, mode):\n if mode == 0 or mode == 1:\n with self.lock:\n self.mode = mode\n else:\n raise FliError(\"FLISetCameraMode failed\")",
"def _sketch_mode(self):\r\n self._mode_select(1)",
"def _change_secure_boot_settings(self, property, value):\n system = self._get_host_details()\n # find the BIOS URI\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = (' \"SecureBoot\" resource or feature is not '\n 'supported on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # Change the property required\n new_secure_boot_settings = {}\n new_secure_boot_settings[property] = value\n\n # perform the patch\n status, headers, response = self._rest_patch(\n secure_boot_uri, None, new_secure_boot_settings)\n\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n # Change the bios setting as a workaround to enable secure boot\n # Can be removed when fixed for Gen9 snap2\n val = self._get_bios_setting('CustomPostMessage')\n val = val.rstrip() if val.endswith(\" \") else val+\" \"\n self._change_bios_setting({'CustomPostMessage': val})",
"def set_preset_mode(self, preset_mode):\n\n if preset_mode == PRESET_HOME:\n \"\"\"Turn away mode off.\"\"\"\n self._away = False\n self._device.set_temperature_to_auto()\n\n elif preset_mode == PRESET_AWAY:\n \"\"\"Turn away mode on.\"\"\"\n self._away = True\n self._device.set_location_to_frost()\n\n else:\n raise InvalidStateError\n\n pass",
"async def async_set_slave_mode(self, slave_mode):\n self._slave_mode = slave_mode\n #self.async_schedule_update_ha_state(True)",
"def sleep(self):\r\n if not self.backlight:\r\n return\r\n\r\n self.backlight.power = False",
"def test_get_bios_boot_mode_list(self):\n pass",
"def software_load(self, filename: str) -> None:\n pass # Most boards can use serialboot."
] | [
"0.7535342",
"0.7406527",
"0.7319358",
"0.6997932",
"0.66406643",
"0.6565158",
"0.6410105",
"0.6125415",
"0.6046522",
"0.60436237",
"0.6038874",
"0.6014684",
"0.6005457",
"0.6002688",
"0.59916985",
"0.59749866",
"0.5971754",
"0.59534365",
"0.5944353",
"0.5900143",
"0.58888465",
"0.5875209",
"0.58701897",
"0.5841792",
"0.58071274",
"0.57710737",
"0.57598937",
"0.5713621",
"0.5710734",
"0.570695",
"0.5693561",
"0.5687663",
"0.5680606",
"0.566673",
"0.5648962",
"0.564629",
"0.56452334",
"0.56310123",
"0.56305516",
"0.5617353",
"0.5601146",
"0.557242",
"0.55354404",
"0.5494074",
"0.5491425",
"0.5470701",
"0.5467866",
"0.5462722",
"0.5448103",
"0.54462934",
"0.54458165",
"0.5421771",
"0.54203326",
"0.54203326",
"0.54203326",
"0.54203075",
"0.54114866",
"0.53968173",
"0.53819156",
"0.53817385",
"0.5370647",
"0.5367762",
"0.5367002",
"0.53599566",
"0.5351804",
"0.5350858",
"0.53426653",
"0.53421855",
"0.5338906",
"0.53376895",
"0.5324519",
"0.53127587",
"0.53092647",
"0.53079104",
"0.52812713",
"0.5277461",
"0.5276455",
"0.5269399",
"0.52682644",
"0.5267632",
"0.52657557",
"0.52600205",
"0.52596945",
"0.52596945",
"0.5259165",
"0.5250478",
"0.5243344",
"0.5242709",
"0.5232951",
"0.5232951",
"0.5225361",
"0.52196634",
"0.52158093",
"0.5212434",
"0.51974845",
"0.51862",
"0.51836807",
"0.5183651",
"0.5181251",
"0.5178833"
] | 0.7734424 | 0 |
Resets the iLO password. | def reset_ilo_credential(self, password):
acc_uri = '/rest/v1/AccountService/Accounts'
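    # Walk the iLO accounts collection; each member is one user account.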
for status, hds, account, memberuri in self._get_collection(acc_uri):
if account['UserName'] == self.login:
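            # Matched the session's login: PATCH only the Password property.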
mod_user = {}
mod_user['Password'] = password
status, headers, response = self._rest_patch(memberuri,
None, mod_user)
if status != 200:
msg = self._get_extended_error(response)
raise exception.IloError(msg)
return
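    # The loop fell through: no account matched this session's login name.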
msg = "iLO Account with specified username is not found."
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def reset(ctx):\n\n controller = ctx.obj['controller']\n click.echo('Resetting OATH data...')\n old_id = controller.id\n controller.reset()\n\n settings = ctx.obj['settings']\n keys = settings.setdefault('keys', {})\n if old_id in keys:\n del keys[old_id]\n settings.write()\n\n click.echo(\n 'Success! All OATH credentials have been cleared from your YubiKey.')",
"def reset_password(self):\n self.password = passwordResetter(self.user_id, self.password)",
"def setpassword(self, pwd):\n pass",
"def reset_password(newpass, challenge):",
"def reset_password():\n pass",
"def reset(self):\n\n\t\tself._send_message(\"RESET\", \"\\x00\")",
"def password(self, password):\n self.password_hash = generate_password_hash(password)\n self.password_set = True",
"def clear_password(self, e):\n\n self.password.label.config(show='*')\n if self.password.get() == 'Enter Enovia Password':\n self.password.clear()",
"def reset(serial):\n if click.confirm(\n \"Warning: Your credentials will be lost!!! Do you wish to continue?\"\n ):\n print(\"Press the button to confirm -- again, your credentials will be lost!!!\")\n solo.client.find(serial).reset()\n click.echo(\"....aaaand they're gone\")",
"def reset(serial):\n if click.confirm(\n \"Warning: Your credentials will be lost!!! Do you wish to continue?\"\n ):\n print(\"Press the button to confirm -- again, your credentials will be lost!!!\")\n solo.client.find(serial).reset()\n click.echo(\"....aaaand they're gone\")",
"def _set_password(self, password):\n self._password = generate_password_hash(password)",
"def LdapResetPassword(self, record):\n password = self.login_pwd.generate_password()\n attrs = {}\n attrs['userPassword'] = self.login_pwd.encrypt_password(password)\n logger.debug(\"LDAP LdapResetPassword encrypt_password %s\"\n % (attrs['userPassword']))\n result = self.LdapModifyUser(record, attrs)\n return result",
"def set_pass(self, pw):\n\t\tself.passhash = generate_password_hash(pw)",
"def reset(self):\n self.state = \"YYYYRRRRGGGGOOOOBBBBWWWW\"",
"def setPassword(self, unhashPass):\n\t\tself.passHash = generate_password_hash(unhashPass)",
"def reset_merchant_pass(self, newpass):\n self.refresh()\n if not newpass:\n raise ValueError(\"Password must be defined\")\n\n updateshopobj = self.sc.get_updateshop_obj(\n {\n 'Alias': self.Alias,\n 'MerchantPassword': newpass,\n }\n )\n self.sc.update(updateshopobj)\n self.refresh()",
"def resetPassword(self, customerguid, password, jobguid=\"\", executionparams=None):",
"def test_010_change_user_password(self):\n\n testflow.step(\"Resetting password for user %s\", TEST_USER1)\n assert USER_CLI.run(\n 'password-reset',\n TEST_USER1,\n password='pass:%s' % self.user_password,\n password_valid_to='2100-01-01 11:11:11Z',\n )[0], \"Failed to change user's '%s' password\" % TEST_USER1",
"def reset(ctx, force):\n\n force or click.confirm(\n \"WARNING! This will delete all stored OATH accounts and restore factory \"\n \"settings. Proceed?\",\n abort=True,\n err=True,\n )\n\n session = ctx.obj[\"session\"]\n click.echo(\"Resetting OATH data...\")\n old_id = session.device_id\n session.reset()\n\n keys = ctx.obj[\"oath_keys\"]\n if old_id in keys:\n del keys[old_id]\n keys.write()\n logger.info(\"Deleted remembered access key\")\n\n click.echo(\"Success! All OATH accounts have been deleted from the YubiKey.\")",
"def set_password(self, password):\n self.PASS = password",
"def reset_password(user: User) -> Result[Password]:\n passwd = Password.new()\n command([\"/usr/sbin/chpasswd\"], passwd.wrap(\"{}:{{}}\".format(user.pw_name)))\n return Result(State.success, passwd)",
"def reset(self):\n self._keyCode = \"\"\n self._keyCodeCount = 0\n self._keyCodeTime = 0.0",
"def reset(self):\n self.string = self.axiom",
"def reset(self) -> None:\n self.memory = self.intcode.copy()\n self.ip = 0\n self.stdout.clear()",
"def reset(self):\n self.check_validity()\n\n self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_RESET, (), '', 0, '')",
"def set_password(self, password):\n self.PASSWORD = password",
"def set_password(self, password):\n self.cloudserver.change_password(password)",
"def reset(self):\n self._write(0x16, 1, 3, 0x08)",
"def set_password(self, password):\n self.password = generate_password_hash(password)",
"def set_password(self, new_password):\n super(Mafiasi, self).set_password(new_password)\n self.new_password = new_password",
"def change_pwd(self):\r\n if self.field_pwd.text() == \"\":\r\n self.label_chg_pwd.setText(\"Password cannot be empty\")\r\n return None\r\n self.encryptor.set_key_from_password(self.field_pwd.text())\r\n self.label_chg_pwd.setText(\"Password typed\")\r\n self.label_chg_pwd.setStyleSheet(\"color:#01ac2d\")\r\n self.label_chg_key.clear()\r\n self.field_key.clear()\r\n QtWidgets.QMessageBox.information(self, \"Password Change\", \r\n (\"Your password has been successfully changed.\\n\\n\"\r\n \"You can now encrypt / decrypt files.\"))",
"def set_password(self, password):\n self.password = password",
"def call_for_auth_reset(self):\n pos.select_dispenser(1)\n crindsim.lift_handle()\n pos.click(\"reset\")\n pos.click(\"yes\")\n crindsim.lower_handle()\n #Checks crind diag to see if reset message is displayed\n if not system.wait_for(lambda: \"reset\" in pos.read_dispenser_diag()[\"Status\"].lower(), verify = False):\n tc_fail(\"CRIND did not reset\")\n #Wait for crind to return to idle\n if not system.wait_for(lambda: \"idle\" in pos.read_dispenser_diag()[\"Status\"].lower(), timeout = 120, verify = False):\n tc_fail(\"CRIND did not return to idle\")\n pos.click(\"back\")",
"def password(self, password):\n if password is None:\n self._password = None\n else:\n self._password = generate_password_hash(password)",
"def on_limpiar(self, control):\n\n\n self.txtPassword.set_text(\"\")\n self.txtPassword.set_placeholder_text(\"\")",
"def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def reset(self):\n self.logger.debug(\"Resetting %s\", self.key)\n self.driver.reset(self.key)",
"def ChangePassword():\n if self.ChangePassword():\n # Update successful, return to main screen\n self.confirm_pass.set('')\n self.password.set('')\n Return()\n else:\n return",
"def set_password(self, password):\n self.password_hash = generate_password_hash(password)",
"def set_password(self, password):\n self.password_hash = generate_password_hash(password)",
"def resetUser(self):\n\t\turl = \"https://habitica.com/api/v4/user/reset\"\n\t\treturn(postUrl(url, self.credentials))",
"def reset(self,):\n\n self._toggle_pin(RESET_PIN)",
"def reset(self):\n\t\tself.write(\"*rst\")\n\t\tpass",
"def reset_punteggio(self):\n self.execute(TABELLE['punteggio']['reset'])",
"def reset(self):\n self.write_to_serial('*RST')",
"def change_password(self, new_password):\n dev = self.nearest_pandevice()\n self.password_hash = dev.request_password_hash(new_password)\n self.update(\"password_hash\")",
"def password(self, password) :\n\t\ttry :\n\t\t\tself._password = password\n\t\texcept Exception as e:\n\t\t\traise e",
"def reset(self):\r\r\n self.read(\"*cls\")\r\r\n self.waitForCompletion()\r\r\n self.read(\"*RST\") # Reset and query\r\r\n self.dev.write(\"*cls\")\r\r\n while self.read(\"*OPC?\") != \"1\": time.sleep(1) # Wait until completion\r\r",
"def reset(self):\n self.enc_len = None\n self.precomputed_enc_h = None\n self.mask = None",
"def set_password(self, password):\n self.password_hash = generate_password_hash(str(password))",
"async def password(self, ctx):\n pass",
"def set_password(self, password):\n self.password = self.hash_password(password)",
"def password(self, password):\n\n self._password = password",
"def password(self, password):\n\n self._password = password",
"def password(self, password):\n\n self._password = password",
"def password(self, password):\n\n self._password = password",
"def reset():\r\n pass",
"def password(self):\n raise RuntimeError(\"Password can not be read, only set\")",
"def password(self, password):\n self.password_hash = generate_password_hash(password)",
"def password(self, password):\n self.password_hash = generate_password_hash(password)",
"def password(self, password):\n self.password_hash = generate_password_hash(password)",
"def password(self, password):\n self.password_hash = generate_password_hash(password)",
"def reset(self):\n return self._send_command('reset')",
"def reset():\n pass",
"def reset():\n pass",
"def set_password(self, password):\n self.__init__(password=password)",
"def reset_password(connection,password,username):\r\n with connection:\r\n connection.execute(RESET_PASSWORD,(password,username))",
"def reset(self):\n self.ir_reg_name_generator.reset()",
"def reset(self):\n \n pass",
"def set_password(self, raw_password: str):\n self.new_password = raw_password",
"def password(self, password: str):\n\n self._password = password",
"def wifi_password(self):\n raise RuntimeError(\"Password can not be read, only set\")",
"def reset_login_attemtps(self):\r\n self.login_attempts = 0",
"def reset():",
"def reset():",
"def reset():",
"def change_password(change_account):\n change_data(change_account, changed_data='password')",
"def reset(self):\n self.enc_len = None\n self.precomputed_enc_h = None\n self.mask = None\n self.prev_attn = None",
"def password(self, password):\n\n self.password_hash = generate_password_hash(password)",
"def reset(self):\n requests.put('{}/reset'.format(self._get_url()))",
"def _set_password(self, cr, uid, id, password, context=None):\n encrypted = self._crypt_context(\n cr, uid, id, context=context).encrypt(password)\n print(password)\n print(encrypted)\n self._set_encrypted_password(cr, uid, id, encrypted, context=context)\n self._set_password_again(cr, uid, id, password, context=context)",
"def reset_ldap_password(username):\n \n from django_ldap_pixiedust.user import SynchronisingUserAdapter\n backend = LDAPBackend()\n user = User.objects.get(username=username)\n ldap_user = backend.get_user(user.id)\n sync = SynchronisingUserAdapter(ldap_user)\n sync.reset_ldap_password()",
"def set_password(self, password):\n self.authentication.password = password",
"def reset(self):\n self.desc.put(self.desc.pvname.split(\".\")[0])\n self.scan.put(\"Passive\")\n self.calc.put(\"0\")\n self.prec.put(\"5\")\n self.dold.put(0)\n self.doln.put(\"\")\n self.dopt.put(\"Use VAL\")\n self.flnk.put(\"0\")\n self.odly.put(0)\n self.oopt.put(\"Every Time\")\n self.outn.put(\"\")\n for letter in self.channels.read_attrs:\n channel = self.channels.__getattr__(letter)\n channel.reset()",
"def set_password(self, password):\n from kalon.auth import encrypt_password\n self.document.password = encrypt_password(password)",
"def set_password(self, password):\n self.password = md5crypt(password, gen_salt())",
"def resetSecret(self):\n self.secret = str(uuid())\n self.put()",
"def Reset(self):\n self.prompt_str = self.prompt_ev.FirstPromptEvaluator()",
"def reset(self) -> None:\n self.val = None\n self.notes = []\n self.blocked = False\n self.forbidden = False",
"def set_Password(self, value):\n super(DownloadDocumentInputSet, self)._set_input('Password', value)",
"def request_password_reset():",
"def _doReset(self):\n self._cmdReset()",
"def set_password(self, password):\n self.password = generate_password_hash(password, method='pbkdf2:sha256')",
"def set_password(self, password):\n self.password_hash = generate_password_hash(f\"{password}{self.user_salt}\")",
"def reset(self) -> None:\n ...",
"def reset(self) -> None:\n ...",
"def reset(self) -> None:\n ..."
] | [
"0.71026045",
"0.7095906",
"0.65982336",
"0.6317689",
"0.626955",
"0.622285",
"0.62090886",
"0.619031",
"0.6150298",
"0.6150298",
"0.6132683",
"0.6118869",
"0.60761297",
"0.6064993",
"0.605403",
"0.60505545",
"0.60464615",
"0.60406226",
"0.601135",
"0.59997255",
"0.5995621",
"0.59928083",
"0.59907496",
"0.59784484",
"0.5974175",
"0.5958172",
"0.5949247",
"0.5946935",
"0.5936411",
"0.5931389",
"0.5928982",
"0.5920122",
"0.5913778",
"0.59096867",
"0.5908521",
"0.58982795",
"0.58982795",
"0.58982795",
"0.58982795",
"0.58902097",
"0.58875495",
"0.58871746",
"0.58871746",
"0.5876373",
"0.5873538",
"0.5867131",
"0.5859567",
"0.5857351",
"0.5846917",
"0.582424",
"0.58232003",
"0.580466",
"0.5804623",
"0.5803969",
"0.5795058",
"0.5794797",
"0.5794797",
"0.5794797",
"0.5794797",
"0.5787557",
"0.57726634",
"0.5770837",
"0.5770837",
"0.5770837",
"0.5770837",
"0.57702345",
"0.57666373",
"0.57666373",
"0.5759917",
"0.5752354",
"0.57491094",
"0.5742996",
"0.574112",
"0.57405263",
"0.5739332",
"0.57389385",
"0.5734743",
"0.5734743",
"0.5734743",
"0.5733825",
"0.5712564",
"0.570798",
"0.5706886",
"0.5705569",
"0.57016397",
"0.56935036",
"0.56881094",
"0.5686238",
"0.56783056",
"0.5673629",
"0.5669674",
"0.56692153",
"0.5667407",
"0.5664036",
"0.56620026",
"0.5661496",
"0.562183",
"0.56217945",
"0.56217945",
"0.56217945"
] | 0.72471446 | 0 |
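The reset_ilo_credential document above follows a common HP RESTful API pattern: walk the accounts collection, match on UserName, and PATCH only the Password property. Below is a minimal standalone sketch of that pattern using the requests library; BMC_HOST, AUTH, and the links/Member collection layout are illustrative assumptions, not taken from the snippet.

    import requests

    BMC_HOST = "https://10.0.0.5"              # hypothetical iLO address
    AUTH = ("Administrator", "old-password")   # hypothetical credentials

    def reset_password(login, new_password):
        # Fetch the accounts collection; RIS-style collections list member hrefs.
        url = BMC_HOST + "/rest/v1/AccountService/Accounts"
        accounts = requests.get(url, auth=AUTH, verify=False).json()
        for member in accounts.get("links", {}).get("Member", []):
            account = requests.get(BMC_HOST + member["href"],
                                   auth=AUTH, verify=False).json()
            if account["UserName"] == login:
                # PATCH only the Password property on the matched account.
                resp = requests.patch(BMC_HOST + member["href"],
                                      json={"Password": new_password},
                                      auth=AUTH, verify=False)
                resp.raise_for_status()
                return
        raise RuntimeError("iLO account %r not found" % login)

verify=False mirrors the self-signed certificates typical of BMCs; a production client would verify or pin the iLO certificate instead.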
Resets the BIOS settings to default values. | def reset_bios_to_default(self):
# Check if the BIOS resource if exists.
headers_bios, bios_uri, bios_settings = self._check_bios_resource()
# Get the BaseConfig resource.
try:
base_config_uri = bios_settings['links']['BaseConfigs']['href']
except KeyError:
msg = ("BaseConfigs resource not found. Couldn't apply the BIOS "
"Settings.")
raise exception.IloCommandNotSupportedError(msg)
# Check if BIOS resource supports patch, else get the settings
if not self._operation_allowed(headers_bios, 'PATCH'):
headers, bios_uri, _ = self._get_bios_settings_resource(
bios_settings)
self._validate_if_patch_supported(headers, bios_uri)
status, headers, config = self._rest_get(base_config_uri)
if status != 200:
msg = self._get_extended_error(config)
raise exception.IloError(msg)
new_bios_settings = {}
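    # Scan BaseConfigs for the entry that carries the factory-default settings.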
for cfg in config['BaseConfigs']:
default_settings = cfg.get('default', None)
if default_settings is not None:
new_bios_settings = default_settings
break
else:
msg = ("Default Settings not found in 'BaseConfigs' resource.")
raise exception.IloCommandNotSupportedError(msg)
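    # PATCH the factory defaults back onto the BIOS settings resource.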
request_headers = self._get_bios_hash_password(self.bios_password)
status, headers, response = self._rest_patch(bios_uri, request_headers,
new_bios_settings)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def reset(self):\r\n # TODO: have reset flag such that it forces all the bottom changes\r\n self.pwm_freq = self._default[\"pwm_freq\"]\r\n self.gate_logic = self._default[\"gate_logic\"]\r\n self.max_pwm = self._default[\"max_pwm\"]\r\n self.lase_on_power_up = self._default[\"lase_on_power_up\"]\r\n\r\n self.mode = self._default[\"mode\"]\r\n self.lase = self._default[\"lase\"]\r\n self.percent = self._default[\"percent\"] # in percent\r",
"def reset(self):\n self.manager.delete_all()\n for name, val in DEFAULT_SETTINGS.items():\n val['name'] = name\n val['default_value'] = val['value']\n self.manager.from_dict(val)",
"def SetDefaults():\n winsound.MessageBeep()\n returnValue = MessageBox(0, u\"You are about to reset the settings, \"\n \"are you sure you want to contine?\"\n , u\"Reset settings file?\", 4)\n\n if returnValue == MB_YES:\n\n returnValue = MessageBox(0, u\"Settings successfully restored to default values\"\n , u\"Reset complete!\", 0)\n\n MySet = Settings()\n MySet.Save(settingsFile)",
"def reset_settings():\n settings = Settings()\n settings.reset()\n settings.save()",
"def reset():\n if os.name == \"posix\": #In linux\n os.system(\"clear\")\n elif os.name == (\"ce\", \"nt\", \"dos\"): #In windows\n os.system(\"cls\")",
"def reset(self):\n self.settings = None\n self.sublime_settings = None\n self.settings_base = \"Javatar.sublime-settings\"\n self.sublime_base = \"Preferences.sublime-settings\"",
"def reset( self ):\n self.conf = self.defaults",
"def reset(self):\n\n game.reset()\n sm.get_screen('game_screen').reset()",
"def reset(self):\n self._unset_defaults_and_overrides()\n self.clear()",
"def reset(self):\n self.reset_dev_via_serial(self.forced_reset_timeout)",
"def resetStoredDefaults( self ):\n keys= list( self._defDict.keys() )\n data= [ self._defDict[ aKey ] for aKey in keys ]\n \n self.prefObj.save( group= self.prefGroup, name= keys, data= data )\n self.resetSelfWithDefaults()",
"def restore_defaults(self):\n if messagebox.askyesno(\n message='Are you sure? '\n 'ALL SETTINGS will be reset to game defaults.\\n'\n 'You may need to re-install graphics afterwards.',\n title='Reset all settings to Defaults?', icon='question'):\n self.lnp.restore_defaults()\n messagebox.showinfo(\n self.root.title(),\n 'All settings reset to defaults!')",
"def reset(self):\n self._write(0x16, 1, 3, 0x08)",
"def reset_state(self):\n for name in self._buffers:\n self._buffers[name] = self._defaults[name]",
"def reset_to_factory(self):\n self._log_msg_start(\"Reset to factory settings\")\n # Order of execution is clear, save, load. This will copy the factory default\n # settings from ROM to flash, load from flash, and activate.\n device_mask_dict = dict(\n deviceDevBbr=1, # devSpiFlash device battery backed RAM\n deviceDevFlash=1, # device Flash\n deviceDevEeprom=1, # device EEPROM\n deviceDeviceSpiFlash=1, # device SPI Flash\n )\n # self._ubx.send(\n # \"CFG-CFG\",\n # clearMask=0xFFFF,\n # saveMask=0xFFFF,\n # loadMask=0xFFFF,\n # deviceMask=device_mask_dict,\n # )\n self._ubx.send(\n \"CFG-CFG\",\n clearMask=0xFFFF,\n saveMask=0x0000,\n loadMask=0xFFFF,\n deviceMask=device_mask_dict,\n )\n self._ubx.send(\n \"CFG-CFG\",\n clearMask=0x0000,\n saveMask=dict(\n msgConf=1,\n ),\n loadMask=dict(),\n deviceMask=device_mask_dict,\n )",
"def hard_reset(self) -> None:\n os.system('rm -fr \"$HOME/.daf/\"')",
"async def reset(self, ctx):\n await self.config.clear_all_guilds()\n await ctx.send(\"Reset all settings to default values.\")",
"def resetDeviceStates(self):",
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def restore():\r\n\tglobal mhp, php, men, pen\r\n\tmhp = 100\r\n\tphp = 100\r\n\tmen = 100\r\n\tpen = 100",
"def reset(self):\n\n\t\tself._send_message(\"RESET\", \"\\x00\")",
"def reset(self):\n self.rst.value(0) # RST on\n self.sleep_us(100) # reset impulse has to be >100 ns and <100 ms\n self.rst.value(1) # RST off\n # Defaults after reset:\n self.power = self.POWER_DOWN\n self.addressing = self.ADDRESSING_HORIZ\n self.instr = self.INSTR_BASIC\n self.display_mode = self.DISPLAY_BLANK\n self.temp_coeff = self.TEMP_COEFF_0\n self.bias = self.BIAS_1_11\n self.voltage = 3060",
"def reset(self):\n self.data = self._defaults",
"async def _reset_settings(self, ctx):\n data = await self.get_data(ctx)\n await data.Settings.clear()\n msg = (\"{0.name} ({0.id}) reset all \"\n \"casino settings.\").format(ctx.author)\n await ctx.send(msg)",
"def reset(self):\n self.train_loss.reset_states()\n self.train_accuracy.reset_states()\n self.val_loss.reset_states()\n self.val_accuracy.reset_states()\n self.train_mIoU.reset_states()\n self.val_mIoU.reset_states()",
"def reset(self):\n self.params.resetParams()",
"def resetSettings(self):\n\n # it does this 4 times because for some reason it would not grab everything one time through. Investigate\n for i in range(4):\n\n networkNode = self.returnNetworkNode\n attrs = cmds.listAttr(networkNode, ud=True)\n\n for attr in attrs:\n attrType = str(cmds.getAttr(networkNode + \".\" + attr, type=True))\n\n if attrType == \"double\":\n cmds.setAttr(networkNode + \".\" + attr, lock=False)\n cmds.setAttr(networkNode + \".\" + attr, 0, lock=True)\n\n if attrType == \"bool\":\n cmds.setAttr(networkNode + \".\" + attr, lock=False)\n cmds.setAttr(networkNode + \".\" + attr, True, lock=True)\n\n if attrType == \"enum\":\n cmds.setAttr(networkNode + \".\" + attr, lock=False)\n cmds.setAttr(networkNode + \".\" + attr, 0, lock=True)\n\n # relaunch the UI\n self.updateSettingsUI()\n self.applyModuleChanges(self)",
"def _soft_reset(self):\n self._reset_specific_envs(self.episodes_done)\n self._update_other_info()",
"def _reset(self):\n self._interface.set('fw_wp_en', 'off')",
"def UnsetWiredDefault(self):\n config = ConfigParser.ConfigParser()\n config.read(self.wired_conf)\n profileList = config.sections()\n for profile in profileList:\n if config.has_option(profile, \"default\"):\n if misc.to_bool(config.get(profile, \"default\")):\n config.set(profile, \"default\", False)\n config.write(open(self.wired_conf, \"w\"))\n self.SaveWiredNetworkProfile(profile)",
"def reset(self):\n self.fontname = None\n self.size = -1\n self.valign = None\n self.bold = False\n self.italics = False\n self.smallcaps = False",
"def reset(self,bootloader=False):\n self.send_packet('\\xff' if bootloader else '\\xfe')",
"def restore_defaults(self):\n temp_index = self.temp_dropdown.findText(self.default_units[\"Temperature\"])\n vol_index = self.volume_dropdown.findText(self.default_units[\"Volume\"])\n press_index = self.press_dropdown.findText(self.default_units[\"Pressure\"])\n energy_index = self.energy_dropdown.findText(self.default_units[\"Energy\"])\n amount_index = self.amount_dropdown.findText(self.default_units[\"Amount\"])\n speed_index = self.speed_dropdown.findText(self.default_units[\"Speed\"])\n\n self.temp_dropdown.setCurrentIndex(temp_index)\n self.volume_dropdown.setCurrentIndex(vol_index)\n self.press_dropdown.setCurrentIndex(press_index)\n self.energy_dropdown.setCurrentIndex(energy_index)\n self.amount_dropdown.setCurrentIndex(amount_index)\n self.speed_dropdown.setCurrentIndex(speed_index)",
"def reset(self):\n # The camera will give no response to this command\n self._serial_io('\\x55\\x99\\x66\\x11', None)\n while True:\n try:\n self.system_state = 0x11\n if self.system_state == 0x11:\n break\n except CygnetExc:\n time.sleep(.2)\n while True:\n self.system_state = 0x12\n time.sleep(.2)\n if self.system_state == 0x16:\n break",
"def reset():\n Vessel.reset_instances()",
"def reset(self):\n self.write(\"*RST;*CLS;*SRE 0;*ESE 0;:STAT:PRES;\")",
"def reset(self):\n Simulation.reset(self)",
"def _reset(self):\n self._value = self._default",
"def reset(self):\n self.at_cmd('Z')",
"def reset(self):\n self._set_init()",
"def reset(self):\n self.value.put(0)\n self.input_pv.put(\"\")\n self.input_trigger.put(\"Yes\")",
"def soft_reset():",
"def reset_factory(self):\n self.set_vcp_value_by_name('Restore Factory Defaults', 1)",
"def reset_defaults(cls, deco_classname):\n # v0.3.0b24 -- use new classmethods\n orig_defaults = cls._classname2SettingsDataOrigDefaults_dict[deco_classname]\n settings_map = cls._classname2SettingsData_dict[deco_classname]\n for name in settings_map:\n settings_map[name].default = orig_defaults[name]",
"def reset():",
"def reset():",
"def reset():",
"def reset(self):\n self.write_to_serial('*RST')",
"def sys_reset(self):\n result = self._lib.NRFJPROG_sys_reset()\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)",
"def reset(cls):\n\n cls._set_mode_stopped()\n TimeDisplay.reset_time(erase=True)\n TimeDisplay.show_default()\n Notes.clear()\n for callback in cls.reset_callback:\n callback()",
"def setdefaults(self):\n res = __library__.MSK_XX_setdefaults(self.__nativep)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)",
"def reset(self):\n command = \"export STLINK_DEVICE=\" + self.stlink.port + \"; st-flash reset\"\n subprocess.run(command, shell=True)\n time.sleep(1)",
"def _hard_reset(self):\n self._reset_specific_envs(np.ones_like(self.episodes_done))\n self._update_other_info()",
"def reset(self):\n self.desc.put(self.desc.pvname.split(\".\")[0])\n self.scan.put(\"Passive\")\n self.calc.put(\"0\")\n self.prec.put(\"5\")\n self.dold.put(0)\n self.doln.put(\"\")\n self.dopt.put(\"Use VAL\")\n self.flnk.put(\"0\")\n self.odly.put(0)\n self.oopt.put(\"Every Time\")\n self.outn.put(\"\")\n for letter in self.channels.read_attrs:\n channel = self.channels.__getattr__(letter)\n channel.reset()",
"def restore_config(self):\n self._clear_previous_windows_assigment()\n self._restart_i3_config()",
"def screen_reset(self, width, height):\n pygame.display.set_mode((width, height))\n self.s_width = width\n self.s_height = height\n self.main_menu.reset()\n self._option_menu.reset()\n self._instruction_menu.reset()\n self._title.reset_pos(self.s_width/2, self.s_height*0.25)\n self._info.screen_init()\n self._bg.init_bg()",
"def reset(self):\n\n self.scaler = None\n self.isFitted = False\n self.__create_scaler()",
"def reset(self) -> None:\n # See section 7.2.2 of the datasheet for reset description.\n self._reset.value = True\n time.sleep(0.0001) # 100 us\n self._reset.value = False\n time.sleep(0.005) # 5 ms",
"def reset(self):\n self.reset_image_estimate()\n self.init_m_aux()\n self.reset_hessian_and_bias()\n self.reset_adadelta_variables()",
"def svc_reset_system_mode(self) -> None:\n self._call_client_api(self._device.reset_mode)",
"def reset(self):\n \n pass",
"def resetToMainSection(self):\n wValue = 0\n wIndex = 0\n wLength = 0\n try:\n self.__bootCommand(op.BootloaderCommands.Reset,1,[0,0,0],[])\n except:\n #This will always throw an exception because it disconnects the device and re-enumerates as a normal Power Monitor\n print(\"Resetting to Main Section.\")",
"def reset(self):\n self._faux._default_setup()\n self._faux._update()",
"def reset(self):\n self.velocity_controller.reset()\n self.yaw_filter.reset()",
"def _reset(cls):\r\n cls._CONFIGURED = False\r\n cls._ENABLED = {}",
"async def _reset_games(self, ctx):\n data = await self.get_data(ctx)\n await data.Games.clear()\n msg = (\"{0.name} ({0.id}) restored casino games to \"\n \"default settings.\").format(ctx.author)\n await ctx.send(msg)",
"def reset():\r\n pass",
"def _doResetMemory(self):\n self._cmdClearMemory()\n time.sleep(1)\n self._cmdResetParameters()\n time.sleep(1)",
"def reset_screen() -> None:\n os.system(\"clear\") if os.name == \"posix\" else os.system(\"cls\")\n print(logo)\n print(\"=\" * 80)",
"def soft_reset(self):\n self.ser.write(\"\\030\")\n self._handle_reset()",
"def reset_states(self):\n self.model.reset_states()",
"def reset(ctx):\n\n controller = ctx.obj['controller']\n click.echo('Resetting OATH data...')\n old_id = controller.id\n controller.reset()\n\n settings = ctx.obj['settings']\n keys = settings.setdefault('keys', {})\n if old_id in keys:\n del keys[old_id]\n settings.write()\n\n click.echo(\n 'Success! All OATH credentials have been cleared from your YubiKey.')",
"def restore_default_uac():\n if global_vars['OS']['Version'] == '10':\n write_registry_settings(UAC_DEFAULTS_WIN10, all_users=True)\n else:\n # Haven't checked Win8 settings, only applying minimum set\n write_registry_settings(UAC_DEFAULTS_WIN7, all_users=True)",
"def reset_10gbe():\n snap.write_int('valid_en',0)\n snap.write_int('rst',1)\n time.sleep(1)\n snap.write_int('rst',0)\n snap.write_int('valid_en',3)",
"def clearAllSettings(self) -> None:\n ...",
"def _doReset(self):\n self._cmdReset()",
"def actionReset(self):\n sys.stderr.write(\"Reset device ...\\n\")\n sys.stderr.flush()\n self.bslReset(0) #only reset",
"def reset():\n _runtime.reset()",
"def reset(self):\n self.state.fill(EMPTY)",
"def reset(self):\r\n err = self._cfuncs['ka_reset'](self._core._get_ka())\r\n self._core._handle_error(err)",
"def __mode_reset(self):\n\t\tfor key,val in self.ms_all.iteritems():\n\t\t\tval.reset_restart()",
"def reset(self):\n self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_RESET, (), '', '')",
"def reset(self):\n self.from_platform()\n self.load_video()\n self.init_all_params()\n self.show_frame(0)",
"def full_reset(self):\n self.at_cmd('CFUN=1')",
"def reset_config():\r\n # TODO implement configuration reset\r\n pass",
"def reset(self):\r\r\n self.read(\"*cls\")\r\r\n self.waitForCompletion()\r\r\n self.read(\"*RST\") # Reset and query\r\r\n self.dev.write(\"*cls\")\r\r\n while self.read(\"*OPC?\") != \"1\": time.sleep(1) # Wait until completion\r\r",
"def resetDefaults(self):\n self.client.SetFont(wx.Font(10,wx.SWISS,wx.NORMAL,wx.NORMAL))\n self.client.SetFontSizeAxis(10)\n self.client.SetFontSizeLegend(7)\n self.client.setLogScale((False,False))\n self.client.SetXSpec('auto')\n self.client.SetYSpec('auto')",
"def restoreDefaults(self):\n # preserve `_options` if set by clients (for `reset`).\n opts = self._options\n self.setOptions(Options(\"utf-8\", csv.excel()))\n self._options = opts",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def reset(self):\n pass"
] | [
"0.68796086",
"0.66056365",
"0.65619004",
"0.65489376",
"0.6536054",
"0.6508475",
"0.64923877",
"0.6490318",
"0.6390865",
"0.6369686",
"0.63630855",
"0.6360946",
"0.6344241",
"0.6342181",
"0.6341036",
"0.63164306",
"0.6314139",
"0.62830174",
"0.62667483",
"0.62354726",
"0.620782",
"0.61914915",
"0.6181724",
"0.6167523",
"0.6161257",
"0.6152776",
"0.61489147",
"0.6137172",
"0.61319363",
"0.6130049",
"0.6120767",
"0.6115945",
"0.6092943",
"0.6090004",
"0.6080848",
"0.6051578",
"0.6048817",
"0.6004901",
"0.5999535",
"0.5982361",
"0.59731734",
"0.5968209",
"0.5959839",
"0.5958492",
"0.5957916",
"0.5957916",
"0.5957916",
"0.59521425",
"0.59515566",
"0.5937812",
"0.5934023",
"0.59307456",
"0.5920988",
"0.59207606",
"0.59199953",
"0.59163135",
"0.59128463",
"0.59075195",
"0.590384",
"0.59005994",
"0.5899314",
"0.58935976",
"0.5893584",
"0.58817655",
"0.587877",
"0.5872906",
"0.5872727",
"0.5872685",
"0.58621025",
"0.58586824",
"0.5843502",
"0.58415246",
"0.58408314",
"0.58396304",
"0.5839377",
"0.58355725",
"0.58151644",
"0.5813854",
"0.5813326",
"0.5813171",
"0.58113754",
"0.58095217",
"0.58069944",
"0.58020097",
"0.57976186",
"0.5793544",
"0.57891107",
"0.57853496",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286",
"0.5785286"
] | 0.68314517 | 1 |
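The reset_bios_to_default document above reads the BaseConfigs resource and writes its 'default' entry back to the BIOS settings URI. A condensed sketch of that flow with requests, under the same assumptions as the sketch above (the URIs and the links/BaseConfigs key layout follow the snippet; the host, credentials, and default bios_uri are illustrative):

    import requests

    BMC_HOST = "https://10.0.0.5"            # hypothetical iLO address
    AUTH = ("Administrator", "password")     # hypothetical credentials

    def bios_factory_reset(bios_uri="/rest/v1/systems/1/bios"):
        bios = requests.get(BMC_HOST + bios_uri, auth=AUTH, verify=False).json()
        base_uri = bios["links"]["BaseConfigs"]["href"]
        config = requests.get(BMC_HOST + base_uri, auth=AUTH, verify=False).json()
        # Take the first BaseConfigs entry that carries a 'default' settings dict.
        defaults = next((cfg["default"] for cfg in config["BaseConfigs"]
                         if cfg.get("default") is not None), None)
        if defaults is None:
            raise RuntimeError("no 'default' entry in BaseConfigs")
        resp = requests.patch(BMC_HOST + bios_uri, json=defaults,
                              auth=AUTH, verify=False)
        resp.raise_for_status()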
Gets the iLO firmware version for server capabilities. | def _get_ilo_firmware_version(self):
manager, reset_uri = self._get_ilo_details()
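    # The manager resource carries the running iLO firmware version string.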
ilo_firmware_version = manager['Firmware']['Current']['VersionString']
return {'ilo_firmware_version': ilo_firmware_version} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def firmware_version(self):\n return self._get_system_status()[\"firmware\"]",
"def get_ilo_firmware_version_as_major_minor(self):\n try:\n manager, reset_uri = self._get_ilo_details()\n ilo_fw_ver_str = (\n manager['Oem']['Hp']['Firmware']['Current']['VersionString']\n )\n return common.get_major_minor(ilo_fw_ver_str)\n except Exception:\n return None",
"def fw_version(self):\n return self.capabilities.get(\"fw_ver\")",
"def hw_version(self) -> str | None:\n return self.status.get(\"FIRMWARE\")",
"def firmware_version(self):\n return self.data.get('fw_ver')",
"def get_firmware_version():\r\n return utils.run('crossystem fwid').stdout.strip()",
"def firmware_version(self):\n buf = (ctypes.c_char * self.MAX_BUF_SIZE)()\n self._dll.JLINKARM_GetFirmwareString(buf, self.MAX_BUF_SIZE)\n return ctypes.string_at(buf).decode()",
"def hardware_version(self):\n return self.data.get('hw_ver')",
"def test_get_hyperflex_server_firmware_version_by_moid(self):\n pass",
"def get_firmware_version(self):\n fw_version = {\n \"BIOS\": self._api_helper.read_txt_file(BIOS_VER_PATH),\n \"BMC\": self.__get_bmc_ver(),\n \"SWITCH_CPLD1\": self.__get_cpld_ver(SW_CPLD1_VER_PATH),\n \"SWITCH_CPLD2\": self.__get_cpld_ver(SW_CPLD2_VER_PATH),\n }.get(self.name, \"Unknown\")\n\n return fw_version",
"def get_hardware_revision():\n return _pigpio_command(_control, _PI_CMD_HWVER, 0, 0)",
"def firmware_version(self):\n return self._read(MX_FIRMWARE_VERSION)",
"async def get_firmware_version(self):\n current_time = time.time()\n if self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE) == '':\n await self._send_sysex(PrivateConstants.REPORT_FIRMWARE, None)\n while self.query_reply_data.get(\n PrivateConstants.REPORT_FIRMWARE) == '':\n elapsed_time = time.time()\n if elapsed_time - current_time > 2:\n return None\n await asyncio.sleep(self.sleep_tune)\n reply = ''\n for x in self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE):\n reply_data = ord(x)\n if reply_data:\n reply += chr(reply_data)\n self.query_reply_data[PrivateConstants.REPORT_FIRMWARE] = reply\n return self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE)",
"def getFirmwareVersion(self, board=0):\n return self.callModule('admin', board, 0, 'getVersion')",
"def hardware_version(self) -> str:\n return self.camera_info[\"main_hw_version\"]",
"def get_firmware_version(self):\n request_command = self.parser_invoker.get_firmware_version_command_bytes(self.sequence_id, self.product_id)\n response_command_content = self.connectObj.send_receive_command(request_command)\n return response_command_content",
"def firmware_version(self) -> str:\n return self._firmware_version",
"def get_fw_version(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def get_version():\n ver = '0.0.0'\n req = restcall(0, 'config', 10.0)\n if req['text'] is not None:\n try: \n tree = ET.fromstring(req['text'])\n ver = tree.findall('app_version')[0].text\n if ver is None:\n ver = '0.0.0'\n _LOGGER.info(\"ISY: firmware version: %s\", ver)\n except ET.ParseError:\n _LOGGER.error(\"No version information found on ISY.\")\n return ver",
"async def get_firmware_version(self):\n if self.debug:\n print(\"Sending GET_FIRMWARE_VERSION\")\n\n response = await self.call_function(_COMMAND_GETFIRMWAREVERSION)\n if response is None:\n raise RuntimeError('Failed to detect the PN532')\n return tuple(response)",
"def firmware(self) -> str:\n return self._device_info[\"Firmware\"]",
"def get_firmware_version(self):\n cmd = protocol.GET_FIRMWARE_VERSION\n response = self.__send_and_receive(cmd)\n\n value = self.__gen_response_value(response)\n if value:\n self.firmware_version = value[0][1:]\n else:\n return False",
"def software_version(self) -> str:\n return self.camera_info[\"main_sw_version\"]",
"def test_get_hyperflex_server_firmware_version_list(self):\n pass",
"def get_firmware_version(self):\n response = self.call_function(PN532_COMMAND_GETFIRMWAREVERSION, 4)\n if response is None:\n raise RuntimeError('Failed to detect the PN532! Make sure there is sufficient power (use a 1 amp or greater power supply), the PN532 is wired correctly to the device, and the solder joints on the PN532 headers are solidly connected.')\n return (response[0], response[1], response[2], response[3])",
"def get_hardware_version(self):\n cmd = protocol.GET_HARDWARE_VERSION\n response = self.__send_and_receive(cmd)\n\n value = self.__gen_response_value(response)\n if value:\n self.hardware_version = value[0][1:]\n else:\n return False",
"def driver_version(self):\n data = fcntl.ioctl(self._fd, _EVIOCGVERSION, '\\x00\\x00\\x00\\x00')\n return struct.unpack(\"i\", data)[0]",
"def fw_ver(self):\n return self._fw_ver",
"def get_version(self):\r\n return self._arm.get_version()",
"def hardware_version(self):\n version = self._dll.JLINKARM_GetHardwareVersion()\n major = version / 10000 % 100\n minor = version / 100 % 100\n return '%d.%02d' % (major, minor)",
"def read_fw_version(self):\n\n # This function expects the firmware version to be in a line\n # prefixed with 'Product Extra'.\n # At the moment, it takes the form:\n # Product Extra : MCH FW V2.18.8 Final (r14042) (Mar 31 2017 - 11:29)\n # The following two parts will be extracted:\n # mch_fw_ver: V2.18.8 Final\n # mch_fw_date: Mar 31 2017 - 11:29\n # If NAT change the format, then this function will need to be updated\n\n pattern = \".*: MCH FW (.*) \\(.*\\) \\((.*)\\)\"\n\n for mch in range(1,3):\n try:\n result = self.mch_comms.call_ipmitool_command([\"fru\", \"print\", str(mch + MCH_FRU_ID_OFFSET)])\n\n for line in result.splitlines():\n if FW_TAG in line:\n match = re.match(pattern, line)\n if match:\n self.mch_fw_ver[mch] = match.group(1)\n self.mch_fw_date[mch] = match.group(2)\n else:\n self.mch_fw_ver[mch] = \"Unknown\"\n self.mch_fw_date[mch] = \"Unknown\"\n except CalledProcessError as e:\n self.mch_fw_ver[mch] = \"Unknown\"\n self.mch_fw_date[mch] = \"Unknown\"\n except TimeoutExpired as e:\n print(\"read_fw_version: caught TimeoutExpired exception: {}\".format(e))",
"def test_patch_hyperflex_server_firmware_version(self):\n pass",
"def get_machine_version():\n return get_file_content(\"/home/pi/.machineconfig/latest_version\")",
"def version(self):\n done, data = self._request('GV')\n if done:\n return {\n 'firmware': data[0],\n 'protocol': data[1]\n }\n\n raise EvseError",
"def sw_version(self) -> str | None:\n return self.status.get(\"VERSION\")",
"def fusion_api_get_server_hardware_firmware_compliance(self, body, api=None, headers=None):\n return self.sh.post(body=body, param='/firmware-compliance', api=api, headers=headers)",
"def get_server_version(self):\n return self.client.getServerVersion().decode('utf-8')\n return self.client.getServerVersion().decode('utf-8')",
"def test_update_hyperflex_server_firmware_version(self):\n pass",
"def get_server_version(self):\n return self.__aceQLHttpApi.get_server_version()",
"def fusion_api_get_server_hardware_firmware(self, uri, api=None, headers=None):\n return self.sh.get(uri=uri, api=api, headers=headers, param='/firmware')",
"def software_version(self) -> str:\n return self.data.get(Attribute.SOFTWARE_VERSION)",
"def compatible_firmware_version(self):\n identifier = self.firmware_version.split('compiled')[0]\n buf_size = self.MAX_BUF_SIZE\n buf = (ctypes.c_char * buf_size)()\n res = self._dll.JLINKARM_GetEmbeddedFWString(identifier.encode(), buf, buf_size)\n if res < 0:\n raise errors.JLinkException(res)\n\n return ctypes.string_at(buf).decode()",
"def version(self):\n data = self._ftdi.spi_read(self.VERSION_ADDR, len=1, burst='fixed')\n return data[0] & self.VERSION_MASK",
"def get_software_version(self):\n \n try:\n if self.product_info is None:\n self.product_info = self.connection.System.SystemInfo.\\\n get_product_information()\n return self.product_info['product_version']\n except:\n raise",
"def get_fw_version(self):\n summary = self.get_version_summary()\n pattern = '\\$.*? .*? .*? .*? .*? .*? .*? .*? (.*?) \\r\\n' \n fw_version = re.findall(pattern,summary).pop()\n return fw_version",
"async def get_firmware_version(self):\n current_time = time.time()\n #logstring(\"setting current time {}\".format(current_time))\n #logstring(\"1\")\n if self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE) == '':\n #logstring(\"2\")\n #logstring(\"checking time now 1 {}\".format(time.time()))\n await self._send_sysex(PrivateConstants.REPORT_FIRMWARE, None)\n #logstring(\"checking time now 2 {}\".format(time.time()))\n #logstring(\"3\")\n if self.serial_port.IsPortOpen == False:\n #logstring(\"Looks like that port wasn't working!!!!!!!!!!!!!????\")\n return None\n while self.query_reply_data.get(\n PrivateConstants.REPORT_FIRMWARE) == '':\n #logstring(\"4\")\n elapsed_time = time.time()\n #logstring(\"setting elapsed time {}\".format(elapsed_time))\n #logstring(\"5\")\n if elapsed_time - current_time > 3:\n #logstring(\"really took too long: {} {} {}\".format(elapsed_time, current_time, elapsed_time - current_time))\n return None\n #logstring(\"7\")\n if self.serial_port.IsPortOpen == False:\n #logstring(\"Looks like that port wasn't working!!!!!!!!!!!!!\")\n return None\n await asyncio.sleep(self.sleep_tune)\n #logstring(\"8\")\n #logstring(\"Geez, that took: {} {} {} ??????????????????\".format(elapsed_time, current_time, elapsed_time - current_time))\n\n reply = ''\n #logstring(\"9\")\n for x in self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE):\n reply_data = ord(x)\n if reply_data:\n reply += chr(reply_data)\n self.query_reply_data[PrivateConstants.REPORT_FIRMWARE] = reply\n #logstring(\"10\")\n return self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE)",
"def get_os_version(self):\n\t\treturn call_sdk_function('PrlSrvInfo_GetOsVersion', self.handle)",
"def get_server_version():\n url_address = 'https://raw.githubusercontent.com/muhammadfredo/FrMaya/master/FrMaya/version.py'\n url_data = urllib2.urlopen(url_address).read(200)\n result = re.search(r'(\\d+), (\\d+), (\\d+)', url_data, re.MULTILINE)\n if result:\n version_list = [int(v) for v in result.groups()]\n return version_list\n else:\n raise ValueError('Cannot get server version!!!')",
"def version():\n cmd = \"{} -v\".format(_detect_os())\n out = __salt__[\"cmd.run\"](cmd).splitlines()\n ret = out[0].split(\": \")\n return ret[1]",
"def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]",
"def get_host_os_minor(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetHostOsMinor', self.handle)",
"def getFirmwareVersion(self, *id_list):\n if id_list == ():#Empty list\n return -1\n elif len(id_list) == 1:#Just one ID.\n pkt = Packet.makeReadPacket(id_list[0],xl320.XL320_FIRMWARE_VERSION)\n else:\n pkt = Packet.makeSyncReadPacket(xl320.XL320_FIRMWARE_VERSION,id_list)\n\n ans,err_num,err_str = self.serial.sendPkt(pkt)\n if ans == []:#In case of an empty packet arrives\n return -2\n else:\n data = []\n for index,val in enumerate(id_list):\n #print (index,val)\n data.append(val) #Append the ID value\n data.append(ans[index*12+9])#Append the respective ID's data\n return data",
"def getFirmwareRevision(self): \n return self.firmware_revision",
"def hw_from_req(req):\n return req.app['com.opentrons.hardware']",
"def get_server_capabilities(self):\n capabilities = {}\n system = self._get_host_details()\n capabilities['server_model'] = system['Model']\n rom_firmware_version = (\n system['Oem']['Hp']['Bios']['Current']['VersionString'])\n capabilities['rom_firmware_version'] = rom_firmware_version\n capabilities.update(self._get_ilo_firmware_version())\n capabilities.update(self._get_number_of_gpu_devices_connected())\n if self._get_tpm_capability():\n capabilities['trusted_boot'] = 'true'\n\n if self._get_cpu_virtualization():\n capabilities['cpu_vt'] = 'true'\n if self._get_nvdimm_n_status():\n capabilities['nvdimm_n'] = 'true'\n try:\n self.get_secure_boot_mode()\n capabilities['secure_boot'] = 'true'\n except exception.IloCommandNotSupportedError:\n # If an error is raised dont populate the capability\n # secure_boot\n pass\n if self._is_sriov_enabled():\n capabilities['sriov_enabled'] = 'true'\n return capabilities",
"def get_fw_ver(self, rec, report):\n\n rec.VAL = self.crate.mch_fw_ver[self.slot]",
"def _GetWhitelistVersion(self, component):\n firmware_info = os.path.join(pyauto.PyUITest.DataDir(),\n 'pyauto_private/chromeos/',\n 'chromeos_firmware_info.txt')\n assert os.path.exists(firmware_info), 'Data file does not exist.'\n return self.EvalDataFrom(firmware_info)[self.ChromeOSBoard()][component]",
"def get_product_version(self):\n\t\treturn call_sdk_function('PrlSrvInfo_GetProductVersion', self.handle)",
"def test_create_hyperflex_server_firmware_version(self):\n pass",
"def get_required_ovs_version(self):\n return self.get_required_version(\"Open vSwitch\", self.openshift_to_ovs_version)",
"def server_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"server_version\")",
"def server_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"server_version\")",
"def fusion_api_edit_server_hardware_mp_firmware_version(self, body, uri, api=None, headers=None):\n return self.sh.update(body, uri, api, headers, param='/mpFirmwareVersion')",
"def get_version(self):\n url = '{}/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['orionld version']\n except Exception as e:\n pass\n return ''",
"async def get_version(self):\n\n # Display info message\n log.info(\"get_version\")\n\n # By default empty string\n version = \"\"\n\n # Run get version on the device\n output = await self.send_command(self.cmd_get_version)\n\n # Seek \"Version \" and \",\" to get the version in the returned output\n version = output.split(\"Version \")[1].split(\",\")[0]\n\n # Display info message\n log.info(f\"get_version: version: {version}\")\n\n # Return the version of the software of the device\n return version",
"def version():\n from MotionWise.log_proc import __version__ as log_ver\n from MotionWise.pm_measurement import __version__ as pm_ver\n from MotionWise.MotionWise_perf_proxy import __version__ as proxy_ver \n from MotionWise.MotionWise_perf_client import __version__ as client_ver \n \n ver = \"$Revision: 80204 $\".split()[1]\n batch = max([ pm_instrument.version().split('.')[-1], log_ver\n , ver, pm_ver, proxy_ver, client_ver, FP.__version__])\n return \"3.0.{}\".format(batch)",
"def do_get_version(self, arg):\n arg = arg\n print(self.phil.if_version)",
"def minor_version(self):\n return self.unpack_dword(0x18)",
"def version(self):\n response = self._request_call('/version')\n return response.version_etcdserver",
"def version(self):\n if not self.proxy:\n self.proxy = self.session.service(\"ALBattery\")\n return self.proxy.version()",
"def getversion(): # 3\n res,resargs = _msk.Env.getversion()\n if res != 0:\n raise Error(rescode(res),\"\")\n _major_return_value,_minor_return_value,_build_return_value,_revision_return_value = resargs\n return _major_return_value,_minor_return_value,_build_return_value,_revision_return_value",
"def GetVersion(self):\n try:\n return self.server.GetVersionString()\n except dbus.DBusException:\n return None",
"def fusion_api_get_appliance_firmware_upgrade_status(self, api=None, headers=None):\n param = '/notification'\n return self.appfirmware.get(api=api, headers=headers, param=param)",
"def update_firmware(self):\n return self._dll.JLINKARM_UpdateFirmwareIfNewer()",
"def _GetSystemVersion(self, component, info):\n # Check if we are on mario, then we need to use the legacy parser\n if self.ChromeOSBoard() == 'x86-mario':\n return self._GetSystemVersionMario(component, info)\n items = info.strip().splitlines()\n # This is going to give us a list of lines, we are looking for the\n # following ones:\n # BIOS version: board.xx.xx.xxx.xxx.xx\n # EC version: foobar\n for line in items:\n line_components = line.split(':')\n # The line we are looking for has at least 2 items\n if len(line_components) >= 2 and line_components[0] == component:\n return line_components[1].strip()\n self.fail('Could not locate the following item %s in the return value '\n 'of chromeos-firmwareupdate.' % component)",
"def get_host_os_major(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetHostOsMajor', self.handle)",
"def version(serial, udp):\n\n if udp:\n solo.fido2.force_udp_backend()\n\n try:\n major, minor, patch = solo.client.find(serial).solo_version()\n print(f\"{major}.{minor}.{patch}\")\n except solo.exceptions.NoSoloFoundError:\n print(\"No Solo found.\")\n print(\"If you are on Linux, are your udev rules up to date?\")\n except (solo.exceptions.NoSoloFoundError, ApduError):\n # Older\n print(\"Firmware is out of date (key does not know the SOLO_VERSION command).\")",
"def version(serial, udp):\n\n if udp:\n solo.fido2.force_udp_backend()\n\n try:\n major, minor, patch = solo.client.find(serial).solo_version()\n print(f\"{major}.{minor}.{patch}\")\n except solo.exceptions.NoSoloFoundError:\n print(\"No Solo found.\")\n print(\"If you are on Linux, are your udev rules up to date?\")\n except (solo.exceptions.NoSoloFoundError, ApduError):\n # Older\n print(\"Firmware is out of date (key does not know the SOLO_VERSION command).\")",
"def get_os_version(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetOsVersion', self.handle)",
"def latestidd():\n pth, _ = run_functions.install_paths(version='8.8.0') # works with any value in version\n dirpth = os.path.dirname(pth)\n dirpth = os.path.dirname(dirpth)\n alldirs = os.listdir(dirpth)\n eplusdirs = [dir for dir in alldirs if dir.startswith('EnergyPlus')]\n maxapp = max(eplusdirs)\n ver = folder2ver(maxapp)\n return ver",
"def get_version():\n return 1",
"async def test_get_fw_version(subject: Controller):\n _, fw_version = _find_smoothie_file()\n assert subject._cached_fw_version == fw_version",
"def _get_firmware_update_service_resource(self):\n manager, uri = self._get_ilo_details()\n try:\n fw_uri = manager['Oem']['Hp']['links']['UpdateService']['href']\n except KeyError:\n msg = (\"Firmware Update Service resource not found.\")\n raise exception.IloCommandNotSupportedError(msg)\n return fw_uri",
"def get_uni_version(self):\n version, major_version = None, None\n response = self.get_resource(category=VERSION, no_version=True)\n if response and response.get('version'):\n version = response['version']\n version_list = version.split('.')\n major_version = version_list[0][1:] + version_list[1]\n return version, major_version",
"def latest_version(self):\n state = self.coordinator.data\n\n try:\n # fake a new update\n # return \"foobar\"\n return dict_get(state, \"firmware_update_info.base.version\")\n except KeyError:\n return None",
"def latestidd():\n pth, _ = run_functions.install_paths(\n version=\"8.8.0\"\n ) # works with any value in version\n dirpth = os.path.dirname(pth)\n dirpth = os.path.dirname(dirpth)\n alldirs = os.listdir(dirpth)\n eplusdirs = [dir for dir in alldirs if dir.startswith(\"EnergyPlus\")]\n maxapp = max(eplusdirs)\n ver = folder2ver(maxapp)\n return ver",
"def get_firmata_version(self):\n return self.firmata_version",
"def version_hassio(self):\n return self._data.get(ATTR_HASSIO)",
"def iperf_version(self):\n # TODO: Is there a better way to get the const char than allocating 30?\n VersionType = c_char * 30\n return VersionType.in_dll(self.lib, \"version\").value.decode('utf-8')",
"def last_available_os_version(self) -> str:\n return pulumi.get(self, \"last_available_os_version\")",
"def __getRedhatVersion(self):\n result, resultErr = self.ksp_ssh.ssh_execute_command('cat /etc/redhat-release')\n if \"Red\" in result:\n linuxVendor = \"RedHat\"\n linuxRelease, resultErr = self.ksp_ssh.ssh_execute_command(\n \"cat /etc/redhat-release | sed 's/^Red Hat Enterprise Linux.* release /EL/' | sed 's/[ .].*//'\")\n elif \"CentOS\" in result:\n linuxVendor = \"CentOS\"\n linuxRelease, resultErr = self.ksp_ssh.ssh_execute_command(\n \"cat /etc/os-release | grep -w \\\"VERSION\\\"| sed 's/VERSION=\\\"/EL/' | sed 's/[ .].*//'\")\n elif \"Cloud\" in result:\n linuxVendor = \"CloudLinux\"\n linuxRelease, resultErr = self.ksp_ssh.ssh_execute_command(\n \"cat /etc/redhat-release | sed 's/^CloudLinux.*release //' | sed 's/[ .].*//'\")\n else:\n linuxVendor = \"unknownVendor\"\n linuxRelease = \"unknownRelease\"\n return linuxVendor.strip(), linuxRelease.strip()",
"def get_frida_version(self):\n if not self.available():\n return None\n\n result = self._do_adb_command('shell frida --version')\n if result:\n if 'not found' in result or 'No such file or directory' in result:\n result = self._do_adb_command('shell frida-server --version')\n if result and 'not found' in result:\n return None\n elif result:\n self._alternate_frida_name = True\n else:\n return None\n\n result = result.split(os.linesep)\n check_ver = result[len(result) - 2].replace('\\r', '').split('.')\n if len(check_ver) == 3:\n try:\n v_major = int(check_ver[0])\n v_minor = int(check_ver[1])\n v_patch = int(check_ver[2])\n\n if v_major >= 12 and v_minor >= 8:\n return '.'.join(check_ver)\n else:\n #print('frida version is outdated')\n return '.'.join(check_ver)\n except ValueError:\n return None\n\n return None",
"def get_version():\n module_path = os.path.join(os.path.dirname('__file__'), 'hwget', 'version.py')\n\n meta = {}\n with open(module_path) as fh:\n exec(fh.read(), meta)\n return meta[\"__version__\"]",
"def getversion():\n major_ = ctypes.c_int32()\n minor_ = ctypes.c_int32()\n revision_ = ctypes.c_int32()\n res = __library__.MSK_XX_getversion(ctypes.byref(major_),ctypes.byref(minor_),ctypes.byref(revision_))\n if res != 0:\n raise Error(rescode(res),Env.getcodedesc(rescode(res))[1])\n major_ = major_.value\n _major_return_value = major_\n minor_ = minor_.value\n _minor_return_value = minor_\n revision_ = revision_.value\n _revision_return_value = revision_\n return (_major_return_value,_minor_return_value,_revision_return_value)",
"def version():\n return uname().version",
"def version():\n return uname().version",
"def get_minknow_version(self):\n\t\tif self.have_metadata is False:\n\t\t\tself._get_metadata()\n\t\t\tself.have_metadata = True\n\n\t\ttry:\n\t\t\treturn self.keyinfo['context_tags'].attrs['verssion']\n\t\texcept:\n\t\t\treturn None",
"def fpga_minor():\n return int, None",
"def version_string(self):\n return self.server_version",
"def get_version():\n major=c_int_t(0)\n minor=c_int_t(0)\n patch=c_int_t(0)\n safe_call(backend.get().af_get_version(c_pointer(major), c_pointer(minor), c_pointer(patch)))\n return major.value,minor.value,patch.value"
] | [
"0.76318765",
"0.74084985",
"0.73684555",
"0.7351483",
"0.7309879",
"0.71681994",
"0.7141852",
"0.696607",
"0.6915562",
"0.690672",
"0.68797165",
"0.6854152",
"0.67555326",
"0.6753871",
"0.67471206",
"0.67280734",
"0.6711157",
"0.6674547",
"0.66333795",
"0.6591106",
"0.6559826",
"0.65548754",
"0.64803994",
"0.6444525",
"0.6432335",
"0.6371255",
"0.6371154",
"0.63599116",
"0.631369",
"0.63107944",
"0.6304193",
"0.62607956",
"0.6252628",
"0.6239198",
"0.6225609",
"0.6203831",
"0.619335",
"0.61799985",
"0.61795855",
"0.6172035",
"0.6145426",
"0.6142566",
"0.61327773",
"0.6129006",
"0.61137295",
"0.60760885",
"0.6072425",
"0.6066396",
"0.6034888",
"0.60287577",
"0.60119325",
"0.59864646",
"0.5966039",
"0.5956786",
"0.5930808",
"0.59302384",
"0.5917904",
"0.58963233",
"0.5896312",
"0.58759034",
"0.5872743",
"0.5872743",
"0.5852943",
"0.58501405",
"0.58440536",
"0.58405596",
"0.5838454",
"0.5796069",
"0.5768287",
"0.57459056",
"0.5743227",
"0.5741227",
"0.5731937",
"0.5722286",
"0.5721877",
"0.57195956",
"0.5713956",
"0.5713956",
"0.57117176",
"0.57115906",
"0.5710454",
"0.5699639",
"0.5697315",
"0.5693888",
"0.5692499",
"0.5690615",
"0.5688391",
"0.5687302",
"0.5665704",
"0.56589526",
"0.5643975",
"0.5632218",
"0.56252384",
"0.562256",
"0.5616191",
"0.5616191",
"0.56115746",
"0.5602262",
"0.560145",
"0.55915433"
] | 0.7568138 | 1 |
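For context on the positive document above: it is a plain nested-dict lookup into a Redfish-style manager payload. A minimal sketch of the same lookup against a mocked payload (the payload values here are illustrative only):

```python
def extract_ilo_firmware_version(manager: dict) -> dict:
    """Pull the iLO firmware version string out of a Redfish-style
    manager resource, mirroring the positive document's lookup."""
    version = manager['Firmware']['Current']['VersionString']
    return {'ilo_firmware_version': version}

# Mocked manager payload with illustrative values.
manager = {'Firmware': {'Current': {'VersionString': 'iLO 4 v2.55'}}}
assert extract_ilo_firmware_version(manager) == {
    'ilo_firmware_version': 'iLO 4 v2.55'
}
```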
Gets the ilo firmware version for server capabilities | def get_ilo_firmware_version_as_major_minor(self):
try:
manager, reset_uri = self._get_ilo_details()
ilo_fw_ver_str = (
manager['Oem']['Hp']['Firmware']['Current']['VersionString']
)
return common.get_major_minor(ilo_fw_ver_str)
except Exception:
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def firmware_version(self):\n return self._get_system_status()[\"firmware\"]",
"def _get_ilo_firmware_version(self):\n\n manager, reset_uri = self._get_ilo_details()\n ilo_firmware_version = manager['Firmware']['Current']['VersionString']\n return {'ilo_firmware_version': ilo_firmware_version}",
"def fw_version(self):\n return self.capabilities.get(\"fw_ver\")",
"def hw_version(self) -> str | None:\n return self.status.get(\"FIRMWARE\")",
"def firmware_version(self):\n return self.data.get('fw_ver')",
"def get_firmware_version():\r\n return utils.run('crossystem fwid').stdout.strip()",
"def firmware_version(self):\n buf = (ctypes.c_char * self.MAX_BUF_SIZE)()\n self._dll.JLINKARM_GetFirmwareString(buf, self.MAX_BUF_SIZE)\n return ctypes.string_at(buf).decode()",
"def hardware_version(self):\n return self.data.get('hw_ver')",
"def test_get_hyperflex_server_firmware_version_by_moid(self):\n pass",
"def get_firmware_version(self):\n fw_version = {\n \"BIOS\": self._api_helper.read_txt_file(BIOS_VER_PATH),\n \"BMC\": self.__get_bmc_ver(),\n \"SWITCH_CPLD1\": self.__get_cpld_ver(SW_CPLD1_VER_PATH),\n \"SWITCH_CPLD2\": self.__get_cpld_ver(SW_CPLD2_VER_PATH),\n }.get(self.name, \"Unknown\")\n\n return fw_version",
"def get_hardware_revision():\n return _pigpio_command(_control, _PI_CMD_HWVER, 0, 0)",
"def firmware_version(self):\n return self._read(MX_FIRMWARE_VERSION)",
"async def get_firmware_version(self):\n current_time = time.time()\n if self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE) == '':\n await self._send_sysex(PrivateConstants.REPORT_FIRMWARE, None)\n while self.query_reply_data.get(\n PrivateConstants.REPORT_FIRMWARE) == '':\n elapsed_time = time.time()\n if elapsed_time - current_time > 2:\n return None\n await asyncio.sleep(self.sleep_tune)\n reply = ''\n for x in self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE):\n reply_data = ord(x)\n if reply_data:\n reply += chr(reply_data)\n self.query_reply_data[PrivateConstants.REPORT_FIRMWARE] = reply\n return self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE)",
"def getFirmwareVersion(self, board=0):\n return self.callModule('admin', board, 0, 'getVersion')",
"def hardware_version(self) -> str:\n return self.camera_info[\"main_hw_version\"]",
"def get_firmware_version(self):\n request_command = self.parser_invoker.get_firmware_version_command_bytes(self.sequence_id, self.product_id)\n response_command_content = self.connectObj.send_receive_command(request_command)\n return response_command_content",
"def firmware_version(self) -> str:\n return self._firmware_version",
"def get_fw_version(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def get_version():\n ver = '0.0.0'\n req = restcall(0, 'config', 10.0)\n if req['text'] is not None:\n try: \n tree = ET.fromstring(req['text'])\n ver = tree.findall('app_version')[0].text\n if ver is None:\n ver = '0.0.0'\n _LOGGER.info(\"ISY: firmware version: %s\", ver)\n except ET.ParseError:\n _LOGGER.error(\"No version information found on ISY.\")\n return ver",
"async def get_firmware_version(self):\n if self.debug:\n print(\"Sending GET_FIRMWARE_VERSION\")\n\n response = await self.call_function(_COMMAND_GETFIRMWAREVERSION)\n if response is None:\n raise RuntimeError('Failed to detect the PN532')\n return tuple(response)",
"def firmware(self) -> str:\n return self._device_info[\"Firmware\"]",
"def get_firmware_version(self):\n cmd = protocol.GET_FIRMWARE_VERSION\n response = self.__send_and_receive(cmd)\n\n value = self.__gen_response_value(response)\n if value:\n self.firmware_version = value[0][1:]\n else:\n return False",
"def software_version(self) -> str:\n return self.camera_info[\"main_sw_version\"]",
"def test_get_hyperflex_server_firmware_version_list(self):\n pass",
"def get_firmware_version(self):\n response = self.call_function(PN532_COMMAND_GETFIRMWAREVERSION, 4)\n if response is None:\n raise RuntimeError('Failed to detect the PN532! Make sure there is sufficient power (use a 1 amp or greater power supply), the PN532 is wired correctly to the device, and the solder joints on the PN532 headers are solidly connected.')\n return (response[0], response[1], response[2], response[3])",
"def get_hardware_version(self):\n cmd = protocol.GET_HARDWARE_VERSION\n response = self.__send_and_receive(cmd)\n\n value = self.__gen_response_value(response)\n if value:\n self.hardware_version = value[0][1:]\n else:\n return False",
"def driver_version(self):\n data = fcntl.ioctl(self._fd, _EVIOCGVERSION, '\\x00\\x00\\x00\\x00')\n return struct.unpack(\"i\", data)[0]",
"def fw_ver(self):\n return self._fw_ver",
"def get_version(self):\r\n return self._arm.get_version()",
"def hardware_version(self):\n version = self._dll.JLINKARM_GetHardwareVersion()\n major = version / 10000 % 100\n minor = version / 100 % 100\n return '%d.%02d' % (major, minor)",
"def read_fw_version(self):\n\n # This function expects the firmware version to be in a line\n # prefixed with 'Product Extra'.\n # At the moment, it takes the form:\n # Product Extra : MCH FW V2.18.8 Final (r14042) (Mar 31 2017 - 11:29)\n # The following two parts will be extracted:\n # mch_fw_ver: V2.18.8 Final\n # mch_fw_date: Mar 31 2017 - 11:29\n # If NAT change the format, then this function will need to be updated\n\n pattern = \".*: MCH FW (.*) \\(.*\\) \\((.*)\\)\"\n\n for mch in range(1,3):\n try:\n result = self.mch_comms.call_ipmitool_command([\"fru\", \"print\", str(mch + MCH_FRU_ID_OFFSET)])\n\n for line in result.splitlines():\n if FW_TAG in line:\n match = re.match(pattern, line)\n if match:\n self.mch_fw_ver[mch] = match.group(1)\n self.mch_fw_date[mch] = match.group(2)\n else:\n self.mch_fw_ver[mch] = \"Unknown\"\n self.mch_fw_date[mch] = \"Unknown\"\n except CalledProcessError as e:\n self.mch_fw_ver[mch] = \"Unknown\"\n self.mch_fw_date[mch] = \"Unknown\"\n except TimeoutExpired as e:\n print(\"read_fw_version: caught TimeoutExpired exception: {}\".format(e))",
"def test_patch_hyperflex_server_firmware_version(self):\n pass",
"def get_machine_version():\n return get_file_content(\"/home/pi/.machineconfig/latest_version\")",
"def version(self):\n done, data = self._request('GV')\n if done:\n return {\n 'firmware': data[0],\n 'protocol': data[1]\n }\n\n raise EvseError",
"def sw_version(self) -> str | None:\n return self.status.get(\"VERSION\")",
"def fusion_api_get_server_hardware_firmware_compliance(self, body, api=None, headers=None):\n return self.sh.post(body=body, param='/firmware-compliance', api=api, headers=headers)",
"def get_server_version(self):\n return self.client.getServerVersion().decode('utf-8')\n return self.client.getServerVersion().decode('utf-8')",
"def test_update_hyperflex_server_firmware_version(self):\n pass",
"def get_server_version(self):\n return self.__aceQLHttpApi.get_server_version()",
"def fusion_api_get_server_hardware_firmware(self, uri, api=None, headers=None):\n return self.sh.get(uri=uri, api=api, headers=headers, param='/firmware')",
"def software_version(self) -> str:\n return self.data.get(Attribute.SOFTWARE_VERSION)",
"def compatible_firmware_version(self):\n identifier = self.firmware_version.split('compiled')[0]\n buf_size = self.MAX_BUF_SIZE\n buf = (ctypes.c_char * buf_size)()\n res = self._dll.JLINKARM_GetEmbeddedFWString(identifier.encode(), buf, buf_size)\n if res < 0:\n raise errors.JLinkException(res)\n\n return ctypes.string_at(buf).decode()",
"def version(self):\n data = self._ftdi.spi_read(self.VERSION_ADDR, len=1, burst='fixed')\n return data[0] & self.VERSION_MASK",
"def get_software_version(self):\n \n try:\n if self.product_info is None:\n self.product_info = self.connection.System.SystemInfo.\\\n get_product_information()\n return self.product_info['product_version']\n except:\n raise",
"def get_fw_version(self):\n summary = self.get_version_summary()\n pattern = '\\$.*? .*? .*? .*? .*? .*? .*? .*? (.*?) \\r\\n' \n fw_version = re.findall(pattern,summary).pop()\n return fw_version",
"async def get_firmware_version(self):\n current_time = time.time()\n #logstring(\"setting current time {}\".format(current_time))\n #logstring(\"1\")\n if self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE) == '':\n #logstring(\"2\")\n #logstring(\"checking time now 1 {}\".format(time.time()))\n await self._send_sysex(PrivateConstants.REPORT_FIRMWARE, None)\n #logstring(\"checking time now 2 {}\".format(time.time()))\n #logstring(\"3\")\n if self.serial_port.IsPortOpen == False:\n #logstring(\"Looks like that port wasn't working!!!!!!!!!!!!!????\")\n return None\n while self.query_reply_data.get(\n PrivateConstants.REPORT_FIRMWARE) == '':\n #logstring(\"4\")\n elapsed_time = time.time()\n #logstring(\"setting elapsed time {}\".format(elapsed_time))\n #logstring(\"5\")\n if elapsed_time - current_time > 3:\n #logstring(\"really took too long: {} {} {}\".format(elapsed_time, current_time, elapsed_time - current_time))\n return None\n #logstring(\"7\")\n if self.serial_port.IsPortOpen == False:\n #logstring(\"Looks like that port wasn't working!!!!!!!!!!!!!\")\n return None\n await asyncio.sleep(self.sleep_tune)\n #logstring(\"8\")\n #logstring(\"Geez, that took: {} {} {} ??????????????????\".format(elapsed_time, current_time, elapsed_time - current_time))\n\n reply = ''\n #logstring(\"9\")\n for x in self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE):\n reply_data = ord(x)\n if reply_data:\n reply += chr(reply_data)\n self.query_reply_data[PrivateConstants.REPORT_FIRMWARE] = reply\n #logstring(\"10\")\n return self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE)",
"def get_os_version(self):\n\t\treturn call_sdk_function('PrlSrvInfo_GetOsVersion', self.handle)",
"def get_server_version():\n url_address = 'https://raw.githubusercontent.com/muhammadfredo/FrMaya/master/FrMaya/version.py'\n url_data = urllib2.urlopen(url_address).read(200)\n result = re.search(r'(\\d+), (\\d+), (\\d+)', url_data, re.MULTILINE)\n if result:\n version_list = [int(v) for v in result.groups()]\n return version_list\n else:\n raise ValueError('Cannot get server version!!!')",
"def version():\n cmd = \"{} -v\".format(_detect_os())\n out = __salt__[\"cmd.run\"](cmd).splitlines()\n ret = out[0].split(\": \")\n return ret[1]",
"def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]",
"def get_host_os_minor(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetHostOsMinor', self.handle)",
"def getFirmwareVersion(self, *id_list):\n if id_list == ():#Empty list\n return -1\n elif len(id_list) == 1:#Just one ID.\n pkt = Packet.makeReadPacket(id_list[0],xl320.XL320_FIRMWARE_VERSION)\n else:\n pkt = Packet.makeSyncReadPacket(xl320.XL320_FIRMWARE_VERSION,id_list)\n\n ans,err_num,err_str = self.serial.sendPkt(pkt)\n if ans == []:#In case of an empty packet arrives\n return -2\n else:\n data = []\n for index,val in enumerate(id_list):\n #print (index,val)\n data.append(val) #Append the ID value\n data.append(ans[index*12+9])#Append the respective ID's data\n return data",
"def getFirmwareRevision(self): \n return self.firmware_revision",
"def hw_from_req(req):\n return req.app['com.opentrons.hardware']",
"def get_server_capabilities(self):\n capabilities = {}\n system = self._get_host_details()\n capabilities['server_model'] = system['Model']\n rom_firmware_version = (\n system['Oem']['Hp']['Bios']['Current']['VersionString'])\n capabilities['rom_firmware_version'] = rom_firmware_version\n capabilities.update(self._get_ilo_firmware_version())\n capabilities.update(self._get_number_of_gpu_devices_connected())\n if self._get_tpm_capability():\n capabilities['trusted_boot'] = 'true'\n\n if self._get_cpu_virtualization():\n capabilities['cpu_vt'] = 'true'\n if self._get_nvdimm_n_status():\n capabilities['nvdimm_n'] = 'true'\n try:\n self.get_secure_boot_mode()\n capabilities['secure_boot'] = 'true'\n except exception.IloCommandNotSupportedError:\n # If an error is raised dont populate the capability\n # secure_boot\n pass\n if self._is_sriov_enabled():\n capabilities['sriov_enabled'] = 'true'\n return capabilities",
"def get_fw_ver(self, rec, report):\n\n rec.VAL = self.crate.mch_fw_ver[self.slot]",
"def _GetWhitelistVersion(self, component):\n firmware_info = os.path.join(pyauto.PyUITest.DataDir(),\n 'pyauto_private/chromeos/',\n 'chromeos_firmware_info.txt')\n assert os.path.exists(firmware_info), 'Data file does not exist.'\n return self.EvalDataFrom(firmware_info)[self.ChromeOSBoard()][component]",
"def test_create_hyperflex_server_firmware_version(self):\n pass",
"def get_product_version(self):\n\t\treturn call_sdk_function('PrlSrvInfo_GetProductVersion', self.handle)",
"def get_required_ovs_version(self):\n return self.get_required_version(\"Open vSwitch\", self.openshift_to_ovs_version)",
"def server_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"server_version\")",
"def server_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"server_version\")",
"def fusion_api_edit_server_hardware_mp_firmware_version(self, body, uri, api=None, headers=None):\n return self.sh.update(body, uri, api, headers, param='/mpFirmwareVersion')",
"def get_version(self):\n url = '{}/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['orionld version']\n except Exception as e:\n pass\n return ''",
"async def get_version(self):\n\n # Display info message\n log.info(\"get_version\")\n\n # By default empty string\n version = \"\"\n\n # Run get version on the device\n output = await self.send_command(self.cmd_get_version)\n\n # Seek \"Version \" and \",\" to get the version in the returned output\n version = output.split(\"Version \")[1].split(\",\")[0]\n\n # Display info message\n log.info(f\"get_version: version: {version}\")\n\n # Return the version of the software of the device\n return version",
"def version():\n from MotionWise.log_proc import __version__ as log_ver\n from MotionWise.pm_measurement import __version__ as pm_ver\n from MotionWise.MotionWise_perf_proxy import __version__ as proxy_ver \n from MotionWise.MotionWise_perf_client import __version__ as client_ver \n \n ver = \"$Revision: 80204 $\".split()[1]\n batch = max([ pm_instrument.version().split('.')[-1], log_ver\n , ver, pm_ver, proxy_ver, client_ver, FP.__version__])\n return \"3.0.{}\".format(batch)",
"def do_get_version(self, arg):\n arg = arg\n print(self.phil.if_version)",
"def minor_version(self):\n return self.unpack_dword(0x18)",
"def version(self):\n response = self._request_call('/version')\n return response.version_etcdserver",
"def version(self):\n if not self.proxy:\n self.proxy = self.session.service(\"ALBattery\")\n return self.proxy.version()",
"def getversion(): # 3\n res,resargs = _msk.Env.getversion()\n if res != 0:\n raise Error(rescode(res),\"\")\n _major_return_value,_minor_return_value,_build_return_value,_revision_return_value = resargs\n return _major_return_value,_minor_return_value,_build_return_value,_revision_return_value",
"def GetVersion(self):\n try:\n return self.server.GetVersionString()\n except dbus.DBusException:\n return None",
"def fusion_api_get_appliance_firmware_upgrade_status(self, api=None, headers=None):\n param = '/notification'\n return self.appfirmware.get(api=api, headers=headers, param=param)",
"def update_firmware(self):\n return self._dll.JLINKARM_UpdateFirmwareIfNewer()",
"def _GetSystemVersion(self, component, info):\n # Check if we are on mario, then we need to use the legacy parser\n if self.ChromeOSBoard() == 'x86-mario':\n return self._GetSystemVersionMario(component, info)\n items = info.strip().splitlines()\n # This is going to give us a list of lines, we are looking for the\n # following ones:\n # BIOS version: board.xx.xx.xxx.xxx.xx\n # EC version: foobar\n for line in items:\n line_components = line.split(':')\n # The line we are looking for has at least 2 items\n if len(line_components) >= 2 and line_components[0] == component:\n return line_components[1].strip()\n self.fail('Could not locate the following item %s in the return value '\n 'of chromeos-firmwareupdate.' % component)",
"def get_host_os_major(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetHostOsMajor', self.handle)",
"def version(serial, udp):\n\n if udp:\n solo.fido2.force_udp_backend()\n\n try:\n major, minor, patch = solo.client.find(serial).solo_version()\n print(f\"{major}.{minor}.{patch}\")\n except solo.exceptions.NoSoloFoundError:\n print(\"No Solo found.\")\n print(\"If you are on Linux, are your udev rules up to date?\")\n except (solo.exceptions.NoSoloFoundError, ApduError):\n # Older\n print(\"Firmware is out of date (key does not know the SOLO_VERSION command).\")",
"def version(serial, udp):\n\n if udp:\n solo.fido2.force_udp_backend()\n\n try:\n major, minor, patch = solo.client.find(serial).solo_version()\n print(f\"{major}.{minor}.{patch}\")\n except solo.exceptions.NoSoloFoundError:\n print(\"No Solo found.\")\n print(\"If you are on Linux, are your udev rules up to date?\")\n except (solo.exceptions.NoSoloFoundError, ApduError):\n # Older\n print(\"Firmware is out of date (key does not know the SOLO_VERSION command).\")",
"def get_os_version(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetOsVersion', self.handle)",
"def latestidd():\n pth, _ = run_functions.install_paths(version='8.8.0') # works with any value in version\n dirpth = os.path.dirname(pth)\n dirpth = os.path.dirname(dirpth)\n alldirs = os.listdir(dirpth)\n eplusdirs = [dir for dir in alldirs if dir.startswith('EnergyPlus')]\n maxapp = max(eplusdirs)\n ver = folder2ver(maxapp)\n return ver",
"def get_version():\n return 1",
"async def test_get_fw_version(subject: Controller):\n _, fw_version = _find_smoothie_file()\n assert subject._cached_fw_version == fw_version",
"def _get_firmware_update_service_resource(self):\n manager, uri = self._get_ilo_details()\n try:\n fw_uri = manager['Oem']['Hp']['links']['UpdateService']['href']\n except KeyError:\n msg = (\"Firmware Update Service resource not found.\")\n raise exception.IloCommandNotSupportedError(msg)\n return fw_uri",
"def get_uni_version(self):\n version, major_version = None, None\n response = self.get_resource(category=VERSION, no_version=True)\n if response and response.get('version'):\n version = response['version']\n version_list = version.split('.')\n major_version = version_list[0][1:] + version_list[1]\n return version, major_version",
"def latest_version(self):\n state = self.coordinator.data\n\n try:\n # fake a new update\n # return \"foobar\"\n return dict_get(state, \"firmware_update_info.base.version\")\n except KeyError:\n return None",
"def latestidd():\n pth, _ = run_functions.install_paths(\n version=\"8.8.0\"\n ) # works with any value in version\n dirpth = os.path.dirname(pth)\n dirpth = os.path.dirname(dirpth)\n alldirs = os.listdir(dirpth)\n eplusdirs = [dir for dir in alldirs if dir.startswith(\"EnergyPlus\")]\n maxapp = max(eplusdirs)\n ver = folder2ver(maxapp)\n return ver",
"def get_firmata_version(self):\n return self.firmata_version",
"def version_hassio(self):\n return self._data.get(ATTR_HASSIO)",
"def iperf_version(self):\n # TODO: Is there a better way to get the const char than allocating 30?\n VersionType = c_char * 30\n return VersionType.in_dll(self.lib, \"version\").value.decode('utf-8')",
"def last_available_os_version(self) -> str:\n return pulumi.get(self, \"last_available_os_version\")",
"def __getRedhatVersion(self):\n result, resultErr = self.ksp_ssh.ssh_execute_command('cat /etc/redhat-release')\n if \"Red\" in result:\n linuxVendor = \"RedHat\"\n linuxRelease, resultErr = self.ksp_ssh.ssh_execute_command(\n \"cat /etc/redhat-release | sed 's/^Red Hat Enterprise Linux.* release /EL/' | sed 's/[ .].*//'\")\n elif \"CentOS\" in result:\n linuxVendor = \"CentOS\"\n linuxRelease, resultErr = self.ksp_ssh.ssh_execute_command(\n \"cat /etc/os-release | grep -w \\\"VERSION\\\"| sed 's/VERSION=\\\"/EL/' | sed 's/[ .].*//'\")\n elif \"Cloud\" in result:\n linuxVendor = \"CloudLinux\"\n linuxRelease, resultErr = self.ksp_ssh.ssh_execute_command(\n \"cat /etc/redhat-release | sed 's/^CloudLinux.*release //' | sed 's/[ .].*//'\")\n else:\n linuxVendor = \"unknownVendor\"\n linuxRelease = \"unknownRelease\"\n return linuxVendor.strip(), linuxRelease.strip()",
"def get_frida_version(self):\n if not self.available():\n return None\n\n result = self._do_adb_command('shell frida --version')\n if result:\n if 'not found' in result or 'No such file or directory' in result:\n result = self._do_adb_command('shell frida-server --version')\n if result and 'not found' in result:\n return None\n elif result:\n self._alternate_frida_name = True\n else:\n return None\n\n result = result.split(os.linesep)\n check_ver = result[len(result) - 2].replace('\\r', '').split('.')\n if len(check_ver) == 3:\n try:\n v_major = int(check_ver[0])\n v_minor = int(check_ver[1])\n v_patch = int(check_ver[2])\n\n if v_major >= 12 and v_minor >= 8:\n return '.'.join(check_ver)\n else:\n #print('frida version is outdated')\n return '.'.join(check_ver)\n except ValueError:\n return None\n\n return None",
"def get_version():\n module_path = os.path.join(os.path.dirname('__file__'), 'hwget', 'version.py')\n\n meta = {}\n with open(module_path) as fh:\n exec(fh.read(), meta)\n return meta[\"__version__\"]",
"def getversion():\n major_ = ctypes.c_int32()\n minor_ = ctypes.c_int32()\n revision_ = ctypes.c_int32()\n res = __library__.MSK_XX_getversion(ctypes.byref(major_),ctypes.byref(minor_),ctypes.byref(revision_))\n if res != 0:\n raise Error(rescode(res),Env.getcodedesc(rescode(res))[1])\n major_ = major_.value\n _major_return_value = major_\n minor_ = minor_.value\n _minor_return_value = minor_\n revision_ = revision_.value\n _revision_return_value = revision_\n return (_major_return_value,_minor_return_value,_revision_return_value)",
"def version():\n return uname().version",
"def version():\n return uname().version",
"def get_minknow_version(self):\n\t\tif self.have_metadata is False:\n\t\t\tself._get_metadata()\n\t\t\tself.have_metadata = True\n\n\t\ttry:\n\t\t\treturn self.keyinfo['context_tags'].attrs['verssion']\n\t\texcept:\n\t\t\treturn None",
"def version_string(self):\n return self.server_version",
"def fpga_minor():\n return int, None",
"def get_version():\n major=c_int_t(0)\n minor=c_int_t(0)\n patch=c_int_t(0)\n safe_call(backend.get().af_get_version(c_pointer(major), c_pointer(minor), c_pointer(patch)))\n return major.value,minor.value,patch.value"
] | [
"0.76303834",
"0.7566883",
"0.7366854",
"0.73500246",
"0.73084676",
"0.71665615",
"0.7140502",
"0.6964108",
"0.69159454",
"0.6905643",
"0.6877727",
"0.6852079",
"0.6753259",
"0.6751832",
"0.67452294",
"0.6726138",
"0.6709597",
"0.66727823",
"0.6632324",
"0.65892386",
"0.65594804",
"0.65531516",
"0.6479001",
"0.6444421",
"0.64305294",
"0.6369859",
"0.63692856",
"0.63583475",
"0.6310571",
"0.63086057",
"0.6302832",
"0.6261048",
"0.6251307",
"0.62380254",
"0.62242454",
"0.6204963",
"0.6192853",
"0.618024",
"0.61790353",
"0.6172514",
"0.6143695",
"0.61410546",
"0.61306626",
"0.61269623",
"0.6112678",
"0.60738945",
"0.60712785",
"0.60660833",
"0.60334367",
"0.60275763",
"0.6011716",
"0.59854174",
"0.5963605",
"0.5956634",
"0.5930859",
"0.5928597",
"0.5915129",
"0.5896746",
"0.5893612",
"0.58757395",
"0.58724767",
"0.58724767",
"0.5852518",
"0.5847999",
"0.5841885",
"0.5838051",
"0.5836054",
"0.5794593",
"0.57677543",
"0.5744125",
"0.5741337",
"0.5739901",
"0.57317024",
"0.5720559",
"0.57202506",
"0.5719614",
"0.57135475",
"0.57135475",
"0.57104707",
"0.5709311",
"0.5708022",
"0.5698174",
"0.56972194",
"0.5691811",
"0.5690588",
"0.56883633",
"0.5686459",
"0.5685502",
"0.5663301",
"0.5657127",
"0.56431377",
"0.5631849",
"0.56234336",
"0.5620571",
"0.5614582",
"0.5614582",
"0.5609119",
"0.56008357",
"0.56002635",
"0.5589622"
] | 0.7407143 | 2 |
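The positive document above delegates to `common.get_major_minor`, whose implementation does not appear in the corpus. A plausible sketch of such a helper, assuming version strings like 'iLO 4 v2.55' (the accepted formats are an assumption, not confirmed by the source):

```python
import re
from typing import Optional

def get_major_minor(version_string: str) -> Optional[str]:
    """Extract 'major.minor' from a firmware version string.

    Sketch only: the real common.get_major_minor is not shown in the
    corpus, and the input formats handled here are assumed.
    """
    if not version_string:
        return None
    match = re.search(r'(\d+)\.(\d+)', version_string)
    if match is None:
        return None
    return f"{match.group(1)}.{match.group(2)}"

assert get_major_minor("iLO 4 v2.55 Final") == "2.55"
assert get_major_minor("no digits here") is None
```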
Return sriov enabled or not | def _is_sriov_enabled(self):
return (self._get_bios_setting('Sriov') == 'Enabled') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_enabled(self):",
"def swo_enabled(self):\n return self._swo_enabled",
"def ms_get_rstp_enabled(self):\n self.open_route('/configure/switch_settings', \"Switch\")\n dropdown_value = page_utils.get_dropdown_value(\n self.get_page(),\n var_id='node_group_use_stp')\n return dropdown_value == 'Enable RSTP'",
"def Enabled(self) -> bool:",
"def get_prog_enable(self):\n #en = self._get_prop(\"enabled\")\n #return bool( en == \"true\" )\n if \"enabled\" in self._mydict:\n return bool(self._mydict[\"enabled\"] == \"true\")\n return True",
"def isEnabled(self):",
"def get_isenabled(self):\n return self.isenabled",
"def isEnabled(self) -> bool:\n ...",
"def enabled(self) -> bool:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> bool:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> bool:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> bool:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> bool:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> bool:\n return pulumi.get(self, \"enabled\")",
"def is_scr_res_enabled(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsScrResEnabled', self.handle))",
"def enabled(self):\n return True",
"def enabled(self):\n return True",
"def has_stp_cli(self):\n if self.is_escom_l:\n cmd = self.cli(\"show spanning-tree\")\n return \"Spanning tree enabled\" in cmd\n else:\n cmd = self.cli(\"show spanning-tree active\")\n return \" enabled \" in cmd",
"def get_new_config(self):\n app_config = zaza.model.get_application_config(self.application_name)\n return 'enable-sriov', str(not app_config['enable-sriov']['value'])",
"def has_sriovdp_enabled(labels):\n if not labels:\n return False\n\n for label in labels:\n if label.label_key == helm_common.LABEL_SRIOVDP and label.label_value:\n return helm_common.LABEL_VALUE_ENABLED == label.label_value.lower()\n\n # We haven't found the sriovdp node key. Return False\n return False",
"def isSirenActive(self) -> bool:\r\n if self.visprotocol is not None:\r\n return self.visprotocol.isSirenActive()\r\n return False",
"def sso_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"sso_enabled\")",
"def is_enabled(self):\n return self.sdk.is_enabled",
"def enabled(self) -> pulumi.Output[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self):\n return self._get('enabled')",
"def is_custom_mode_enabled(self):\n return os.environ.get('SNYK_CUSTOM_MODE', 'false').lower() in ('1', 'yes', 'true')",
"def shortenable(s):\n return s, True",
"def isEnabled(self):\n return self.enabled",
"def getStatus(self):\n return self.enabled",
"def enable_snat(self) -> bool:\n return pulumi.get(self, \"enable_snat\")",
"def is_enabled(self):\n return self.enabled",
"def __bool__(self):\n return any(self.smask)",
"def is_on(self):\n return self._program.get(\"enabled\") is True",
"def is_i2s_enabled(self):\n return ((self.get_control() & CONTROL_ENABLE) > 0)",
"def enabled(self) -> Optional[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> Optional[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> Optional[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> Optional[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> Optional[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> Optional[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> Optional[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> Optional[bool]:\n return pulumi.get(self, \"enabled\")",
"def is_spow_enabled() -> bool:\n\n return _SPOW_ENABLED",
"def is_enabled(self):\n\t\treturn bool(call_sdk_function('PrlShare_IsEnabled', self.handle))",
"def enabled(self):\n return self.__enabled",
"def enabled(self) -> bool:\n return False",
"async def enabled(self) -> bool:\n response = await self.adguard.request(\"parental/status\")\n return response[\"enabled\"]",
"def enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"enabled\")",
"def isEnabled(self):\n return self.__enabled",
"def test_enabled(self):\n # OSA script should have been installed in setUp function, which sets\n # enabled to True by default.\n self.assertTrue(self.run_function(\"assistive.enabled\", [OSA_SCRIPT]))\n # Disable OSA Script\n self.run_function(\"assistive.enable\", [OSA_SCRIPT, False])\n # Assert against new disabled status\n self.assertFalse(self.run_function(\"assistive.enabled\", [OSA_SCRIPT]))",
"def is_enabled(command):\n if command not in Controller.commands:\n return False\n return Controller.commands[command][2]",
"def is_enabled(self):\n with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):\n return self._action('is-enabled').succeeded",
"def isEnabled(state):\n return (isActive(state) or state == State.preEnabled)",
"def is_on(self):\n return self._data[\"enabled\"]",
"def is_shed_tool_conf(self):",
"def is_enabled(self):\n\t\treturn bool(call_sdk_function('PrlVirtNet_IsEnabled', self.handle))",
"def test_getboolean(self):\n self.assertEqual(self.config.getboolean('advanced','bool'),True)",
"def isEnabled(self, p_int): # real signature unknown; restored from __doc__\n return False",
"def enabled(self):\n return self._enabled",
"def enabled(self):\n return self._enabled",
"def enabled(self):\n return self._enabled",
"def enabled(self):\n return self._enabled",
"def enabled(self):\n return self._enabled",
"def enabled(self):\n return self._enabled",
"def enabled(self):\n return self._enabled",
"def enabled(self):\n return self._enabled",
"def is_enabled(self):\n return self._is_enabled",
"def is_on(self):\n return self.wink.state()",
"def enabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"enabled\")",
"def enabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"enabled\")",
"def trainable(self):\n return True",
"def enabled(self):\n return self._packet.get('enabled', True)",
"def enabled(self):\n raise NotImplementedError",
"def is_enabled(self) -> pulumi.Output[bool]:\n return pulumi.get(self, \"is_enabled\")",
"def getEnabled(self):\n if getattr(self, 'installedversion', None) != __version__ :\n return False\n return self.getField('enabled').get(self)",
"def is_enabled(self):\n return self._enabled",
"def is_enabled(self):\n return self._enabled",
"def cr_filter_enabled(self):\n i = ct.c_int()\n self.lib.GetFilterMode(ct.pointer(i))\n return i.value",
"def boolean(self) -> bool:\n return self.random.choice([True, False])",
"def need_ovo(model_name):\n return (model_name == 'logistic') or (model_name == 'sgd')",
"def _get_enable(self):\n return self.__enable",
"def _get_enable(self):\n return self.__enable",
"def is_enabled(self):\n\t\treturn bool(call_sdk_function('PrlVmDev_IsEnabled', self.handle))",
"def Enabled(self):\n return self._get_attribute('enabled')",
"def __call__(self, feature):\n return self.is_enabled(feature)",
"def is_enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"is_enabled\")",
"def is_enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"is_enabled\")",
"def is_enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"is_enabled\")",
"def is_enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"is_enabled\")",
"def is_enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"is_enabled\")",
"def is_enabled(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"is_enabled\")"
] | [
"0.644866",
"0.63986534",
"0.62832564",
"0.6205629",
"0.6096058",
"0.6000314",
"0.5948032",
"0.5870357",
"0.5815211",
"0.5815211",
"0.5815211",
"0.5815211",
"0.5815211",
"0.5815211",
"0.57979065",
"0.5792855",
"0.5792855",
"0.5763365",
"0.57592833",
"0.5725535",
"0.5724957",
"0.57136196",
"0.5710624",
"0.5689521",
"0.5680936",
"0.5666903",
"0.5659068",
"0.5655457",
"0.56496453",
"0.5640356",
"0.56384546",
"0.5620806",
"0.56110936",
"0.5609383",
"0.5594635",
"0.5594635",
"0.5594635",
"0.5594635",
"0.5594635",
"0.5594635",
"0.5594635",
"0.5594635",
"0.5577245",
"0.5572823",
"0.55624217",
"0.5559816",
"0.55518436",
"0.5541812",
"0.5541812",
"0.5541812",
"0.5541812",
"0.5541812",
"0.5541812",
"0.5541812",
"0.5541812",
"0.55372834",
"0.5532702",
"0.5522476",
"0.5520514",
"0.5519525",
"0.5512501",
"0.5503083",
"0.54981476",
"0.5486921",
"0.5485171",
"0.54829276",
"0.54829276",
"0.54829276",
"0.54829276",
"0.54829276",
"0.54829276",
"0.54829276",
"0.54829276",
"0.5470975",
"0.5462644",
"0.5459539",
"0.5459539",
"0.5459539",
"0.5459539",
"0.5447784",
"0.54399073",
"0.54348946",
"0.5427733",
"0.5417614",
"0.541662",
"0.541662",
"0.5411509",
"0.54088044",
"0.5397159",
"0.5394381",
"0.5394381",
"0.53931737",
"0.53903365",
"0.5356948",
"0.53536075",
"0.53536075",
"0.53536075",
"0.53536075",
"0.53536075",
"0.53536075"
] | 0.78212315 | 0 |
Gets server properties which can be used for scheduling | def get_server_capabilities(self):
capabilities = {}
system = self._get_host_details()
capabilities['server_model'] = system['Model']
rom_firmware_version = (
system['Oem']['Hp']['Bios']['Current']['VersionString'])
capabilities['rom_firmware_version'] = rom_firmware_version
capabilities.update(self._get_ilo_firmware_version())
capabilities.update(self._get_number_of_gpu_devices_connected())
if self._get_tpm_capability():
capabilities['trusted_boot'] = 'true'
if self._get_cpu_virtualization():
capabilities['cpu_vt'] = 'true'
if self._get_nvdimm_n_status():
capabilities['nvdimm_n'] = 'true'
try:
self.get_secure_boot_mode()
capabilities['secure_boot'] = 'true'
except exception.IloCommandNotSupportedError:
            # If an error is raised don't populate the capability
# secure_boot
pass
if self._is_sriov_enabled():
capabilities['sriov_enabled'] = 'true'
return capabilities | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def properties(self):\n response = self._client.get('server/properties')\n return ServerProperties.from_json(response.text)",
"def properties(self):\r\n if self._properties is None:\r\n res = self._con.get(self._url, {'f':'json'})\r\n self._properties = PropertyMap(res)\r\n return self._properties",
"def properties(self):\r\n if self._properties is None:\r\n res = self._con.get(self._url, {'f':'json'})\r\n self._properties = PropertyMap(res)\r\n return self._properties",
"def getProperties():",
"def get_properties():",
"def get_properties(self):\n return irmc_common.COMMON_PROPERTIES",
"def properties(self) -> Any:\n return pulumi.get(self, \"properties\")",
"def get_servers_info(self):\n return self.mrr_obj.get('/info/servers')",
"def properties_get(self):\n return self._get('properties')",
"def get_serverinfo(self, property=None):\n if property is None:\n property = '*'\n\n uri = 'json/serverinfo/' + property\n data = self._get(uri=uri, headers=self.headers)\n if data.status_code == 200:\n return data.json()\n else:\n return False",
"def get_properties(self):\n return self.properties",
"def properties(self) -> Optional[str]:\n return pulumi.get(self, \"properties\")",
"def get_servicenow_sys_properties(cache: dict):\n response = cache.get(\"get_servicenow_sys_properties\")\n if response:\n print(\"servicenow.access_control cache hit!\")\n return response\n \n # Will need to create the pysnow.Client object everywhere - doesn't appear to be thread-safe\n snow = pysnow.Client(\n instance=SNOW_INSTANCE_NAME,\n user=SNOW_SSPM_USERNAME,\n password=SNOW_SSPM_PASSWORD\n )\n\n sysPropResource = snow.resource(api_path='/table/sys_properties')\n sysProps = sysPropResource.get().all()\n\n cache[\"get_servicenow_sys_properties\"] = sysProps\n\n return cache[\"get_servicenow_sys_properties\"]",
"def get_properties(self):\n return COMMON_PROPERTIES",
"def serverInfo(self):\n return self._server.getServerInfo(self._token)",
"def getServerOptions(self):\n pass",
"def get_server_info(self):\n\t\treturn handle_to_object(call_sdk_function('PrlSrv_GetServerInfo', self.handle))",
"def properties(self):\n\n dict = {\"Host Name\":self.host, \"Stellar Mass\":self.st_mass,\n \"Stellar Radius\":self.st_rad}\n\n return dict",
"def get_srv_config(self):\n\t\treturn Job(SDK.PrlSrv_GetSrvConfig(self.handle)[0])",
"def getProperties(self):\n return self.properties",
"def properties(self) -> 'outputs.DeploymentPropertiesResponse':\n return pulumi.get(self, \"properties\")",
"def servertime(self):\r\n return servertime.Servertime(self)",
"def get_servers(self):\n\t\treturn self.__servers",
"def system_properties(self):\r\n return dict(self._get_system_properties(self.java))",
"def _get_host_properties(pulp_version):\n if pulp_version < Version('3'):\n return _get_v2_host_properties(pulp_version)\n return _get_v3_host_properties(pulp_version)",
"def getServerStats():\n return _xmlUrlToDict(serverString + \"/rest/stats\", int)",
"def get_properties(self):\n return self.properties",
"def properties(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"properties\")",
"def properties(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"properties\")",
"def get_server_time(self):\n return dict(\n # current timezone aware date and time.\n datetime=timezone.now(),\n # current timezone\n tz=settings.TIME_ZONE\n )",
"def get_property(self, client):\r\n client.getProperty()",
"def get_cfg(self, server):\n\t\tserver = valid_server(server)\n\t\treturn self._get_cfg_from_list(server)",
"def properties(self):\n return self._props",
"def get_db_info(self):\n db_info = {}\n db_info[\"Mongo Server Info\"] = self.db_client.server_info()\n return db_info",
"def getreplicationsettings(self):\n d = {}\n try:\n con = hcpsdk.Connection(self.target, debuglevel=self.debuglevel)\n except Exception as e:\n raise hcpsdk.HcpsdkError(str(e))\n else:\n self.connect_time = con.connect_time\n try:\n r = con.GET('/mapi/services/replication')\n except Exception as e:\n raise hcpsdk.HcpsdkError(str(e))\n else:\n if r.status == 200:\n # Good status, get and parse the Response\n x = r.read()\n self.service_time = con.service_time2\n for child in Et.fromstring(x):\n d[child.tag] = child.text\n else:\n raise (hcpsdk.HcpsdkError('{} - {}'.format(r.status, r.reason)))\n finally:\n # noinspection PyUnboundLocalVariable\n con.close()\n\n return d",
"def properties(self):\n return self._properties",
"def properties(self):\n return self._properties",
"def config_get():\n server_config = db.get().server_config_get()\n\n if not server_config:\n return flask.jsonify({\n \"message\": \"Netmet server has not been setup yet\"}), 404\n\n return flask.jsonify(server_config), 200",
"def _get_schedule_policy_properties(self):\n flag, response = self._commcell_object._cvpysdk_object.make_request(\n 'GET', self._SCHEDULE_POLICY)\n\n if flag:\n if response.json() and 'taskInfo' in response.json():\n _task_info = response.json()['taskInfo']\n\n if 'associations' in _task_info:\n self._associations = _task_info['associations']\n\n if 'task' in _task_info:\n self._task_json = _task_info['task']\n\n self._app_groups = _task_info['appGroup'].get('appGroups')\n\n self._subtasks = _task_info['subTasks']\n\n for subtask in self._subtasks:\n self._all_schedules.append({\n \"schedule_name\" : subtask[\"subTask\"].get(\"subTaskName\", ''),\n \"schedule_id\": subtask[\"subTask\"][\"subTaskId\"]\n })\n\n\n else:\n raise SDKException('Response', '102')\n else:\n response_string = self._commcell_object._update_response_(\n response.text)\n raise SDKException('Response', '101', response_string)",
"def sso_properties(self) -> Optional[pulumi.Input['SsoPropertiesArgs']]:\n return pulumi.get(self, \"sso_properties\")",
"def sso_properties(self) -> Optional[pulumi.Input['SsoPropertiesArgs']]:\n return pulumi.get(self, \"sso_properties\")",
"def get_common_settings(self):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Settings/\"))",
"def properties(self):\n return self.properties_with_uid[1:]",
"def _create_properties(self): # pylint: disable=no-self-use\n properties = {}\n properties[\"product\"] = \"eventhub.python\"\n properties[\"version\"] = __version__\n properties[\"framework\"] = \"Python {}.{}.{}\".format(*sys.version_info[0:3])\n properties[\"platform\"] = sys.platform\n return properties",
"def properties(self) -> Optional[pulumi.Input['EventhubSpecPropertiesArgs']]:\n return pulumi.get(self, \"properties\")",
"def NtpServer(self):\n return self._get_attribute('ntpServer')",
"async def serverInfo(self, ctx):\n\t\ts = ctx.message.server\n\t\te = discord.Embed()\n\t\t\n\t\te.title = \"Server Statistics\"\n\t\te.description = \"\"\"\nName: {n}\nNumber of Roles: {r}\nServer Region: {sr}\nNumber of Emojis: {em}\nNumber of Members: {m}\nID: {i}\nOwner: {o}\nCreated at: {c}\n\t\t\t\t\t\t\"\"\".format(n = s.name, r = len(s.roles), sr = s.region, em = len(s.emojis), \n\t\t\t\t\t\t\t\t\tm = s.member_count, i = s.id, o = s.owner.name, c = s.created_at)\n\t\te.set_thumbnail(url = s.icon_url)\n\t\t\n\t\tawait self.bot.say(embed = e)",
"async def server_time(self):\n uri = \"/v3/time\"\n success, error = await self.request(\"GET\", uri)\n return success, error",
"def properties(self):\r\n return resources.Properties(self)",
"def properties(self) -> Optional[pulumi.Input['CosmosDBSpecPropertiesArgs']]:\n return pulumi.get(self, \"properties\")",
"async def server_time(self):\n uri = \"/fapi/v1/time\"\n success, error = await self.request(\"GET\", uri)\n return success, error",
"def properties(self):\n pass",
"def get_cluster_properties(redshift_client):\n cluster_properties = redshift_client.describe_clusters(\n ClusterIdentifier=IDENTIFIER\n )['Clusters'][0]\n return cluster_properties",
"def player_properties(self):\n return self.properties.GetAll(self.player_interface)",
"def remote_properties(self):\n return dat2obj(pn_connection_remote_properties(self._impl))",
"def getPropertiesAll():",
"def get_servers(self) -> dict:\n uri = f\"{self.uri}/servers\"\n\n response = self.request(uri=uri)\n return response.json()",
"def get_properties():\n properties = dict()\n properties['size'] = list()\n properties['color'] = list()\n properties['quality'] = list()\n u = models.Size.query.all()\n for i in u:\n properties['size'].append(i.size_name)\n u = models.Color.query.all()\n for i in u:\n properties['color'].append(i.color_name)\n u = models.Quality.query.all()\n for i in u:\n properties['quality'].append(i.quality_name)\n return make_response(jsonify(properties))",
"def get_server(self):\n\n pass",
"def server_status(self):\n return self._server_status",
"def properties(self) -> Optional[pulumi.Input['EventhubNamespaceSpecPropertiesArgs']]:\n return pulumi.get(self, \"properties\")",
"def properties(self):\n\n return self._properties",
"def metadata(self, server_id):\n\n server = self.compute.servers.get(server_id)\n\n if server.tenant_id not in Scope.projects():\n pecan.abort(403, 'unauthorized access a resource outside of your domain')\n\n # Required by Fog, but oddly not in novaclient.v2.servers\n return {u'metadata': server.metadata}",
"def get_settings(self):\n host = '127.0.0.1'\n port = 6379\n db = 0\n if self.connection_uri is not None:\n re_connection_uri = r'redis://(?:([\\w]+)@)?([\\w\\d\\.]+):(\\d+)(?:/(\\d+))?'\n match = re.match(re_connection_uri, self.connection_uri)\n if match:\n if match.group(2):\n host = match.group(2)\n if match.group(3):\n port = int(match.group(3))\n if match.group(4):\n db = int(match.group(4))\n\n return {\n 'host': host,\n 'port': port,\n 'db': db\n }",
"def _get_v2_host_properties(pulp_version):\n hostname = _get_hostname()\n amqp_broker = _get_amqp_broker_role()\n api_role = _get_api_role(pulp_version)\n shell_role = _get_shell_role(hostname)\n return {\n 'hostname': hostname,\n 'roles': {\n 'amqp broker': {'service': amqp_broker},\n 'api': api_role,\n 'mongod': {},\n 'pulp celerybeat': {},\n 'pulp cli': {},\n 'pulp resource manager': {},\n 'pulp workers': {},\n 'shell': shell_role,\n 'squid': {},\n }\n }",
"def test_server_valid_minimal(self):\n config_data = imageroller.main.read_config(\n self._cmd_args,\n imageroller.test.get_config_parser(self._server_valid_minimal))\n self.assertEqual(config_data.concurrent_workers,\n CONFIG_DATA[\"ConcurrentWorkers\"])\n self.assertEqual(len(config_data.server_data), 1)\n # Test minutes -> seconds in property getters\n # pylint: disable=not-an-iterable\n for server_data in config_data.server_data:\n self.assertEqual(server_data.save_timeout_seconds,\n int(CONFIG_DATA[\"SaveTimeoutMinutes\"]) * 60)\n self.assertEqual(server_data.retain_image_seconds,\n int(CONFIG_DATA[\"RetainImageMinutes\"]) * 60)\n self.assertFalse(server_data.auto_enable)",
"def base_properties(self):\n return self.properties.GetAll(self.mpris_base)",
"def sso_properties(self) -> Optional[pulumi.Input['DevToolPortalSsoPropertiesArgs']]:\n return pulumi.get(self, \"sso_properties\")",
"def web_hook_properties(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"web_hook_properties\")",
"def properties(self):\n return None",
"def properties(self):\n return None",
"def get_properties(self):\n return self.name, self.author, self.description, self.fmu_type, self.version, self.guid, self.tool, self.numStates",
"def properties(self) -> Optional[pulumi.Input['RedisCacheSpecPropertiesArgs']]:\n return pulumi.get(self, \"properties\")",
"def server_info(ctx):\n data = ctx.obj.get_server_info()\n output_json_data(data)",
"def _GetServers(self) -> List[Dict[str, str]]:\n return [\n {\n \"url\": \"/\",\n \"description\": \"Root path of the GRR API\",\n },\n ]",
"def _extract_properties_from_context(context: mlrun.MLClientCtx) -> Dict[str, Any]:\n run = mlrun.RunObject.from_dict(context.to_dict())\n runs = mlrun.lists.RunList([run.to_dict()])\n info = {}\n for property_name, property_value in list(zip(*runs.to_rows())):\n info[property_name] = property_value\n return info",
"def properties(self, gid, sid, did, etid):\n status, res = self._fetch_properties(did, etid)\n if not status:\n return res\n\n return ajax_response(\n response=res,\n status=200\n )",
"def properties(self):\n from hubspot3.properties import PropertiesClient\n\n return PropertiesClient(**self.auth, **self.options)",
"def _get_all_servers(self, key):\n hints = {}\n hosts = []\n for vm in NovaScheduler.vms:\n if vm['state'] == 'active':\n hosts.append(vm['id'])\n if len(hosts) > 0:\n hints[key] = hosts\n LOG.info(\"%s:%s() %s: %s\", self.__class__.__name__,\n sys._getframe().f_code.co_name, key, hints)\n return hints",
"def get_sys_props_dict(self):\n return self.__sys_props",
"def properties(self) -> pulumi.Output[Sequence['outputs.NotificationChannelProperty']]:\n return pulumi.get(self, \"properties\")",
"def _get_subclient_properties(self):\r\n\r\n self._vmDiskFilter = None\r\n self._vmFilter = None\r\n\r\n if not bool(self._subclient_properties):\r\n super(VirtualServerSubclient, self)._get_subclient_properties()\r\n\r\n if 'vmContent' in self._subclient_properties:\r\n self._vmContent = self._subclient_properties['vmContent']\r\n if 'vmDiskFilter' in self._subclient_properties:\r\n self._vmDiskFilter = self._subclient_properties['vmDiskFilter']\r\n if 'vmFilter' in self._subclient_properties:\r\n self._vmFilter = self._subclient_properties['vmFilter']\r\n if 'vmBackupInfo' in self._subclient_properties:\r\n self._vmBackupInfo = self._subclient_properties['vmBackupInfo']\r\n if 'vsaSubclientProp' in self._subclient_properties:\r\n self._vsaSubclientProp = self._subclient_properties['vsaSubclientProp']",
"def _get_pulp_properties():\n version = click.prompt(\n 'Which version of Pulp is under test?',\n type=PulpVersionType()\n )\n username = click.prompt(\n \"What is the Pulp administrative user's username?\",\n default='admin',\n type=click.STRING,\n )\n password = click.prompt(\n \"What is the Pulp administrative user's password?\",\n default='admin',\n type=click.STRING,\n )\n # We could make this default to \"false\" if version >= 3, but it seems\n # better to assume that Pulp is secure by default, and to annoy everyone\n # about Pulp 3's lack of security.\n selinux_enabled = click.confirm(\n 'Is SELinux supported on the Pulp hosts?',\n default=True,\n )\n return {\n 'auth': [username, password],\n 'selinux enabled': selinux_enabled,\n 'version': version,\n }",
"def get_settings(self):\n return {\n \"game_name\": self.game_name,\n \"n_epochs\": self.n_epochs,\n \"n_episodes\": self.n_episodes,\n \"n_frames\": self.n_frames,\n \"agent\": self.agent.get_settings(),\n \"results_dir\": self.results_dir,\n \"use_minimal_action_set\": self.use_minimal_action_set,\n }",
"def get_properties(\n self,\n ins: common.GetPropertiesIns,\n timeout: Optional[float],\n ) -> common.GetPropertiesRes:\n get_properties_msg = serde.get_properties_ins_to_proto(ins)\n res_wrapper: ResWrapper = self.bridge.request(\n ins_wrapper=InsWrapper(\n server_message=ServerMessage(get_properties_ins=get_properties_msg),\n timeout=timeout,\n )\n )\n client_msg: ClientMessage = res_wrapper.client_message\n get_properties_res = serde.get_properties_res_from_proto(\n client_msg.get_properties_res\n )\n return get_properties_res",
"def get_server(self):\n return self.__server",
"def get_server_status(fields=[]):\n return get_dict_from_db(key='status', fields=fields)",
"def properties(self):",
"def properties(self):",
"def properties(self):",
"def readProperties(self):\r\n print('not yet implemented')",
"def getServerAndSecret(self):\n\n if self._default_master_server is not None and self._default_master_server != \"AUTOMATIC\":\n return (self._default_master_server,self.getSecret())\n \n sl=RadiusServerList(server_file=self._server_file)\n\n rs=sl.getServer()\n\n return((rs.ip,rs.secret))",
"def get_properties(self):\n geospatial = self.get_geospatial()\n temporal = self.get_temporal()\n\n # File system metadata\n filesystem = super(SAFESentinelBase, self).get_filesystem(self.fname)\n self._update_filesystem_metadata(filesystem)\n\n data_format = {\"format\": \"SAFE\"}\n\n # Gather up extra metadata\n extra_metadata = {}\n for key in (\"platform\", \"product_info\", \"orbit_info\"):\n if self.sections.get(key):\n extra_metadata[key] = self.sections[key]\n\n # Set extra content from existing content and filename\n self._update_extra_metadata(extra_metadata)\n \n props = product.Properties(spatial=geospatial,\n temporal=temporal,\n filesystem=filesystem,\n data_format=data_format,\n **extra_metadata)\n\n return props",
"def _get_one_server(self, key):\n hints = {}\n for vm in NovaScheduler.vms:\n if vm['state'] == 'active':\n hints[key] = vm['id']\n break\n LOG.info(\"%s:%s() %s: %s\", self.__class__.__name__,\n sys._getframe().f_code.co_name, key, hints)\n return hints",
"def properties(self) -> Sequence['outputs.GoogleCloudContentwarehouseV1PropertyResponse']:\n return pulumi.get(self, \"properties\")",
"def as_json(self):\n server_data = []\n for server in self.servers:\n server_data.append(server.__dict__)\n return json.dumps(server_data)",
"def _properties(self) -> dict[str, dict[str, str]]:\n schema = self.schema(by_alias=False)\n if schema.get('properties') is not None:\n return schema.get('properties', {})\n return schema.get('definitions', {}).get(self.__class__.__name__, {}).get('properties', {})",
"def status(self):\n try:\n capabilities = []\n with manager.connect(host=netconf_server_ip,\n port=int(netconf_server_port),\n username= netconf_server_username,\n password=netconf_server_password,\n hostkey_verify=False) as m:\n\n for c in m.server_capabilities:\n capabilities.append(c)\n return capabilities\n\n except:\n return \"Can not establish connection with the server, something went wrong\"",
"def properties(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Expression]:",
"def get_server_metadata(self, server):\n res = self.get_server(server)\n metadata = res.get_metadata(self.session)\n result = _server.Server.existing(id=res.id, metadata=metadata)\n return result"
] | [
"0.80747217",
"0.6539375",
"0.6539375",
"0.6380929",
"0.63467556",
"0.63117343",
"0.61406976",
"0.61178446",
"0.6086679",
"0.60554194",
"0.59798664",
"0.5967408",
"0.5949758",
"0.5932225",
"0.58748645",
"0.5870139",
"0.58367234",
"0.5824827",
"0.57901335",
"0.5786792",
"0.5786339",
"0.57857144",
"0.5760927",
"0.57566714",
"0.57539856",
"0.5747972",
"0.5735813",
"0.57300866",
"0.57300866",
"0.5702779",
"0.5700004",
"0.5660908",
"0.5650659",
"0.56330234",
"0.56224597",
"0.5622325",
"0.5622325",
"0.56206214",
"0.5616971",
"0.5616924",
"0.5616924",
"0.56094426",
"0.5601953",
"0.5595154",
"0.55936587",
"0.556542",
"0.5558562",
"0.5556501",
"0.5547266",
"0.55429226",
"0.55382097",
"0.5531873",
"0.5529906",
"0.551732",
"0.55108404",
"0.54881084",
"0.54813206",
"0.5477059",
"0.5470137",
"0.5460075",
"0.54587364",
"0.5453052",
"0.5449129",
"0.54427356",
"0.54318917",
"0.5428395",
"0.5407313",
"0.54049635",
"0.5390165",
"0.53825396",
"0.53825396",
"0.5377779",
"0.537748",
"0.53752923",
"0.5373577",
"0.5370031",
"0.5356234",
"0.5329772",
"0.53251016",
"0.53234076",
"0.5311303",
"0.53039676",
"0.5294154",
"0.52933264",
"0.52690566",
"0.5268023",
"0.52593553",
"0.525791",
"0.525791",
"0.525791",
"0.5257579",
"0.52519864",
"0.5249775",
"0.52484214",
"0.5247152",
"0.5239996",
"0.52340037",
"0.52318335",
"0.5229639",
"0.5221827"
] | 0.53765047 | 73 |
Returns the given virtual media device status and device URI | def _get_vm_device_status(self, device='FLOPPY'):
valid_devices = {'FLOPPY': 'floppy',
'CDROM': 'cd'}
# Check if the input is valid
if device not in valid_devices:
raise exception.IloInvalidInputError(
"Invalid device. Valid devices: FLOPPY or CDROM.")
manager, uri = self._get_ilo_details()
try:
vmedia_uri = manager['links']['VirtualMedia']['href']
except KeyError:
msg = ('"VirtualMedia" section in Manager/links does not exist')
raise exception.IloCommandNotSupportedError(msg)
for status, hds, vmed, memberuri in self._get_collection(vmedia_uri):
status, headers, response = self._rest_get(memberuri)
if status != 200:
msg = self._get_extended_error(response)
raise exception.IloError(msg)
if (valid_devices[device] in
[item.lower() for item in response['MediaTypes']]):
vm_device_uri = response['links']['self']['href']
return response, vm_device_uri
# Requested device not found
msg = ('Virtualmedia device "' + device + '" is not'
' found on this system.')
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_vmedia_device_uri(self, device):\n\n try:\n sushy_system = self._get_sushy_system()\n uri = utils.get_subresource_path_by(sushy_system, 'VirtualMedia')\n resp = sushy_system._conn.get(uri)\n vmedia_resp = json.loads(resp.text)\n for val in vmedia_resp.get(\"Members\"):\n for key in val:\n if device in val[key]:\n return val[key]\n except sushy.exceptions.SushyError as e:\n msg = (self._('Not able to find find vmedia device URI. Error: '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def get_vmedia_device_status(self, device=\"cd0\"):\n\n if device not in VALID_VMEDIA_DEVICES:\n raise exception.InvalidInputError(\n \"Invalid device. Valid devices: cd0 or cd1 or hd0 or hd1.\")\n sushy_system = self._get_sushy_system()\n device = VALID_VMEDIA_DEVICES.get(device)\n\n vmedia_device_uri = self.get_vmedia_device_uri(device)\n\n try:\n resp = sushy_system._conn.get(vmedia_device_uri)\n return resp.text\n except sushy.exceptions.SushyError as e:\n msg = (self._('Error: %(error)s') %\n {'error': str(e)})\n raise exception.SDFlexError(msg)",
"def get_vmedia_status(self):\n\n try:\n sushy_system = self._get_sushy_system()\n vmedia_status = sushy_system.vmedia\n except sushy.exceptions.SushyError as e:\n msg = (self._('The vmedia is not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n return vmedia_status",
"def get_vm_status(self, device='FLOPPY'):\n response, vm_device_uri = self._get_vm_device_status(device)\n\n # Create RIBCL equivalent response\n # RIBCL provides this data in VM status\n # VM_APPLET = CONNECTED | DISCONNECTED\n # DEVICE = FLOPPY | CDROM\n # BOOT_OPTION = BOOT_ALWAYS | BOOT_ONCE | NO_BOOT\n # WRITE_PROTECT = YES | NO\n # IMAGE_INSERTED = YES | NO\n response_data = {}\n\n if response.get('WriteProtected', False):\n response_data['WRITE_PROTECT'] = 'YES'\n else:\n response_data['WRITE_PROTECT'] = 'NO'\n\n if response.get('BootOnNextServerReset', False):\n response_data['BOOT_OPTION'] = 'BOOT_ONCE'\n else:\n response_data['BOOT_OPTION'] = 'BOOT_ALWAYS'\n\n if response.get('Inserted', False):\n response_data['IMAGE_INSERTED'] = 'YES'\n else:\n response_data['IMAGE_INSERTED'] = 'NO'\n\n if response.get('ConnectedVia') == 'NotConnected':\n response_data['VM_APPLET'] = 'DISCONNECTED'\n # When media is not connected, it's NO_BOOT\n response_data['BOOT_OPTION'] = 'NO_BOOT'\n else:\n response_data['VM_APPLET'] = 'CONNECTED'\n\n response_data['IMAGE_URL'] = response['Image']\n response_data['DEVICE'] = device\n\n # FLOPPY cannot be a boot device\n if ((response_data['BOOT_OPTION'] == 'BOOT_ONCE') and\n (response_data['DEVICE'] == 'FLOPPY')):\n response_data['BOOT_OPTION'] = 'NO_BOOT'\n\n return response_data",
"def new_media_status(self, media_status):\n if (\n media_status\n and media_status.player_is_idle\n and media_status.idle_reason == \"ERROR\"\n ):\n external_url = None\n internal_url = None\n url_description = \"\"\n with suppress(NoURLAvailableError): # external_url not configured\n external_url = get_url(self.hass, allow_internal=False)\n\n with suppress(NoURLAvailableError): # internal_url not configured\n internal_url = get_url(self.hass, allow_external=False)\n\n if media_status.content_id:\n if external_url and media_status.content_id.startswith(external_url):\n url_description = f\" from external_url ({external_url})\"\n if internal_url and media_status.content_id.startswith(internal_url):\n url_description = f\" from internal_url ({internal_url})\"\n\n _LOGGER.error(\n (\n \"Failed to cast media %s%s. Please make sure the URL is: \"\n \"Reachable from the cast device and either a publicly resolvable \"\n \"hostname or an IP address\"\n ),\n media_status.content_id,\n url_description,\n )\n\n self.media_status = media_status\n self.media_status_received = dt_util.utcnow()\n self.schedule_update_ha_state()",
"def get_camera_status():\n\n\ttarget = send_command('getstatus cam')\n\tsplit_ans = target.split()\n\t\n\treturn split_ans",
"def _media_status(self):\n media_status = self.media_status\n media_status_received = self.media_status_received\n\n if (\n media_status is None\n or media_status.player_state == MEDIA_PLAYER_STATE_UNKNOWN\n ):\n groups = self.mz_media_status\n for k, val in groups.items():\n if val and val.player_state != MEDIA_PLAYER_STATE_UNKNOWN:\n media_status = val\n media_status_received = self.mz_media_status_received[k]\n break\n\n return (media_status, media_status_received)",
"def verify_device_dmr(self, device):\n self.assertEqual(device.av_transport_url, AV_TRANSPORT_URL)",
"def getVirtualStatus(self,node,vmid):\n data = self.connect('get','nodes/%s/qemu/%s/status/current' % (node,vmid),None)\n return data",
"def guess_vserver_device():\n\n s = commands.getoutput('/bin/mount | /bin/grep tagxid | /usr/bin/head -n 1')\n device = s.split()[0]\n\n return device",
"def _manufacturer_from_status(status: dict[str, str]) -> str | None:\n return (\n status.get(\"device.mfr\")\n or status.get(\"ups.mfr\")\n or status.get(\"ups.vendorid\")\n or status.get(\"driver.version.data\")\n )",
"def _get_mount_status(self, vm=None):\n result = Shell.run(f\"multipass info {vm} --format=json\")\n\n if f'instance \"{vm}\" does not exist' in result:\n dict_result = {\n 'name': vm,\n 'status': \"instance does not exist\"\n }\n else:\n result = json.loads(result)\n dict_result = {\n 'name': vm,\n 'status': result[\"info\"][vm]['state'],\n 'mounts': result[\"info\"][vm]['mounts']\n }\n return dict_result",
"def get_member_device(self, device):\n for vmedia_device in self.get_members():\n if device in vmedia_device.media_types:\n return vmedia_device",
"def test_device_status(self):\n #071031031E3067\n self.ms.add_response({'\\x14071031031E3067\\x0D': 'PA\\x0D'})\n # Network / Device ID\n response = self.upb.status((49, 3))\n self.assertTrue(response)",
"def _rest_call(self, data, action):\n path = '/wm/device/?ipv4=' + data\n conn = httplib.HTTPConnection(self.host, self.port)\n conn.request('GET', path)\n response = conn.getresponse()\n ret = (response.status, response.reason, response.read())\n conn.close()\n return ret",
"def query_device_handle(runtime, query_str):\r\n devices_manager = runtime.devices_manager\r\n dname, sname = query_str.split('.')\r\n\r\n dev = devices_manager.find_devices(dname)\r\n if dev is None:\r\n print(f'[Debug] Query {dname} from DevicesManager and got None.', file=sys.stderr)\r\n raise ValueError(f'Device {dname} not in database.')\r\n\r\n ret = dev.get_status_value(sname)\r\n if ret is None:\r\n print(f'[Debug] Query {dname}.{sname} from DevicesManager and got None.', file=sys.stderr)\r\n raise ValueError(f'Status {dname}.{sname} not in database.')\r\n\r\n return ret",
"def getDevice(driver):\n devices = list(listDevices(driver))\n if not devices:\n print('No devices found. Ensure your camera is connected.')\n elif len(devices) != 1:\n print('Too many devices found. Only one camera is supported')\n else:\n return devices[0]",
"def ProcessStatusUploadRequest(self, device_status, session_status):\n # Empty responses indicate a successful upload.\n device_status_report_response = dm.DeviceStatusReportResponse()\n session_status_report_response = dm.SessionStatusReportResponse()\n\n response = dm.DeviceManagementResponse()\n response.device_status_report_response.CopyFrom(\n device_status_report_response)\n response.session_status_report_response.CopyFrom(\n session_status_report_response)\n\n return (200, response)",
"async def get_device_status(self, device_id: str) -> dict:\r\n return await self.get(API_DEVICE_STATUS.format(device_id=device_id))",
"def _firmware_from_status(status: dict[str, str]) -> str | None:\n return status.get(\"ups.firmware\") or status.get(\"ups.firmware.aux\")",
"def getDetailedStatus (self):\n try:\n if ((self._activateData == None) or (self._activateData.blockDevice == None)):\n self._log(\"no-block-device-no-status\").debug2(\"no block device was found for file system '%s'\",self._logicalDiskName)\n return None,ReturnCodes.kOk\n\n else:\n terminateTimeOut = self._activeTimeoutsConfig.getStatus\n timer = common.Timer(terminateTimeOut)\n blockDevice = self._activateData.blockDevice\n statusDisctionary,rc = self._tune2fs(blockDevice,timer)\n \n if (rc != ReturnCodes.kOk):\n self._log(\"get-status-failed\").error(\"getDetailedStatus() for file system '%s' failed!\",self._logicalDiskName)\n return None,ReturnCodes.kGeneralError\n\n if (statusDisctionary == None):\n self._log(\"have-block-but-no-status\").debug2(\"block device '%s' for file system '%s' - could not fins file-system status\",blockDevice,self._logicalDiskName)\n else:\n self._log(\"status-found\").debug2(\"block device '%s' for file system '%s' - status found!\",blockDevice,self._logicalDiskName)\n\n return statusDisctionary,rc\n\n except Exception,e:\n self._log(\"get-file-system-status-exception\").error(\"getDetailedStatus(terminateTimeOut=%.2f) faild! exception = '%s'\",terminateTimeOut,e)\n return None,ReturnCodes.kGeneralError",
"def _get_device_info(self) -> NUTDeviceInfo | None:\n if not self._status:\n return None\n\n manufacturer = _manufacturer_from_status(self._status)\n model = _model_from_status(self._status)\n firmware = _firmware_from_status(self._status)\n device_info = NUTDeviceInfo(manufacturer, model, firmware)\n\n return device_info",
"def get(self, devicekey, *args):\n\n base_url = request.url_root[:-1]\n\n out = xml.etree.ElementTree.Element('root')\n out.set('xmlns', \"urn:schemas-upnp-org:device-1-0\")\n\n if devicekey.startswith(self.fhdhr.config.dict[\"main\"][\"uuid\"]):\n origin = devicekey.split(self.fhdhr.config.dict[\"main\"][\"uuid\"])[-1]\n origin_plugin_name = self.fhdhr.origins.origins_dict[origin].plugin_utils.plugin_name\n origin_plugin_version = self.fhdhr.origins.origins_dict[origin].plugin_utils.plugin_manifest[\"version\"]\n\n specVersion_out = sub_el(out, 'specVersion')\n sub_el(specVersion_out, 'major', \"1\")\n sub_el(specVersion_out, 'minor', \"0\")\n\n device_out = sub_el(out, 'device')\n\n sub_el(device_out, 'deviceType', \"urn:plex-tv:device:Media:1\")\n\n sub_el(device_out, 'friendlyName', \"%s %s\" % (self.fhdhr.config.dict[\"fhdhr\"][\"friendlyname\"], origin))\n sub_el(device_out, 'manufacturer', self.fhdhr.config.dict[\"rmg\"][\"reporting_manufacturer\"])\n sub_el(device_out, 'manufacturerURL', \"https://github.com/fHDHR/%s\" % origin_plugin_name)\n sub_el(device_out, 'modelName', self.fhdhr.config.dict[\"rmg\"][\"reporting_model\"])\n sub_el(device_out, 'modelNumber', origin_plugin_version)\n\n sub_el(device_out, 'modelDescription', \"%s %s\" % (self.fhdhr.config.dict[\"fhdhr\"][\"friendlyname\"], origin))\n sub_el(device_out, 'modelURL', \"https://github.com/fHDHR/%s\" % self.fhdhr.config.dict[\"main\"][\"reponame\"])\n\n serviceList_out = sub_el(device_out, 'serviceList')\n service_out = sub_el(serviceList_out, 'service')\n sub_el(out, 'URLBase', \"%s/rmg/%s%s\" % (base_url, self.fhdhr.config.dict[\"main\"][\"uuid\"], origin))\n sub_el(service_out, 'serviceType', \"urn:plex-tv:service:MediaGrabber:1\")\n sub_el(service_out, 'serviceId', \"urn:plex-tv:serviceId:MediaGrabber\")\n\n sub_el(device_out, 'UDN', \"uuid:%s%s\" % (self.fhdhr.config.dict[\"main\"][\"uuid\"], origin))\n\n fakefile = BytesIO()\n fakefile.write(b'<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n')\n fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))\n device_xml = fakefile.getvalue()\n\n return Response(status=200,\n response=device_xml,\n mimetype='application/xml')",
"async def get_status(self) -> str:\n return await self.hw_device.status()",
"def check_media(self, media):\n return AbstractVLC.check_media(self, os.path.join(settings.get(\"path\", \"relative\", \"video\"), media))",
"def test_get_device_presence(self):\n\n device_id = self.properties['device1.id']\n response = self.api.get_device_presence(device_id)\n\n self.assertEqual(device_id, response.sdid, 'Sdids must match')\n self.assertIsNotNone(response.data.last_seen_on, 'last_seen_on')\n self.assertIsNotNone(response.data.connected, 'connected')",
"def media_file_info(self):\n\n if self.observationId and self.playerType == VLC:\n\n media = self.mediaplayer.get_media()\n\n logging.info(\"State: {}\".format(self.mediaplayer.get_state()))\n logging.info(\"Media (get_mrl): {}\".format(bytes_to_str(media.get_mrl())))\n logging.info(\"media.get_meta(0): {}\".format(media.get_meta(0)))\n logging.info(\n \"Track: {}/{}\".format(self.mediaplayer.video_get_track(), self.mediaplayer.video_get_track_count()))\n logging.info(\"number of media in media list: {}\".format(self.media_list.count()))\n logging.info(\"get time: {} duration: {}\".format(self.mediaplayer.get_time(), media.get_duration()))\n logging.info(\"Position: {} %\".format(self.mediaplayer.get_position()))\n logging.info(\"FPS: {}\".format(self.mediaplayer.get_fps()))\n logging.info(\"Rate: {}\".format(self.mediaplayer.get_rate()))\n logging.info(\"Video size: {}\".format(self.mediaplayer.video_get_size(0)))\n logging.info(\"Scale: {}\".format(self.mediaplayer.video_get_scale()))\n logging.info(\"Aspect ratio: {}\".format(self.mediaplayer.video_get_aspect_ratio()))\n logging.info(\"is seekable? {0}\".format(self.mediaplayer.is_seekable()))\n logging.info(\"has_vout? {0}\".format(self.mediaplayer.has_vout()))\n\n vlc_output = (\"State: {}<br>\"\n \"Media Resource Location: {}<br>\"\n \"File name: {}<br>\"\n \"Track: {}/{}<br>\"\n \"Number of media in media list: {}<br>\"\n \"get time: {}<br>\"\n \"duration: {}<br>\"\n \"Position: {} %<br>\"\n \"FPS: {}<br>\"\n \"Rate: {}<br>\"\n \"Video size: {}<br>\"\n \"Scale: {}<br>\"\n \"Aspect ratio: {}<br>\"\n \"is seekable? {}<br>\"\n \"has_vout? {}<br>\").format(self.mediaplayer.get_state(),\n bytes_to_str(media.get_mrl()),\n media.get_meta(0),\n self.mediaplayer.video_get_track(),\n self.mediaplayer.video_get_track_count(),\n self.media_list.count(),\n self.mediaplayer.get_time(),\n self.convertTime(media.get_duration() / 1000),\n self.mediaplayer.get_position(),\n self.mediaplayer.get_fps(),\n self.mediaplayer.get_rate(),\n self.mediaplayer.video_get_size(0),\n self.mediaplayer.video_get_scale(),\n self.mediaplayer.video_get_aspect_ratio(),\n \"Yes\" if self.mediaplayer.is_seekable() else \"No\",\n \"Yes\" if self.mediaplayer.has_vout() else \"No\"\n )\n\n self.results = dialog.ResultsWidget()\n self.results.setWindowTitle(programName + \" - Media file information\")\n self.results.ptText.setReadOnly(True)\n\n self.results.ptText.appendHtml(\"<b>VLC analysis</b><hr>\" + vlc_output)\n\n # FFmpeg analysis\n self.results.ptText.appendHtml(\"<br><b>FFmpeg analysis</b><hr>\")\n for nplayer in self.pj[OBSERVATIONS][self.observationId][FILE]:\n for filePath in self.pj[OBSERVATIONS][self.observationId][FILE][nplayer]:\n media_full_path = project_functions.media_full_path(filePath, self.projectFileName)\n # nframes, duration_ms, duration, fps, hasVideo, hasAudio = accurate_media_analysis(self.ffmpeg_bin, media_full_path)\n\n r = utilities.accurate_media_analysis2(self.ffmpeg_bin, media_full_path)\n nframes = r[\"frames_number\"]\n\n if \"error\" in r:\n self.results.ptText.appendHtml(\n \"File path: {filePath}<br><br>{error}<br><br>\".format(filePath=media_full_path,\n error=r[\"error\"]))\n else:\n self.results.ptText.appendHtml(\n \"File path: {}<br>Duration: {}<br>Bitrate: {}k<br>FPS: {}<br>Has video: {}<br>Has audio: {}<br><br>\".\n format(media_full_path, self.convertTime(r[\"duration\"]), r[\"bitrate\"], r[\"fps\"],\n r[\"has_video\"], r[\"has_audio\"]))\n\n self.results.ptText.appendHtml(\"Total duration: {} (hh:mm:ss.sss)\".\n 
format(self.convertTime(sum(self.duration) / 1000)))\n\n self.results.show()\n\n else:\n\n fn = QFileDialog(self).getOpenFileName(self, \"Select a media file\", \"\", \"Media files (*)\")\n filePath = fn[0] if type(fn) is tuple else fn\n\n if filePath:\n self.results = dialog.ResultsWidget()\n self.results.setWindowTitle(programName + \" - Media file information\")\n self.results.ptText.setReadOnly(True)\n self.results.ptText.appendHtml(\"<br><b>FFmpeg analysis</b><hr>\")\n # nframes, duration_ms, duration, fps, hasVideo, hasAudio = accurate_media_analysis(self.ffmpeg_bin, filePath)\n r = utilities.accurate_media_analysis2(self.ffmpeg_bin, filePath)\n if \"error\" in r:\n self.results.ptText.appendHtml(\n \"File path: {filePath}<br><br>{error}<br><br>\".format(filePath=filePath, error=r[\"error\"]))\n else:\n self.results.ptText.appendHtml(\n \"File path: {}<br>Duration: {}<br>Bitrate: {}k<br>FPS: {}<br>Has video: {}<br>Has audio: {}<br><br>\".\n format(filePath, self.convertTime(r[\"duration\"]), r[\"bitrate\"], r[\"fps\"], r[\"has_video\"],\n r[\"has_audio\"]))\n\n self.results.show()",
"def device_info(dev, testbed_obj, showcmd='show version', save_to_json=False, logstdout=True):\n\n device = testbed_obj.devices[dev]\n device.connect(log_stdout=logstdout)\n response = device.parse(showcmd)\n print(f\"Response from {dev} is of type {type(response)} and length {len(response)}\")\n print(f\"RAW response: \\n{response}\\n\")\n print(f\"FORMATTED response:\\n{json.dumps(response, indent=4)}\")\n print(response.keys())\n\n if save_to_json:\n json_filename = f\"{dev}.json\"\n with open(json_filename, 'w', encoding='utf-8') as f:\n json.dump(response, f, ensure_ascii=False, indent=4)\n print(f\"\\nFILE SAVED: Saved Response to JSON file {json_filename}\")\n\n return device, response",
"async def _get_scene_device_status(group: int):\n scene = await async_get_scene(group)\n for addr in scene[\"devices\"]:\n device = devices[addr]\n if device:\n await device.async_status()",
"def uber_syntax(self):\n returned = self.get_a_device_id()\n if returned:\n if not self.valid_status_code(uber.command(\"GetDeviceDetails\", ids=DEVICE_ID)):\n returned = False\n if not self.valid_status_code(uber.command(\"GetDeviceDetails\", body={\"ids\": [DEVICE_ID]})):\n returned = False\n if not self.valid_status_code(uber.command(\"GetDeviceDetails\", body={\"ids\": DEVICE_ID})):\n returned = False\n if not self.valid_status_code(uber.command(\"GetDeviceDetails\", parameters={\"ids\": DEVICE_ID})):\n returned = False\n if not self.valid_status_code(uber.command(\"GetDeviceDetailsV1\", ids=DEVICE_ID)):\n returned = False\n if not self.valid_status_code(uber.command(\"GetDeviceDetailsV2\", ids=DEVICE_ID)):\n returned = False\n if not self.valid_status_code(uber.command(\"PostDeviceDetailsV2\", body={\"ids\": [DEVICE_ID]})):\n returned = False\n if not self.valid_status_code(uber.command(\"PostDeviceDetailsV2\", parameters={\"ids\": [DEVICE_ID]})):\n returned = False\n if not self.valid_status_code(uber.command(\"PostDeviceDetailsV2\", ids=DEVICE_ID)):\n returned = False\n if not self.valid_status_code(uber.command(\"PostDeviceDetailsV2\", ids=[DEVICE_ID])):\n returned = False\n\n return returned",
"def _do_get_status(self):\n logging.info(__name__ + ' : Get status of the device.')\n result = self._execute('X')\n usage = {\n 0: \"Channel not in use\",\n 1: \"Channel used for Nitrogen level\",\n 2: \"Channel used for Helium Level (Normal pulsed operation)\",\n 3: \"Channel used for Helium Level (Continuous measurement)\",\n 9: \"Error on channel (Usually means probe unplugged)\"\n }\n # current_flowing = {\n # 0 : \"Curent not flowing in Helium Probe Wire\",\n # 1 : \"Curent not flowing in Helium Probe Wire\"\n # }\n # auto_fill_status = {\n # 00 : \"End Fill (Level > FULL)\",\n # 01 : \"Not Filling (Level < FULL, Level > FILL)\",\n # 10 : \"Filling (Level < FULL, Level > FILL)\",\n # 11 : \"Start Filling (Level < FILL)\"\n # }\n return usage.get(int(result[1]), \"Unknown\")",
"def get_video(self):\n\n def get_video_list(ctx):\n result = []\n for video_enum in ctx.xpath('/domainCapabilities/devices/video/enum'):\n if video_enum.xpath(\"@name\")[0] == \"modelType\":\n for values in video_enum: result.append(values.text)\n return result\n return util.get_xml_path(self.get_dom_cap_xml(),func=get_video_list)",
"def _lsusbv_on_device(bus_id, dev_id):\n _, raw_output = cmd_helper.GetCmdStatusAndOutputWithTimeout(\n ['lsusb', '-v', '-s', '%s:%s' % (bus_id, dev_id)], timeout=10)\n\n device = {'bus': bus_id, 'device': dev_id}\n depth_stack = [device]\n\n # TODO(jbudorick): Add documentation for parsing.\n for line in raw_output.splitlines():\n # Ignore blank lines.\n if not line:\n continue\n # Filter out error mesage about opening device.\n if _COULDNT_OPEN_ERROR_RE.match(line):\n continue\n # Find start of device information.\n m = _LSUSB_BUS_DEVICE_RE.match(line)\n if m:\n if m.group(1) != bus_id:\n logging.warning(\n 'Expected bus_id value: %r, seen %r', bus_id, m.group(1))\n if m.group(2) != dev_id:\n logging.warning(\n 'Expected dev_id value: %r, seen %r', dev_id, m.group(2))\n device['desc'] = m.group(3)\n continue\n\n indent_match = _INDENTATION_RE.match(line)\n if not indent_match:\n continue\n\n depth = 1 + len(indent_match.group(1)) / 2\n if depth > len(depth_stack):\n logging.error(\n 'lsusb parsing error: unexpected indentation: \"%s\"', line)\n continue\n\n while depth < len(depth_stack):\n depth_stack.pop()\n\n cur = depth_stack[-1]\n\n m = _LSUSB_GROUP_RE.match(line)\n if m:\n new_group = {}\n cur[m.group(1)] = new_group\n depth_stack.append(new_group)\n continue\n\n m = _LSUSB_ENTRY_RE.match(line)\n if m:\n new_entry = {\n '_value': m.group(2),\n '_desc': m.group(3),\n }\n cur[m.group(1)] = new_entry\n depth_stack.append(new_entry)\n continue\n\n logging.error('lsusb parsing error: unrecognized line: \"%s\"', line)\n\n return device",
"def _get_current_media(self):\n key = int(self.status.content_id.split(\"/\")[-1])\n media_item = self.pms.fetchItem(key).reload()\n media_idx = self.status.media_custom_data.get(\"mediaIndex\", 0)\n part_idx = self.status.media_custom_data.get(\"partIndex\", 0)\n media = media_item.media[media_idx]\n part = media.parts[part_idx]\n\n return media_item, media, part",
"def mock_valid_device_url() -> Generator[None, None, None]:\n with patch(\n \"afsapi.AFSAPI.get_webfsapi_endpoint\",\n return_value=\"http://1.1.1.1:80/webfsapi\",\n ):\n yield",
"def multi_video_feed(device):\n client_ip = request.environ['REMOTE_ADDR'][:3]\n if str(client_ip[:3]) == \"192\" or str(client_ip) == \"127.0.0.1\":\n camera_stream = import_module('camera_multicv').BaseCamera\n camera_stream.set_video_source(int(device))\n return Response(gen(camera_stream(int(device))),\n mimetype='multipart/x-mixed-replace; boundary=frame')\n else:\n return render_template('404.html')",
"def DoIt(self, host, vm, variable):\n\n vm = Operation.GetVm(host, vm)\n\n variableComponents = variable.split('.', 1)\n device = vm.GetDevice(variableComponents[0])\n if device:\n if len(variableComponents) > 1:\n return rec_getattr(device, variableComponents[1])\n else:\n return device\n\n\n value = vm.GetExtraConfig().get(variable, None)\n if value: return value\n\n return rec_getattr(vm, self.GetVmodlProperty(variable))",
"def remote_status():",
"def getStatus():\n return json.dumps({'camera': Camera.status(), 'rover': rover.status()}), 200",
"def _get_details(self, device_object, **kwargs):\r\n params = dict()\r\n if kwargs:\r\n for key, val in kwargs.items():\r\n if '_' in key:\r\n new_key = key.replace(\"_\",\"-\") \r\n params[new_key] = val\r\n else:\r\n params[key] = val\r\n\r\n try: \r\n response = requests.get(device_object.href,\r\n auth=(self.user,self.pwd), \r\n params=params, verify=False)\r\n info = json.loads(response.text)\r\n return info[\"content\"]\r\n except requests.exceptions.RequestException as e:\r\n print \"Error:\",e\r\n return 1",
"def get_device(self, device):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.network.networkdevice.1.0+xml'})\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tresp = self.ise.get('{0}/config/networkdevice?filter=name.EQ.{1}'.format(self.url_base, device))\n\t\tfound_device = ERS._to_json(resp.text)\n\n\t\tif found_device['ns3:searchResult']['@total'] == '1':\n\t\t\tresp = self.ise.get('{0}/config/networkdevice/{1}'.format(\n\t\t\t\t\tself.url_base, found_device['ns3:searchResult']['ns3:resources']['ns5:resource']['@id']))\n\t\t\tif resp.status_code == 200:\n\t\t\t\tresult['success'] = True\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns4:networkdevice']\n\t\t\t\treturn result\n\t\t\telif resp.status_code == 404:\n\t\t\t\tresult['response'] = '{0} not found'.format(device)\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\t\telse:\n\t\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\t\tresult['error'] = resp.status_code\n\t\t\t\treturn result\n\t\telif found_device['ns3:searchResult']['@total'] == '0':\n\t\t\t\tresult['response'] = '{0} not found'.format(device)\n\t\t\t\tresult['error'] = 404\n\t\t\t\treturn result\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result",
"def parse_url(cls, urlstr: str, scheme: str,\n vdict: Dict[str, int],\n pdict: Dict[int, Dict[str, int]],\n default_vendor: int) -> Tuple[UsbDeviceDescriptor, int]:\n urlparts = urlsplit(urlstr)\n if scheme != urlparts.scheme:\n raise UsbToolsError(\"Invalid URL: %s\" % urlstr)\n try:\n if not urlparts.path:\n raise UsbToolsError('URL string is missing device port')\n path = urlparts.path.strip('/')\n if path == '?' or (not path and urlstr.endswith('?')):\n report_devices = True\n else:\n interface = to_int(path)\n report_devices = False\n except (IndexError, ValueError) as exc:\n raise UsbToolsError('Invalid device URL: %s' % urlstr) from exc\n candidates, idx = cls.enumerate_candidates(urlparts, vdict, pdict,\n default_vendor)\n if report_devices:\n UsbTools.show_devices(scheme, vdict, pdict, candidates)\n raise SystemExit(candidates and\n 'Please specify the USB device' or\n 'No USB-Serial device has been detected')\n if idx is None:\n if len(candidates) > 1:\n raise UsbToolsError(\"%d USB devices match URL '%s'\" %\n (len(candidates), urlstr))\n idx = 0\n try:\n desc, _ = candidates[idx]\n vendor, product = desc[:2]\n except IndexError:\n raise UsbToolsError('No USB device matches URL %s' %\n urlstr) from None\n if not vendor:\n cvendors = {candidate[0] for candidate in candidates}\n if len(cvendors) == 1:\n vendor = cvendors.pop()\n if vendor not in pdict:\n raise UsbToolsError('Vendor ID %s not supported' %\n (vendor and '0x%04x' % vendor))\n if not product:\n cproducts = {candidate[1] for candidate in candidates\n if candidate[0] == vendor}\n if len(cproducts) == 1:\n product = cproducts.pop()\n if product not in pdict[vendor].values():\n raise UsbToolsError('Product ID %s not supported' %\n (product and '0x%04x' % product))\n devdesc = UsbDeviceDescriptor(vendor, product, desc.bus, desc.address,\n desc.sn, idx, desc.description)\n return devdesc, interface",
"async def device_status(self, value):\n if value is None:\n return\n \n binvalue = str(bin(value))\n binarr = binvalue[::-1]\n binarr = binarr[:len(DEVICE_STATUS)]\n return_value = []\n for x in range(len(DEVICE_STATUS)):\n if binarr[len(binarr) - 1 - x] == \"1\":\n return_value.append(DEVICE_STATUS[x])\n\n return return_value",
"def show_devices_status(releaser):\n\n devices = releaser.get_devices_by_status()\n for tag in devices:\n tag_devices = \", \".join([c[\"uuid\"][:6] for c in devices[tag].values()])\n click.echo(f\"{tag}: {tag_devices}\")",
"def get_device(self, dev_id):\n return self.api_request('GET', self.url + '/device/' + str(dev_id), {})",
"def test_get_device(self):\n pass",
"def test_get_device(self):\n pass",
"def get_status(self):\n try:\n status = {\n 'camexptime': self.opt.getParameter(\"ExposureTime\"),\n 'camtemp': self.opt.getParameter(\"SensorTemperatureReading\"),\n 'camspeed': self.opt.getParameter(\"AdcSpeed\"),\n 'state': self.opt.getParameter(\"OutputSignal\")\n }\n logger.info(status)\n return status\n except Exception as e:\n logger.error(\"Error getting the camera status\", exc_info=True)\n return {\n \"error\": str(e), \"camexptime\": -9999,\n \"camtemp\": -9999, \"camspeed\": -999\n }",
"def get_status(self):\n return self.o.read_register(self.dev_id, STATUS)",
"def findLink(status):\n link = False\n try:\n match2 = re.findall(r'bit.ly[\\w./:0-9]*', status)\n if match2:\n link = match2[0]\n #Find full urls\n match = re.findall(r'http[\\w./:0-9]*', status)\n if match:\n link = match[0]\n resp = urllib.urlopen(link)\n if resp.url:\n link = resp.url\n else:\n link = False\n except:\n link = False\n return link",
"def device():\n return G.DEVICE",
"def show_media(self, media):\n msg = media_to_chromecast_command(media, type=TYPE_DETAILS, requestid=self._inc_request())\n\n def cb():\n self._send_cmd(msg, inc_session_id=True, inc=False)\n\n self.launch(cb)",
"def connect_device(uri):\n d = urlparse(uri)\n platform = d.scheme\n host = d.netloc\n uuid = d.path.lstrip(\"/\")\n params = dict(parse_qsl(d.query))\n if host:\n params[\"host\"] = host.split(\":\")\n dev = init_device(platform, uuid, **params)\n return dev",
"async def async_update_via_upnp(self):\n import validators\n radio = False\n\n if self._upnp_device is None:\n return\n\n self._service = self._upnp_device.service('urn:schemas-upnp-org:service:AVTransport:1')\n #_LOGGER.debug(\"GetMediaInfo for: %s, UPNP service:%s\", self.entity_id, self._service)\n \n media_info = dict()\n media_metadata = None\n try:\n media_info = await self._service.action(\"GetMediaInfo\").async_call(InstanceID=0)\n self._trackc = media_info.get('CurrentURI')\n self._media_uri_final = media_info.get('TrackSource')\n media_metadata = media_info.get('CurrentURIMetaData')\n #_LOGGER.debug(\"GetMediaInfo for: %s, UPNP media_metadata:%s\", self.entity_id, media_info)\n except:\n _LOGGER.warning(\"GetMediaInfo/CurrentURIMetaData UPNP error: %s\", self.entity_id)\n\n if media_metadata is None:\n return\n\n self._media_title = None\n self._media_album = None\n self._media_artist = None\n self._media_image_url = None\n\n xml_tree = ET.fromstring(media_metadata)\n\n xml_path = \"{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}item/\"\n title_xml_path = \"{http://purl.org/dc/elements/1.1/}title\"\n artist_xml_path = \"{urn:schemas-upnp-org:metadata-1-0/upnp/}artist\"\n album_xml_path = \"{urn:schemas-upnp-org:metadata-1-0/upnp/}album\"\n image_xml_path = \"{urn:schemas-upnp-org:metadata-1-0/upnp/}albumArtURI\"\n radiosub_xml_path = \"{http://purl.org/dc/elements/1.1/}subtitle\"\n\n if radio:\n title = xml_tree.find(\"{0}{1}\".format(xml_path, radiosub_xml_path)).text\n if title.find(' - ') != -1:\n titles = title.split(' - ')\n self._media_artist = string.capwords(titles[0].strip())\n self._media_title = string.capwords(titles[1].strip())\n else:\n self._media_title = string.capwords(title.strip())\n else:\n self._media_title = xml_tree.find(\"{0}{1}\".format(xml_path, title_xml_path)).text\n self._media_artist = xml_tree.find(\"{0}{1}\".format(xml_path, artist_xml_path)).text\n self._media_album = xml_tree.find(\"{0}{1}\".format(xml_path, album_xml_path)).text\n \n self._media_image_url = xml_tree.find(\"{0}{1}\".format(xml_path, image_xml_path)).text\n\n if not validators.url(self._media_image_url):\n self._media_image_url = None",
"def get_device_details(device):\n ret = device.wait_for_output(\"SetupQRCode\")\n if ret is None or len(ret) < 2:\n return None\n\n qr_code = re.sub(\n r\"[\\[\\]]\", \"\", ret[-1].partition(\"SetupQRCode:\")[2]).strip()\n try:\n device_details = dict(SetupPayload().ParseQrCode(\n \"VP:vendorpayload%{}\".format(qr_code)).attributes)\n except exceptions.ChipStackError as ex:\n log.error(ex.msg)\n return None\n\n return device_details",
"def media_track(self):\n media_status = self._media_status()[0]\n return media_status.track if media_status else None",
"def redirected_opid_syntax(self):\n returned = self.get_a_device_id()\n if returned:\n if not self.valid_status_code(falcon.GetDeviceDetails(DEVICE_ID)):\n returned = False\n if not self.valid_status_code(falcon.GetDeviceDetails(ids=DEVICE_ID)):\n returned = False\n if not self.valid_status_code(falcon.GetDeviceDetails(parameters={\"ids\": [DEVICE_ID]})):\n returned = False\n if not self.valid_status_code(falcon.GetDeviceDetails(body={\"ids\": [DEVICE_ID]})):\n returned = False\n return returned",
"def device(self):\n return self.broker.device(**{\"VirtualNetworkMemberID\": self.VirtualNetworkMemberID})",
"def device(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"device\"), kwargs)",
"def device(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"device\"), kwargs)",
"def device(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"device\"), kwargs)",
"def device(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"device\"), kwargs)",
"def extract_status(self, status) -> None:\r\n if \"VehicleInfo\" in status:\r\n if \"RemoteHvacInfo\" in status[\"VehicleInfo\"]:\r\n self.hvac = status[\"VehicleInfo\"][\"RemoteHvacInfo\"]\r\n\r\n if \"ChargeInfo\" in status[\"VehicleInfo\"]:\r\n self.battery = status[\"VehicleInfo\"][\"ChargeInfo\"]",
"def get_device_information(self):\n return self.mycam.devicemgmt.GetDeviceInformation()",
"def get_device_properties(device):\n results = devices.show(device)\n return jsonify(results)",
"def _get_device_path(self, connection_properties):\n (out, _err) = self.execute('/usr/sbin/iscsiadm', 'list',\n 'target', '-S',\n connection_properties['target_iqn'])\n\n found = False\n for line in [l.strip() for l in out.splitlines()]:\n if line.startswith(\"LUN:\"):\n lun = line.split()[-1]\n if int(lun) == int(connection_properties['target_lun']):\n found = True\n continue\n if found:\n if line.startswith(\"OS Device Name:\"):\n dev_path = line.split()[-1]\n return dev_path\n elif line.startswith(\"LUN:\"):\n found = False\n\n if not found:\n LOG.error(_(\"No device is found for the target %s LUN %s.\") %\n (connection_properties['target_iqn'],\n connection_properties['target_lun']))\n raise",
"async def get_direct_rtsp_url(self, hd: bool = False) -> str:\n return (\n f\"rtsp://{self.data[Attribute.USERNAME]}:{self.data[Attribute.PASSWORD]}@{self.ip_address}:{self.data[Attribute.CAMERA_IP_PORT]}/{self.data[Attribute.CAMERA_DIRECT_STREAM_PATH if hd else Attribute.CAMERA_DIRECT_STREAM_PATH_STANDARD]}\"\n if self.data[Attribute.CAMERA_DIRECT_AVAILABLE]\n and self.data.get(Attribute.ACTUAL_TYPE) not in SKIP_DIRECT\n else None\n )",
"def status_check_callback(self, req, res):\n try:\n res.single_camera_status = 1\n res.stereo_camera_status = 1\n res.lidar_status = 1\n if self.camera_buffer.read_buffer is not None \\\n and isinstance(self.camera_buffer.read_buffer, list):\n if len(self.camera_buffer.read_buffer) == 2:\n res.stereo_camera_status = 0\n elif len(self.camera_buffer.read_buffer) == 1:\n res.single_camera_status = 0\n if self.lidar_buffer.read_buffer is not None:\n res.lidar_status = 0\n return res\n except Exception as ex:\n self.get_logger().error(f\"Failed to get sensor data status: {ex}\")",
"def platform_status(**params):\n endpoint = 'platform/status'\n return request(authenticate=False, version=2, endpoint=endpoint, method='GET', query_params=params)",
"def _get_device():\n return context.get_context('device_target')",
"def extract_media_v1(data):\n user = data[\"user\"]\n location = data.get(\"location\")\n if location:\n location = {\"pk\": int(location.get(\"pk\")), \"name\": location.get(\"name\")}\n video_url = \"\"\n if \"video_versions\" in data:\n # Select Best Quality by Resolutiuon\n video_url = sorted(\n data[\"video_versions\"], key=lambda o: o[\"height\"] * o[\"width\"]\n ).pop()[\"url\"]\n product_type = data.get(\"product_type\", \"\")\n if data[\"media_type\"] == 2 and not product_type:\n product_type = \"feed\"\n thumbnail_url = ''\n if 'image_versions2' in data:\n thumbnail_url = sorted(\n data[\"image_versions2\"][\"candidates\"],\n key=lambda o: o[\"height\"] * o[\"width\"],\n ).pop()[\"url\"]\n return {\n \"pk\": int(data[\"pk\"]),\n \"taken_at\": int(data[\"taken_at\"]),\n \"id\": data[\"id\"],\n \"media_type\": data[\"media_type\"],\n \"product_type\": product_type,\n \"code\": data[\"code\"],\n \"thumbnail_url\": thumbnail_url,\n \"location\": location,\n \"user\": extract_user_short(user),\n \"comment_count\": int(data.get(\"comment_count\") or 0),\n \"like_count\": int(data.get(\"like_count\") or 0), # the media just published has no like_count\n \"caption_text\": json_value(data, \"caption\", \"text\", default=\"\"),\n \"usertags\": [\n extract_usertag(usertag)\n for usertag in data.get(\"usertags\", {}).get(\"in\", [])\n ],\n \"video_url\": video_url,\n \"view_count\": int(data.get('view_count') or 0),\n \"video_duration\": data.get('video_duration'),\n \"title\": data.get(\"title\") or None,\n \"resources\": [\n extract_resource_v1(edge)\n for edge in data.get('carousel_media', [])\n ]\n }",
"def status(cls):\n return {'type': 'Emulated camera'}",
"def get_device_info(handle, timeout):\n device_info = dict()\n device_info['ls'] = ceph_mon_command(handle, 'device ls', timeout)\n\n return device_info",
"def get_video_url(data):\n # type: (dict) -> Optional[str]\n resource = data.get(\"resources\", [{}])[0]\n url = resource.get(\"video_stream\") # try m3u8\n if not url: # try mp4\n files = resource.get(\"files\")[0]\n mp4 = get_mime_property(files, \"url\", \"video/mp4\")\n url = \"https:{}\".format(mp4) if mp4 and mp4.startswith(\"//\") else mp4\n if not url: # try x-video\n idx = get_mime_property(files, \"mediaObjectId\", \"application/x-video\")\n media = get_json(LOS_MEDIA_TEMPLATE.format(idx))\n derivative = media.get(\"mediaObject\").get(\"derivatives\")[0]\n url = \"https://{}/{}\".format(\n derivative.get(\"fqdn\"),\n derivative.get(\"derivativeMediaUrl\").replace(\"mp4:\", \"\"))\n return url",
"async def _get_remote_media_impl(\n self, server_name: str, media_id: str\n ) -> Tuple[Optional[Responder], dict]:\n media_info = await self.store.get_cached_remote_media(server_name, media_id)\n\n # file_id is the ID we use to track the file locally. If we've already\n # seen the file then reuse the existing ID, otherwise generate a new\n # one.\n\n # If we have an entry in the DB, try and look for it\n if media_info:\n file_id = media_info[\"filesystem_id\"]\n file_info = FileInfo(server_name, file_id)\n\n if media_info[\"quarantined_by\"]:\n logger.info(\"Media is quarantined\")\n raise NotFoundError()\n\n if not media_info[\"media_type\"]:\n media_info[\"media_type\"] = \"application/octet-stream\"\n\n responder = await self.media_storage.fetch_media(file_info)\n if responder:\n return responder, media_info\n\n # Failed to find the file anywhere, lets download it.\n\n try:\n media_info = await self._download_remote_file(\n server_name,\n media_id,\n )\n except SynapseError:\n raise\n except Exception as e:\n # An exception may be because we downloaded media in another\n # process, so let's check if we magically have the media.\n media_info = await self.store.get_cached_remote_media(server_name, media_id)\n if not media_info:\n raise e\n\n file_id = media_info[\"filesystem_id\"]\n if not media_info[\"media_type\"]:\n media_info[\"media_type\"] = \"application/octet-stream\"\n file_info = FileInfo(server_name, file_id)\n\n # We generate thumbnails even if another process downloaded the media\n # as a) it's conceivable that the other download request dies before it\n # generates thumbnails, but mainly b) we want to be sure the thumbnails\n # have finished being generated before responding to the client,\n # otherwise they'll request thumbnails and get a 404 if they're not\n # ready yet.\n await self._generate_thumbnails(\n server_name, media_id, file_id, media_info[\"media_type\"]\n )\n\n responder = await self.media_storage.fetch_media(file_info)\n return responder, media_info",
"def status(self):\n status = self._socket_client.media_controller.status\n status.episode_title = episode_title\n return status",
"def get_devices():\n devices = []\n for path in hookenv.action_get('osd-devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n raise Error('{}: Not absolute path.'.format(path))\n devices.append(path)\n return devices",
"def multimedia_path(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def _getURL(self, params):\n qs = Media.objects.filter(pk=params['id'], deleted=False)\n if not qs.exists():\n raise Http404\n response_data = list(qs.values(*MEDIA_PROPERTIES))\n # Use 24-hour URLS\n _presign(24*3600, response_data)\n\n element = params['element']\n if element == 'auto':\n if qs[0].meta.dtype == 'video':\n element = 'streaming'\n elif qs[0].meta.dtype == 'image':\n element = 'image'\n elif qs[0].meta.dtype == 'multi':\n return None\n if element == 'audio':\n return response_data[0].get('media_files',{}).get('audio',[])[0]['path']\n elif element == 'thumbnail':\n search_in = response_data[0].get('media_files',{}).get('thumbnail',[])\n elif element == 'thumbnail_gif':\n search_in = response_data[0].get('media_files',{}).get('thumbnail_gif',[])\n elif element == 'image':\n search_in = response_data[0].get('media_files',{}).get('image',[])\n elif element == 'streaming':\n search_in = response_data[0].get('media_files',{}).get('streaming',[])\n elif element == 'archival':\n search_in = response_data[0].get('media_files',{}).get('archival',[])\n elif element == 'attachment':\n search_in = response_data[0].get('media_files',{}).get('attachment',[])\n\n if not search_in:\n return None\n quality = params['quality']\n max_delta = sys.maxsize\n quality_idx = 0\n for idx, info in enumerate(search_in):\n delta = abs(quality-info['resolution'][0])\n if delta < max_delta:\n quality_idx = idx\n max_delta = delta\n return search_in[quality_idx]['path']",
"def get_device(link):\n device = Device(\"\",0,0,0,0,0)\n device.link = link\n return device.identify()",
"def _serial_from_status(status: dict[str, str]) -> str | None:\n serial = status.get(\"device.serial\") or status.get(\"ups.serial\")\n if serial and (\n serial.lower() in NUT_FAKE_SERIAL or serial.count(\"0\") == len(serial.strip())\n ):\n return None\n return serial",
"def snmp_status_check(device_id):\n device = DevicesTmp.objects.get(device_id=device_id)\n ostype = Ostype.objects.get(ostypeid=device.ostype_id)\n time_out = ostype.snmp_timeout\n payload = {\n \"timeout\": time_out,\n \"ip\": device.ip,\n \"port\": device.snmp_port,\n \"hostname\": device.hostname,\n \"community\": device.snmp_community,\n \"snmp_version\": device.snmp_version,\n \"channel\": \"status_snmp\"\n }\n r = requests.post((constants.VERIFY_WHETHER_CAN_CONNECT_URL % (\n constants.VERIFY_WHETHER_EXECUTING_SERVER_IP, constants.VERIFY_WHETHER_EXECUTING_SERVER_PORT)),\n data=json.dumps(payload))\n response = json.loads(r.text)\n if response.get('status') == 'success':\n status = \"Success\"\n else:\n status = \"Fail\"\n return status, device_id\n # for x in response.get('output'):\n # if 'message' in x.keys():\n # if x.get('message').find('timeout') > -1:\n # return 'Fali', device_id\n # else:\n # return 'Success', device_id\n # else:\n # return 'Success', device_id",
"def get_device_info(target_project_arn):\n try:\n device_info = device_farm.list_devices(\n arn=target_project_arn,\n filters=[\n {\n \"attribute\": \"PLATFORM\",\n \"operator\": \"EQUALS\",\n \"values\": ['ANDROID', ]\n },\n {\n \"attribute\": \"OS_VERSION\",\n \"operator\": \"GREATER_THAN_OR_EQUALS\",\n \"values\": ['9', ]\n },\n {\n \"attribute\": \"MANUFACTURER\",\n \"operator\": \"EQUALS\",\n \"values\": ['Google', ]\n },\n {\n \"attribute\": \"AVAILABILITY\",\n \"operator\": \"EQUALS\",\n \"values\": ['HIGHLY_AVAILABLE', ]\n },\n {\n \"attribute\": \"FLEET_TYPE\",\n \"operator\": \"EQUALS\",\n \"values\": ['PUBLIC', ]\n }\n ])['devices']\n\n if device_info is not None:\n device_arn = device_info[0]['arn']\n device_name = device_info[0]['name']\n device_manufacture = device_info[0]['manufacturer']\n device_model = device_info[0]['model']\n device_model_id = device_info[0]['modelId']\n device_type = device_info[0]['formFactor']\n device_platform = device_info[0]['platform']\n device_os = device_info[0]['os']\n device_visibility = device_info[0]['fleetType']\n device_availability = device_info[0]['availability']\n\n print('Device Name - {} with Manufacture {}, model {}, modelId {} & type {}'.format(\n device_name,\n device_manufacture,\n device_model,\n device_model_id,\n device_type\n )\n )\n print('Device Platform {} with OS {}, visibility {} & availability - {} '.format(\n device_platform,\n device_os,\n device_visibility,\n device_availability\n )\n )\n\n if device_availability == TARGET_AVAILABILITY:\n print('AWS setup is complete')\n else:\n print('Problem, device is not available')\n else:\n print('Problem finding device info')\n\n except IndexError:\n print('Problem finding device from pool {}'.format(device_info))",
"def device_status_overview(self):\n if \"deviceStatusOverview\" in self._prop_dict:\n if isinstance(self._prop_dict[\"deviceStatusOverview\"], OneDriveObjectBase):\n return self._prop_dict[\"deviceStatusOverview\"]\n else :\n self._prop_dict[\"deviceStatusOverview\"] = DeviceConfigurationDeviceOverview(self._prop_dict[\"deviceStatusOverview\"])\n return self._prop_dict[\"deviceStatusOverview\"]\n\n return None",
"def __call__(self, device, timeout, bitrate, compress):\n if device is None:\n devices = android.list_devices()\n device = console.prompt_for_options(\"Choose device: \", devices)\n current_dir = os.getcwd()\n file_path_on_device = android.record_video(device, timeout, bitrate)\n file_name = os.path.basename(file_path_on_device)\n android.download_file(device, file_path_on_device, current_dir)\n android.remove_file(device, file_path_on_device)\n result_file_path = os.path.join(current_dir, file_name)\n if compress:\n console.compress_video(result_file_path)\n log.info(\"Find result at \" + result_file_path)",
"def _model_from_status(status: dict[str, str]) -> str | None:\n return (\n status.get(\"device.model\")\n or status.get(\"ups.model\")\n or status.get(\"ups.productid\")\n )",
"def _decode_sensor_status(self, status: str) -> str:\n k = int(status)\n return self.SENSOR_STATUSES[k]",
"def get_vidurl(self):\n if self.assets is None:\n self.get_assets()\n \n df = self.assets\n des = df.loc[(df['container']==self.container) & (df['display_name']==self.resolution), 'url']\n if des.shape[0] == 1:\n self.vidurl = des.iloc[0].replace('.bin',f'.{self.container}')\n return self.vidurl",
"def find_device_info(xcresult_path):\n parsed = xcresulttool_json('get', '--path', xcresult_path)\n actions = parsed['actions']['_values']\n action = actions[-1]\n\n result = action['runDestination']['targetDeviceRecord']['modelUTI']['_value']\n return result",
"def media_image_url(self) -> str:\n return self._device.movie.cover",
"async def async_get_media_image(self) -> tuple[bytes | None, str | None]:\n if self._client.current_track:\n image = bytes(self._client.current_track[\"art\"])\n return (image, \"image/png\")\n\n return None, None",
"def device_path(self):\n return self._device_path",
"def test_disk_media_item_display_url(db):\n data = datadir.join('1200x6566.png').read(mode='rb')\n item = media.fetch_or_create_media_item(data, file_type='png')\n assert item.display_url == (\n '/static/media/a5/de/ef/a5deef985bde4438969b5f74a1864f7a5b1d127df3197b4fadf3f855201278b4.png')",
"def getVirtualRRD(self,node,vmid):\n data = self.connect('get','nodes/%s/qemu/%s/rrd' % (node,vmid),None)\n return data",
"def device(self):\n # return self.inv_threshold.device\n return self.threshold.device",
"def get_devices_status(self):\n return {\n\n dev_name:{\n 'running_processes': self.get_running_processes(dev_handler),\n 'gpu_memory_free': utils.psutil_parse_readable_bytes(\n NvmlHandler.exec_nvml_function(nvmlDeviceGetMemoryInfo, dev_handler, 'free')\n ),\n 'gpu_memory_used': utils.psutil_parse_readable_bytes(\n NvmlHandler.exec_nvml_function(nvmlDeviceGetMemoryInfo, dev_handler, 'used')\n )\n } for dev_name, dev_handler in self.devices.items()\n }",
"def get(self, data):\n ret = self._rest_call(data, 'GET')\n result = json.loads(ret[2])\n if result != []:\n try:\n port = str(result[0]['attachmentPoint'][0]['port'])\n dpid = str(result[0]['attachmentPoint'][0]['switchDPID'])\n mac = str(result[0]['mac'][0])\n except IndexError:\n raise\n try:\n vlan = str(result[0]['vlan'][0])\n except:\n vlan = '-1'\n return (port, dpid, mac, vlan)\n else:\n raise KeyError",
"def device_path(self):\n return self._engine.device_path()",
"def get_mount_target(devname, label=None):\n return join(sep, 'media', label or basename(devname))",
"def device_info(device_id):\n device_info_map = listall.device_raw_info()[\"devices\"]\n for operating_system in device_info_map.keys():\n devices = device_info_map[operating_system]\n for device in devices:\n if device[\"udid\"].lower() == device_id.lower():\n return device\n return None"
] | [
"0.7345246",
"0.65547144",
"0.652944",
"0.60734904",
"0.5523844",
"0.5463692",
"0.5434447",
"0.5412849",
"0.5385215",
"0.5331038",
"0.5283186",
"0.5263492",
"0.52565235",
"0.5243921",
"0.5218811",
"0.5213309",
"0.5212679",
"0.51299715",
"0.50834125",
"0.5077698",
"0.5065594",
"0.50595325",
"0.5039625",
"0.50188446",
"0.50167763",
"0.5006009",
"0.49876255",
"0.49872783",
"0.4979496",
"0.49682114",
"0.49616858",
"0.49517387",
"0.4930984",
"0.49264023",
"0.49261308",
"0.4923019",
"0.49217257",
"0.4917428",
"0.491081",
"0.49104872",
"0.48890257",
"0.4887125",
"0.4881792",
"0.4877814",
"0.48760095",
"0.48619667",
"0.48619667",
"0.4861344",
"0.48606965",
"0.48535374",
"0.48406884",
"0.48381567",
"0.48378876",
"0.4835225",
"0.48290497",
"0.4826087",
"0.48052165",
"0.47954297",
"0.4788108",
"0.4788108",
"0.4788108",
"0.4788108",
"0.47745606",
"0.47685266",
"0.47657457",
"0.47633547",
"0.47596174",
"0.47588313",
"0.47564104",
"0.4752382",
"0.47493276",
"0.47475865",
"0.47457725",
"0.4744514",
"0.47437933",
"0.47335333",
"0.4733071",
"0.47318807",
"0.47205383",
"0.47153547",
"0.47121373",
"0.47053683",
"0.47033912",
"0.46987274",
"0.4696029",
"0.4688898",
"0.46864945",
"0.46780732",
"0.46730557",
"0.4669319",
"0.46672386",
"0.46622735",
"0.46612093",
"0.4657401",
"0.46557158",
"0.4651967",
"0.46404997",
"0.46299186",
"0.46232772",
"0.46219566"
] | 0.70259494 | 1 |
Returns the virtual media drive status. | def get_vm_status(self, device='FLOPPY'):
response, vm_device_uri = self._get_vm_device_status(device)
# Create RIBCL equivalent response
# RIBCL provides this data in VM status
# VM_APPLET = CONNECTED | DISCONNECTED
# DEVICE = FLOPPY | CDROM
# BOOT_OPTION = BOOT_ALWAYS | BOOT_ONCE | NO_BOOT
# WRITE_PROTECT = YES | NO
# IMAGE_INSERTED = YES | NO
response_data = {}
if response.get('WriteProtected', False):
response_data['WRITE_PROTECT'] = 'YES'
else:
response_data['WRITE_PROTECT'] = 'NO'
if response.get('BootOnNextServerReset', False):
response_data['BOOT_OPTION'] = 'BOOT_ONCE'
else:
response_data['BOOT_OPTION'] = 'BOOT_ALWAYS'
if response.get('Inserted', False):
response_data['IMAGE_INSERTED'] = 'YES'
else:
response_data['IMAGE_INSERTED'] = 'NO'
if response.get('ConnectedVia') == 'NotConnected':
response_data['VM_APPLET'] = 'DISCONNECTED'
# When media is not connected, it's NO_BOOT
response_data['BOOT_OPTION'] = 'NO_BOOT'
else:
response_data['VM_APPLET'] = 'CONNECTED'
response_data['IMAGE_URL'] = response['Image']
response_data['DEVICE'] = device
# FLOPPY cannot be a boot device
if ((response_data['BOOT_OPTION'] == 'BOOT_ONCE') and
(response_data['DEVICE'] == 'FLOPPY')):
response_data['BOOT_OPTION'] = 'NO_BOOT'
return response_data | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_vmedia_status(self):\n\n try:\n sushy_system = self._get_sushy_system()\n vmedia_status = sushy_system.vmedia\n except sushy.exceptions.SushyError as e:\n msg = (self._('The vmedia is not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n return vmedia_status",
"def status(self) -> pulumi.Output['outputs.VirtualHardDiskStatusResponse']:\n return pulumi.get(self, \"status\")",
"def status(self) -> \"VolumeAttachmentStatus\":\n return typing.cast(\n \"VolumeAttachmentStatus\",\n self._properties.get(\"status\"),\n )",
"def getVirtualStatus(self,node,vmid):\n data = self.connect('get','nodes/%s/qemu/%s/status/current' % (node,vmid),None)\n return data",
"def get_vmedia_device_status(self, device=\"cd0\"):\n\n if device not in VALID_VMEDIA_DEVICES:\n raise exception.InvalidInputError(\n \"Invalid device. Valid devices: cd0 or cd1 or hd0 or hd1.\")\n sushy_system = self._get_sushy_system()\n device = VALID_VMEDIA_DEVICES.get(device)\n\n vmedia_device_uri = self.get_vmedia_device_uri(device)\n\n try:\n resp = sushy_system._conn.get(vmedia_device_uri)\n return resp.text\n except sushy.exceptions.SushyError as e:\n msg = (self._('Error: %(error)s') %\n {'error': str(e)})\n raise exception.SDFlexError(msg)",
"def volume_state(self):\r\n return self.status",
"def files_status(self):\n return self._get('files/status')",
"def status(self):\n status = self._socket_client.media_controller.status\n status.episode_title = episode_title\n return status",
"def status(self):\n if self._vsanobj.id is None:\n raise VsanNotPresent(\"Vsan \" + str(self._vsanobj._id) + \" is not present on the switch.\")\n out = self.__show_zone_status()\n return out.get(get_key(zonekeys.STATUS, self._SW_VER), None)",
"def status(self) -> VacuumStatus:\n return VacuumStatus(self.send(\"get_status\")[0])",
"def recording_status(self):\n return self._get('recording/status')",
"def VMStatus(self):\n try:\n status = self.vmInstance.get_status()\n LOGGER.info('Current status of virtual machine \"{}\": {}'.format(VM_NAME, status))\n\n except Exception as e:\n status = None\n LOGGER.debug(e)\n LOGGER.error(traceback.format_exc())\n LOGGER.error('An error occured while getting status of virtual machine \"{}\"!'.format(VM_NAME))\n\n return status",
"def _get_vm_device_status(self, device='FLOPPY'):\n valid_devices = {'FLOPPY': 'floppy',\n 'CDROM': 'cd'}\n\n # Check if the input is valid\n if device not in valid_devices:\n raise exception.IloInvalidInputError(\n \"Invalid device. Valid devices: FLOPPY or CDROM.\")\n\n manager, uri = self._get_ilo_details()\n try:\n vmedia_uri = manager['links']['VirtualMedia']['href']\n except KeyError:\n msg = ('\"VirtualMedia\" section in Manager/links does not exist')\n raise exception.IloCommandNotSupportedError(msg)\n\n for status, hds, vmed, memberuri in self._get_collection(vmedia_uri):\n status, headers, response = self._rest_get(memberuri)\n if status != 200:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n if (valid_devices[device] in\n [item.lower() for item in response['MediaTypes']]):\n vm_device_uri = response['links']['self']['href']\n return response, vm_device_uri\n\n # Requested device not found\n msg = ('Virtualmedia device \"' + device + '\" is not'\n ' found on this system.')\n raise exception.IloError(msg)",
"def status(self):\n if not self.volume:\n # no volume active\n status = volume_status.NONE\n elif self._status and self._last_status_check >= time.time() - MIN_TIME_BETWEEN_STATUS_CHECKS:\n status = self._status\n else:\n try:\n self.volume.update()\n # Take only the first word of the status as openstack adds some extra info\n # after a space\n status = volume_status_map.get(self.volume.status.split(' ')[0], None)\n if status == volume_status.IN_USE and self.volume.attachment_state() == 'attached':\n status = volume_status.ATTACHED\n if not status:\n log.error(\"Unknown volume status: {0}. Setting status to volume_status.NONE\"\n .format(self.volume.status))\n status = volume_status.NONE\n self._status = status\n self._last_status_check = time.time()\n except EC2ResponseError as e:\n log.error(\n 'Cannot retrieve status of current volume. {0}'.format(e))\n status = volume_status.NONE\n return status",
"def status(self):\n if self.qemu.is_running():\n status = 0\n self.log.info(\"vm-status\", result=\"online\")\n for device in list(self.qemu.block_info().values()):\n self.log.info(\n \"disk-throttle\",\n device=device[\"device\"],\n iops=device[\"inserted\"][\"iops\"],\n )\n else:\n status = 1\n self.log.info(\"vm-status\", result=\"offline\")\n for volume in self.ceph.volumes:\n locker = volume.lock_status()\n self.log.info(\"rbd-status\", volume=volume.fullname, locker=locker)\n consul = locate_live_service(self.consul, \"qemu-\" + self.name)\n if consul:\n self.log.info(\n \"consul\", service=consul[\"Service\"], address=consul[\"Address\"]\n )\n else:\n self.log.info(\"consul\", service=\"<not registered>\")\n return status",
"async def get_status(self) -> str:\n return await self.hw_device.status()",
"def get_play_status(self):\n return self.get(COMMAND_UIC, 'GetPlayStatus')",
"def _get_mount_status(self, vm=None):\n result = Shell.run(f\"multipass info {vm} --format=json\")\n\n if f'instance \"{vm}\" does not exist' in result:\n dict_result = {\n 'name': vm,\n 'status': \"instance does not exist\"\n }\n else:\n result = json.loads(result)\n dict_result = {\n 'name': vm,\n 'status': result[\"info\"][vm]['state'],\n 'mounts': result[\"info\"][vm]['mounts']\n }\n return dict_result",
"def status(self):\n return self._bp.get_motor_status(self._port)",
"def status(self) -> dict:\n return {\"volume\": self.volume, \"mute\": self.mute}",
"def GetStatus(self):\r\n return self.status",
"def media_track(self):\n media_status = self._media_status()[0]\n return media_status.track if media_status else None",
"def _media_status(self):\n media_status = self.media_status\n media_status_received = self.media_status_received\n\n if (\n media_status is None\n or media_status.player_state == MEDIA_PLAYER_STATE_UNKNOWN\n ):\n groups = self.mz_media_status\n for k, val in groups.items():\n if val and val.player_state != MEDIA_PLAYER_STATE_UNKNOWN:\n media_status = val\n media_status_received = self.mz_media_status_received[k]\n break\n\n return (media_status, media_status_received)",
"def get_volume_status(self, volume_id):\n r = self.get_volume_details(volume_id)\n return r['status'], None",
"def get_camera_status():\n\n\ttarget = send_command('getstatus cam')\n\tsplit_ans = target.split()\n\t\n\treturn split_ans",
"def status(self):\n return self._select_interface(self._rc_status, self._http_status)",
"def getStatus(self):\r\n return self.controller.getStatus()",
"def status(self):\n ret = self.dev.ctrl_transfer(0xc0, 0x01, 0x0081, 0x0000, 0x0001)\n if ret[0] == 0xa0:\n return self.POWER_ON\n return self.POWER_OFF",
"def get_status(self):\n if self.vm.get_cloud_status() != \"ACTIVE\":\n return \"stopped\"\n #wait for the vm to be ready and SSH-able\n self.vm.wait_ready()\n status = self.vm.run_command(\"ctool status\", indent=0, prefix='')\n return status.strip()",
"def getstatus(self):\n return self.__status",
"def get_status(self):\n return self._status",
"def getDetailedStatus (self):\n try:\n if ((self._activateData == None) or (self._activateData.blockDevice == None)):\n self._log(\"no-block-device-no-status\").debug2(\"no block device was found for file system '%s'\",self._logicalDiskName)\n return None,ReturnCodes.kOk\n\n else:\n terminateTimeOut = self._activeTimeoutsConfig.getStatus\n timer = common.Timer(terminateTimeOut)\n blockDevice = self._activateData.blockDevice\n statusDisctionary,rc = self._tune2fs(blockDevice,timer)\n \n if (rc != ReturnCodes.kOk):\n self._log(\"get-status-failed\").error(\"getDetailedStatus() for file system '%s' failed!\",self._logicalDiskName)\n return None,ReturnCodes.kGeneralError\n\n if (statusDisctionary == None):\n self._log(\"have-block-but-no-status\").debug2(\"block device '%s' for file system '%s' - could not fins file-system status\",blockDevice,self._logicalDiskName)\n else:\n self._log(\"status-found\").debug2(\"block device '%s' for file system '%s' - status found!\",blockDevice,self._logicalDiskName)\n\n return statusDisctionary,rc\n\n except Exception,e:\n self._log(\"get-file-system-status-exception\").error(\"getDetailedStatus(terminateTimeOut=%.2f) faild! exception = '%s'\",terminateTimeOut,e)\n return None,ReturnCodes.kGeneralError",
"def get_status(self):\n return self.status",
"def get_status(self):\n return self.status",
"def get_status(self):\n return self.status",
"def status(self):\n\t\treturn self._status",
"def status(self):\n return self._get(path='status')",
"def status(self, name=None):\n volume_info = self.cm.find_name(name)\n if volume_info:\n status = volume_info[0]['State']\n else:\n Console.error(\"volume is not existed\")\n return volume_info",
"def _get_status(self):\n return self.__status",
"def getStatus(self):\n return self.__status",
"def storage_bytes_status(self) -> str:\n return pulumi.get(self, \"storage_bytes_status\")",
"def get_status(self) -> RobovacStatus:\n message = self._build_get_device_status_user_data_message()\n robovac_response = self._send_packet(message, True)\n received_status_bytes = robovac_response.c.usr_data\n received_status_ints = [x for x in received_status_bytes]\n\n return RobovacStatus(\n 1 if received_status_ints[6] & 4 > 0 else 0,\n 1 if received_status_ints[6] & 2 > 0 else 0,\n received_status_ints[1] & 255,\n received_status_ints[8] & 255,\n received_status_ints[11] & 255,\n received_status_ints[10] & 255,\n received_status_ints[12] & 255,\n received_status_ints[13] & 255\n )",
"def disk_encryption_status(self) -> 'outputs.DiskEncryptionStatusResponse':\n return pulumi.get(self, \"disk_encryption_status\")",
"def getStatus(self):\n return self._status",
"def get_status(self):\n return self.o.read_register(self.dev_id, STATUS)",
"def get_status(self):\n return self.read_register(259, 0, 3)",
"def status(self):\n return STATUSES.get(self._mower_status, {}).get('message', self._mower_status)",
"def status(self):\r\n return self._status",
"def status(self):\r\n return self._status",
"def getVKBStatus(self):\r\n inputStatus = self.phone.sx(self.getStatusSxString)\r\n if not inputStatus:\r\n return {}\r\n inputStatus = inputStatus.replace(\"\\n\",\"\\\\n\").replace(\"\\\\\",\"\\\\\\\\\")\r\n\r\n try:\r\n returnValue = eval(inputStatus)\r\n except Exception, e:\r\n debug.err(e)\r\n raise TestException(\"Invalid response when getting keyboard status: %s\"%str(e), self.phone)\r\n else:\r\n return returnValue",
"def wm_raw(self):\n return self.get_par(\"raw_drive\")",
"def status(self):\n if \"status\" in self._prop_dict:\n if isinstance(self._prop_dict[\"status\"], OneDriveObjectBase):\n return self._prop_dict[\"status\"]\n else :\n self._prop_dict[\"status\"] = AutomaticRepliesStatus(self._prop_dict[\"status\"])\n return self._prop_dict[\"status\"]\n\n return None",
"def status(self):\n return self.__status",
"def status(self):\n return self.__status",
"def get_stat(self):\n return os.stat(self.sync_path)",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n return self._status",
"def status(self):\n if hasattr(self, \"_status\"):\n return self._status\n else:\n return None",
"def status(self):\n return self.m.status",
"def drive_mode(self):\n return self._read(MX_DRIVE_MODE)",
"def get_status(self):\n\n return self._system",
"def status(self):\n return self.status",
"def status(self):\n return status_dict[self._get_property_(self.STATUS).upper()]",
"def status(self):\n\n # --- get 0 padded string representation of status register\n response = self.send_lens_cmd(['90', 'B9', '00'], fast_mode=True)\n state_str = bin(int('0x' + response['MISO'][2], 16))\n state_str = state_str[2:]\n for p in range(8 - len(state_str)):\n state_str = '0' + state_str\n\n self._status = dict(AF_switch=bool(int(state_str[0])),\n F_move=bool(int(state_str[5])),\n F_acc=bool(int(state_str[2])),\n FD_endStop=bool(int(state_str[3])),\n status_byte=state_str)\n\n return self._status",
"def status(self):\n\n return self._status",
"def status(self):\n\n return self._status",
"def status(self):\n\n return self._status",
"def smart_status(self) -> SmartSsdSmartStatus:\n return self._smart_status",
"def status(self):\n return self._query_status()['status']",
"def Status(self):\r\n\t\treturn self._get_attribute('status')",
"def status(self) -> int:\n return self._status",
"def getStatus(self, request, context):\n \n statusDrone = str(self.vehicle.system_status).rpartition(':')[2]\n\t \n return droneconnect_pb2.Status(status = statusDrone)",
"def get_status(self):\n # TODO retrieve from db if not set\n return self.status",
"def disk_encryption_status(self) -> pulumi.Output['outputs.DiskEncryptionStatusResponse']:\n return pulumi.get(self, \"disk_encryption_status\")",
"def status(self):\n return self._dbattr('status')",
"def getStatus(self, statClass=None):\n if not self.monitored() \\\n or not self.device() \\\n or not self.device().monitorDevice(): return -1\n if not statClass: statClass = \"/Status/%s\" % self.meta_type\n return EventView.getStatus(self, statClass)",
"def status(self):\n return self._data['status']",
"def status(self):\n return self.get(self._names[\"status\"])"
] | [
"0.75640255",
"0.69452286",
"0.66328543",
"0.6481011",
"0.6425003",
"0.637285",
"0.6320982",
"0.62134093",
"0.6203544",
"0.6191531",
"0.6184872",
"0.6158805",
"0.61556655",
"0.608565",
"0.60727584",
"0.60462505",
"0.6010634",
"0.6006689",
"0.59516776",
"0.5919756",
"0.58666056",
"0.5851946",
"0.5845334",
"0.5840501",
"0.5832929",
"0.58325595",
"0.58308303",
"0.5826182",
"0.5822828",
"0.58182937",
"0.57682467",
"0.5767261",
"0.5754434",
"0.5754434",
"0.5754434",
"0.5751897",
"0.574875",
"0.574224",
"0.57391745",
"0.5727107",
"0.57197744",
"0.57127416",
"0.57023686",
"0.5692164",
"0.5690855",
"0.56862664",
"0.5663963",
"0.5650174",
"0.5650174",
"0.5645052",
"0.56367457",
"0.56042236",
"0.5599326",
"0.5599326",
"0.5599221",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.55939794",
"0.5586369",
"0.5576731",
"0.5567586",
"0.5563502",
"0.5553183",
"0.554026",
"0.55209345",
"0.5519866",
"0.5519866",
"0.5519866",
"0.5501403",
"0.5500019",
"0.54975253",
"0.5491231",
"0.5488588",
"0.54843664",
"0.548188",
"0.54703563",
"0.5465064",
"0.5456535",
"0.5447096"
] | 0.5910968 | 20 |
Sets the Virtual Media drive status. It sets the boot option for the virtual media device. | def set_vm_status(self, device='FLOPPY',
boot_option='BOOT_ONCE', write_protect='YES'):
# CONNECT is a RIBCL call. There is no such property to set in RIS.
if boot_option == 'CONNECT':
return
boot_option_map = {'BOOT_ONCE': True,
'BOOT_ALWAYS': False,
'NO_BOOT': False
}
if boot_option not in boot_option_map:
msg = ('Virtualmedia boot option "' + boot_option + '" is '
'invalid.')
raise exception.IloInvalidInputError(msg)
response, vm_device_uri = self._get_vm_device_status(device)
# Update required property
vm_settings = {}
vm_settings['Oem'] = (
{'Hp': {'BootOnNextServerReset': boot_option_map[boot_option]}})
# perform the patch operation
status, headers, response = self._rest_patch(
vm_device_uri, None, vm_settings)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def enable_vmedia(self, set_vmedia_state):\n\n if not isinstance(set_vmedia_state, bool):\n msg = ('The parameter \"%(parameter)s\" value \"%(value)s\" for '\n 'vmedia is invalid. Valid values are: True/False.' %\n {'parameter': 'ServiceEnabled',\n 'value': set_vmedia_state})\n raise exception.InvalidInputError(msg)\n sushy_system = self._get_sushy_system()\n sdflex_virtual_media.VirtualMedia.enable_vmedia(sushy_system,\n set_vmedia_state)",
"def step7(self):\n for indx, mr in enumerate(self.mrs):\n self.log.info(\"Set boot drive on controller:%d\"\n % (mr.ctrl_id))\n for vd in self.mr_vds[indx]:\n if (int(mr.cli.bootdrive_vd_get()) != vd):\n mr.cli.bootdrive_vd_set(vd_id=self.mr_vds[indx][indx],\n setting=\"On\")\n break",
"def set_vm_status(self, boot_on_next_reset):\n data = {\n \"Oem\": {\n \"Hpe\": {\n \"BootOnNextServerReset\": boot_on_next_reset\n }\n }\n }\n self._conn.patch(self.path, data=data)",
"def set_media_volume_sync(self, dut_name, enable=True):\n try:\n if self.phone_info.phone_type == PhoneType.ANDROID and 'SM' in self._get_android_phone_model():\n is_bt_connected_to_device = self.bt_is_connected_to(dut_name)\n if not is_bt_connected_to_device:\n logger.debug(\n 'For phone found that DUT {} is not connected with {} , '\n 'So Media Volume Sync option is not available '.format(\n dut_name,\n self.phone_info.bluetooth_name))\n return False\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_more_options,\n 5)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_more_options, 0).click()\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.media_volume_text,\n 10)\n self.find_element(self.driver.appium_driver,\n self.media_volume_text, 0).click()\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.media_volume_sync_switch,\n 10)\n\n volume_sync_switch = self.find_element(\n self.driver.appium_driver, self.media_volume_sync_switch,\n 0)\n\n # Now click that button if we're in the wrong state.\n is_media_volume_sync = self._toggle_switch(volume_sync_switch,\n enable)\n self.driver.appium_driver.back()\n logger.debug(\n \"Media Volume option is set to {} on connected bluetooth devices {}\".format(\n enable, dut_name))\n return is_media_volume_sync\n logger.warning(\n \"Media Volume Sync Option is not available on {} connected bluetooth devices\".format(\n self.phone_info.bluetooth_name))\n except Exception as e:\n logger.warning(\n \"Could not enable/disable Media Volume Sync on connected mobile devices {}\"\n .format(self.phone_info.bluetooth_name))\n logger.warning(repr(e))\n return False",
"def set_status(self, status, status_extra, last_command=None, last_device_command=None, delay=None):\n if delay is None:\n delay = 0.100\n\n if last_device_command is not None:\n command = last_device_command.command\n request_id = last_device_command.request_id\n else:\n command = None\n request_id = None\n\n if last_command is not None:\n command = last_command\n\n if status is None:\n self.yombo_device.set_status_delayed(\n delay=delay,\n machine_status_extra=status_extra,\n request_id=request_id,\n reported_by=\"Wemo node\"\n )\n else:\n self.yombo_device.set_status_delayed(\n delay=delay,\n command=command,\n request_id=request_id,\n machine_status=status,\n machine_status_extra=status_extra,\n reported_by=\"Wemo node\"\n )",
"def set_virtual_stage(self, virtual_stage: int) -> None:\n self.virtual_stage = virtual_stage",
"def set_drive_mode(mode):",
"def SetStatus(self, status):\r\n self.status = status",
"def set_status(self, status):\n if status == 'qw':\n status = 'Waiting'\n elif status == 'hqw':\n status = 'Held'\n elif status == 'Eqw':\n status = 'Error'\n else:\n sys.exit(20)\n self.status = status\n return",
"def update(self):\n try:\n if self._remote.power() == 1:\n self._state = STATE_ON\n else:\n self._state = STATE_OFF\n\n # Set TV to be able to remotely power on\n # self._remote.power_on_command_settings(2)\n if self._remote.mute() == 2:\n self._muted = False\n else:\n self._muted = True\n self._volume = self._remote.volume() / 60\n except OSError:\n self._state = STATE_OFF",
"def set_status(self, status):\n self.status = status",
"def set_status(self, status):\n self.status = status",
"def set_status(self, status):\n self.status = status",
"def setstatus(self, status):\n with self.lock:\n self.status = status",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def set_status(self, root, status='queued'):\n # Touch the status file\n Path(f'{root}.{status}').touch()",
"def manual_driving(self):\n\n self.start_driving()",
"def set_status(self, status):\n # TODO log to db\n self.status = status",
"def set_states(self) -> None:\n self._attr_state = (\n MediaPlayerState.ON if self._zone.power else MediaPlayerState.OFF\n )\n self._attr_is_volume_muted = self._zone.mute\n self._attr_volume_level = self._zone.volume_as_percentage\n self._attr_media_title = self._zone.input_name\n self._attr_app_name = self._zone.input_format\n self._attr_source = self._zone.input_name\n self._attr_source_list = self.avr.input_list",
"def set_remote_status(self, mode):\n status = {\n 0: \"Local and locked\",\n 1: \"Remote and locked\",\n 2: \"Local and unlocked\",\n 3: \"Remote and unlocked\",\n }\n logging.info(__name__ + ' : Setting remote control status to %s' % status.get(mode, \"Unknown\"))\n self._execute('C%s' % mode)",
"def set_vpn_state(self, status):\n if hasattr(self, status):\n self.change_to(getattr(self, status))",
"def setStatus(self, status):\n self.__status = status",
"def set_autoreboot_status(self, status: int) -> str:\n return self._req_post(self._URLS['SetAutoreboot'], data={\"autoRebootEn\": status, \"delayRebootEn\": True, \"rebootTime\": \"02: 00\"})",
"def setOn(self, command):\r\n self.setDriver('ST', 1)",
"def status(self, cmd):\n\n self.actor.sendVersionKey(cmd)\n self.actor.camera.sendStatusKeys(cmd)\n \n cmd.inform('text=\"Present!\"')\n cmd.finish()",
"def change_status():\n if self.on:\n connect.SOCKET.sendall(bytes(\"OFF\\n\", \"utf-8\"))\n self.on = False\n else:\n connect.SOCKET.sendall(bytes(\"ON\\n\", \"utf-8\"))\n self.on = True",
"def set_directory_status(self, i, status):\n\t\tself.directoryModel.set_value(i, 'directoryStatus', status)",
"def initStatus(status):\n if status == 0 :\n print(\"Supported controller connected\")\n elif status < 0 :\n print(\"No supported controller detected\")\n else:\n print(\"Waiting for controller {}\".format(status) )",
"def drive_mode(self, value):\n self._write(MX_DRIVE_MODE, value)",
"def test_list_drives_drive_firmware_update(self):\n pass",
"def _set_status(self, status):\n with self.status_lock:\n if (status in _ENDING_STATUSES) or (not self.status in _ENDING_STATUSES):\n self.status = status",
"async def set_status(self, ctx, *, status: str = \"online\"):\n\n try:\n status = discord.Status[status.lower()]\n except KeyError:\n await ctx.error(\"Invalid Status\", \"Only `online`, `idle` or `dnd` statuses are available.\")\n else:\n await self.bot.change_presence(status=status, activity=ctx.me.activity)\n await ctx.success(f\"Status changed to {status}.\")",
"def test_create_drives_drive_suspend_item(self):\n pass",
"def status(self, status):\n self._set_property_(self.STATUS, str(status))",
"def set_volume(self, target: int) -> None:\n self.media.set_volume(target)\n self.system.notify(f\"Jarvis::Volume has been set to: {self.media.get_volume()['volume']}%\")",
"def get_vmedia_status(self):\n\n try:\n sushy_system = self._get_sushy_system()\n vmedia_status = sushy_system.vmedia\n except sushy.exceptions.SushyError as e:\n msg = (self._('The vmedia is not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n return vmedia_status",
"def set_status(self, status):\n if not status == self._status:\n self._status = status\n self.winstance.send_event('State changed to ' + self._status)\n\n self.completed = not self.parent_node.is_job or \\\n self._status == 'COMPLETED'\n\n if self.completed:\n self.publish()\n\n if not self.parent_node.is_job:\n self.failed = False\n else:\n self.failed = self.parent_node.is_job and \\\n (self._status == 'BOOT_FAIL' or\n self._status == 'CANCELLED' or\n self._status == 'FAILED' or\n self._status == 'REVOKED' or\n self._status == 'TIMEOUT')",
"def VMStartWait(self):\n try:\n status = self.VMStatus()\n\n if status == 'POWERED OFF':\n LOGGER.debug('Trying to start VM...')\n self.vmInstance.power_on()\n\n status = self.VMStatus()\n\n LOGGER.debug('Waiting until OS started...')\n self.vmInstance.wait_for_tools(timeout=OP_TIMEOUT)\n\n else:\n LOGGER.warning('Virtual machine \"{}\" powered on already!'.format(VM_NAME))\n\n except Exception as e:\n status = None\n LOGGER.debug(e)\n LOGGER.error(traceback.format_exc())\n LOGGER.error('An error occured while starting virtual machine \"{}\" and waiting for guest OS start!'.format(VM_NAME))\n\n return status",
"def test_change_volume_status(self, volume, volumes_steps):\n volumes_steps.change_volume_status(volume.name, 'Error')\n volumes_steps.change_volume_status(volume.name, 'Available')",
"def status(self, status):\n self._status = status",
"def status(self, status):\n self._status = status",
"def status(self, status):\n self._status = status",
"def status(self, status):\n self._status = status",
"def status(self, status):\n self._status = status",
"def status(self, status):\n self._status = status",
"def status(self, status):\n self._status = status",
"def step4(self):\n for mr in self.mrs:\n self.log.info(\"Boot drive of controller: %d is %d\"\n % (mr.ctrl_id, mr.cli.bootdrive_vd_get()))",
"def update_volume_after_attached_to_vm(self, info, vms):\n path = info[0]['path']\n path_list = path.split(sep='/')\n machine_path_list = [\"~\", \"Home\"]\n machine_path_list.extend(path_list[3:])\n info[0]['machine_path'] = \"/\".join(machine_path_list)\n info[0]['AttachedToVm'] = vms\n info[0]['State'] = 'in-use'\n info[0]['time'] = datetime.datetime.now()\n return info",
"async def put(self, virtual_id, request) -> web.Response:\n virtual = self._ledfx.virtuals.get(virtual_id)\n if virtual is None:\n response = {\n \"status\": \"failed\",\n \"reason\": f\"Virtual with ID {virtual_id} not found\",\n }\n return web.json_response(data=response, status=404)\n\n try:\n data = await request.json()\n except JSONDecodeError:\n response = {\n \"status\": \"failed\",\n \"reason\": \"JSON Decoding failed\",\n }\n return web.json_response(data=response, status=400)\n active = data.get(\"active\")\n if active is None:\n response = {\n \"status\": \"failed\",\n \"reason\": 'Required attribute \"active\" was not provided',\n }\n return web.json_response(data=response, status=400)\n\n # Update the virtual's configuration\n try:\n virtual.active = active\n except ValueError as msg:\n response = {\n \"status\": \"failed\",\n \"payload\": {\"type\": \"warning\", \"reason\": str(msg)},\n }\n return web.json_response(data=response, status=202)\n\n # Update ledfx's config\n for idx, item in enumerate(self._ledfx.config[\"virtuals\"]):\n if item[\"id\"] == virtual.id:\n item[\"active\"] = virtual.active\n self._ledfx.config[\"virtuals\"][idx] = item\n break\n\n save_config(\n config=self._ledfx.config,\n config_dir=self._ledfx.config_dir,\n )\n\n response = {\"status\": \"success\", \"active\": virtual.active}\n return web.json_response(data=response, status=200)",
"def sync_status_to_vc(status, context):\n conn = self._vc_connection\n conn.vip.health.set_status(status, context)",
"def system_status(self, system_status):\n\n self._system_status = system_status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def status(self, status):\n\n self._status = status",
"def setSyncMode(self, IsPauseOn = True):\n self._IsPauseOn = IsPauseOn",
"def power_on(self, sync=True, wait_for_guest_ready=True):\n self.vmomi_object.PowerOn()\n if sync: self._wait_for_power_on(wait_for_guest_ready)",
"def virtual_flag(self, value):\n if not isinstance(value, bool):\n raise TypeError(\"virtual_flag must be bool.\")\n self._virtual_flag = value",
"def boot_VM(self, server, vmfile):\n vmaddr = \"\"\n fail = False\n \n #print vmfile\n #-----read template into string -------------------------\n #s=open('./share/examples/ubuntu_context.one','r').read()\n \n s = open(os.path.expanduser(vmfile), 'r').read()\n #self.logger.debug(\"Vm template:\\n\"+s)\n \n #-----Start VM-------------------------------------------\n vm = server.one.vm.allocate(self.oneauth, s)\n \n #print self.oneauth\n #print vm\n \n if vm[0]:\n self.logger.debug(\"VM ID: \" + str(vm[1]))\n \n #monitor VM\n booted = False\n maxretry = self.wait_max / 5 #time that the VM has to change from penn to runn \n retry = 0\n while not booted and retry < maxretry: #eventually the VM has to boot or fail\n try:\n #-------Get Info about VM -------------------------------\n vminfo = server.one.vm.info(self.oneauth, vm[1])\n #print vminfo[1]\n manifest = parseString(vminfo[1])\n \n #VM_status (init=0, pend=1, act=3, fail=7)\n vm_status = manifest.getElementsByTagName('STATE')[0].firstChild.nodeValue.strip()\n \n if vm_status == \"3\": #running\n #LCM_status (prol=1,boot=2,runn=3, fail=14, unk=16)\n lcm_status = manifest.getElementsByTagName('LCM_STATE')[0].firstChild.nodeValue.strip()\n \n if lcm_status == \"3\": #if vm_status is 3, this will be 3 too.\n booted = True\n elif vm_status == \"7\": #fail\n self.logger.error(\"Fail to deploy VM \" + str(vm[1]))\n booted = True\n fail = True\n vmaddr = \"fail\"\n elif vm_status == \"6\": #done\n self.logger.error(\"The status of the VM \" + str(vm[1]) + \" is DONE\")\n booted = True\n fail = True\n vmaddr = \"fail\"\n else:\n retry += 1\n time.sleep(5)\n except:\n pass\n if retry >= maxretry:\n self.logger.error(\"The VM \" + str(vm[1]) + \" did not change to runn status. Please verify that the status of the OpenNebula hosts \"\n \"or increase the wait time in the configuration file (max_wait) \\n\")\n vmaddr = \"fail\"\n fail = True\n if not fail:\n #get IP\n nics = manifest.getElementsByTagName('NIC')\n \n for i in range(len(nics)):\n if(nics[i].childNodes[0].firstChild.nodeValue.strip() == self.bridge):\n vmaddr = nics[i].childNodes[1].firstChild.nodeValue.strip()\n if vmaddr.strip() != \"\":\n self.logger.debug(\"IP of the VM \" + str(vm[1]) + \" is \" + str(vmaddr))\n \n access = False\n maxretry = 240 #this says that we wait 20 minutes maximum to allow the VM get online. \n #this also prevent to get here forever if the ssh key was not injected propertly.\n retry = 0\n self.logger.debug(\"Waiting to have access to VM\")\n while not access and retry < maxretry:\n cmd = \"ssh -q -oBatchMode=yes root@\" + vmaddr + \" uname\"\n p = Popen(cmd, shell=True, stdout=PIPE)\n status = os.waitpid(p.pid, 0)[1]\n #print status\n if status == 0:\n access = True\n self.logger.debug(\"The VM \" + str(vm[1]) + \" with ip \" + str(vmaddr) + \"is accessible\")\n else:\n retry += 1\n time.sleep(5)\n if retry >= maxretry:\n self.logger.error(\"Could not get access to the VM \" + str(vm[1]) + \" with ip \" + str(vmaddr) + \"\\n\" \n \"Please verify the OpenNebula templates to make sure that the public ssh key to be injected is accessible to the oneadmin user. \\n\"\n \"Also verify that the VM has ssh server and is active on boot.\")\n vmaddr = \"fail\"\n else:\n self.logger.error(\"Could not determine the IP of the VM \" + str(vm[1]) + \" for the bridge \" + self.bridge)\n vmaddr = \"fail\"\n else:\n vmaddr = \"fail\"\n \n return [vmaddr, vm[1]]",
"def status(self, status: str):\n\n self._status = status",
"def status(self, status: str):\n\n self._status = status",
"def _set_status(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"status\", rest_name=\"status\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"status must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"status\", rest_name=\"status\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__status = t\n if hasattr(self, '_set'):\n self._set()",
"def test_set_state_partial(self):\n host = synthetic_host(\"myserver\")\n self.create_simple_filesystem(host)\n mgt_uri = \"/api/target/%s/\" % self.mgt.id\n with mock.patch(\"chroma_core.models.Command.set_state\", mock.Mock(return_value=None)):\n self.api_set_state_partial(mgt_uri, \"unmounted\")\n Command.set_state.assert_called_once()",
"def status(self):\n ret = self.dev.ctrl_transfer(0xc0, 0x01, 0x0081, 0x0000, 0x0001)\n if ret[0] == 0xa0:\n return self.POWER_ON\n return self.POWER_OFF",
"def test_set_scan_status(self):\n pass",
"def set_pause(self, pause):\n\n game_status = self.game.get_game_status();\n if(game_status == GameStatus.NotStarted or game_status == GameStatus.Finished):\n return;\n\n if(pause == True):\n self.game.set_game_status(GameStatus.Paused);\n self.bttn_pause.set_text(\"Reprendre la partie\");\n\n self.game.stop_timer();\n\n elif(pause == False):\n self.game.set_game_status(GameStatus.InProgress);\n self.bttn_pause.set_text(\"Mettre en pause\");\n\n self.game.start_timer();",
"def vzone_update_status(self, send):\n for zone in self._vzones:\n childs_lockedby = []\n # used to set status of a _vzone\n for child in self._vzones[zone]['childs']:\n childs_lockedby.append(self._pzones[child]['lockedby'])\n # if the len of the set of childs_lockedby is stricly superior to 1, the v_zone must be locked\n # because another vzone is already up\n if len(set(childs_lockedby)) > 2:\n self._vzones[zone]['Status'] = \"locked\"\n else:\n # if one or minus than one child zone is locked, the status can be on or off\n # to know it, we take the first p_zone child as model\n first_pzone = self._vzones[zone]['childs'][0]\n if self._pzones[first_pzone]['lockedby'] == self._vzones[zone]['name']:\n self._vzones[zone]['Status'] = \"on\"\n # if the p_zone model isn't locked and the len of the set of childs_lockedby is equal to 1\n elif self._pzones[first_pzone]['lockedby'] == '' and len(set(childs_lockedby)) == 1:\n self._vzones[zone]['Status'] = \"off\"\n # if the child is locked by another v_zone, the actual _vzone must be locked\n else:\n self._vzones[zone]['Status'] = \"locked\"\n val = (\"Status\", self._vzones[zone]['Status'])\n\t send(zone,val)\n\t # To replace all command widget to its original values\n\t for cle in PZONE_TO_VZONE:\n send(zone, (cle, self._vzones[zone][cle]))",
"def mute(self, status=None):\n if status is None:\n status = not self.status.volume_muted\n\n self._socket_client.receiver_controller.set_volume_muted(status)",
"def on_set_volume(self, event):\n self.currentVolume = self.volumeCtrl.GetValue()\n self.mplayer.SetProperty(\"volume\", self.currentVolume)",
"def set_status_running(self) -> None:\n if self._is_aborted():\n return\n assert self._status == self.Status.WAITING_FOR_TEST_START\n self._status = self.Status.RUNNING\n self.notify_update()",
"def status(self, value: typing.Union[\"VolumeAttachmentStatus\", dict]):\n if isinstance(value, dict):\n value = typing.cast(\n VolumeAttachmentStatus,\n VolumeAttachmentStatus().from_dict(value),\n )\n self._properties[\"status\"] = value",
"def set_visible(self, status):\n if isinstance(status, bool):\n if status:\n self._visible = True\n else:\n self._visible = False\n else:\n raise ValueError(\"Input must a bool.\")",
"def set_virtual_disk_storage_profile(vm, hardware_device, profile):\n\n spec = vim.vm.ConfigSpec()\n device_specs = []\n profile_specs = []\n profile_spec = vim.vm.DefinedProfileSpec()\n profile_spec.profileId = profile.profileId.uniqueId\n profile_specs.append(profile_spec)\n\n device_spec = vim.vm.device.VirtualDeviceSpec()\n device_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit\n device_spec.device = hardware_device\n device_spec.profile = profile_specs\n device_specs.append(device_spec)\n spec.deviceChange = device_specs\n vm.ReconfigVM_Task(spec)",
"def test_set_state_full(self):\n host = synthetic_host(\"myserver\")\n self.create_simple_filesystem(host)\n mgt_uri = \"/api/target/%s/\" % self.mgt.id\n with mock.patch(\"chroma_core.models.Command.set_state\", mock.Mock(return_value=None)):\n self.api_set_state_full(mgt_uri, \"unmounted\")\n Command.set_state.assert_called_once()",
"def drive(self, **kw):\n _append_conds(self._default_cond, types.Drive, kw)\n return self",
"def set_wps_status(self, status: int) -> str:\n return self._req_post(self._URLS['SetWPS'], data={'wpsEn': status})",
"def status(self, value: ControllerStatus):\n self._status = value\n self.__status_event.set()",
"def _turn_on_dev_mode(self):\n if self._device is not None:\n self._char_write(self._BLE_SERVICE_ANTI_DOS,\n [ord(c) for c in self._ANTI_DOS_MESSAGE])\n self._char_write(self._BLE_SERVICE_TX_POWER,\n [self._TX_POWER_VALUE])\n # Sending 0x01 to the wake service wakes the sphero.\n self._char_write(self._BLE_SERVICE_WAKE, [0x01])",
"def configurar_volume(self):\n\n print(\"Volume configurado\")",
"def start_driving(self):\n\n self.stop_driving()\n self.drive_thread = DriveThread()\n self.drive_thread.start()",
"def set_status(self, status):\n if status == \"offline\":\n self._status.set_message(\"N\")\n self._status.set_foreground_color(\"red\")\n \n elif status == \"online\":\n self._status.set_message(\"Y\")\n self._status.set_foreground_color(\"Green\")\n \n elif status == \"away\":\n self._status.set_message(\"A\")\n self._status.set_foreground_color(\"Grey\")\n \n elif status == \"busy\":\n self._status.set_message(\"B\")\n self._status.set_foreground_color(\"Yellow\")",
"def _set_status(self):\n result = self._get_status()\n if result and result[0]['state'] == 'aborted':\n raise Exception(\"Aborted because the status flag is set to 'aborted' in dynamodb\")\n\n # record the status\n self.status['timestamp'] = time.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n self.db_handler.update_item({'api_version': TsV2CatalogHandler.api_version}, self.status)"
] | [
"0.6145363",
"0.60608554",
"0.604248",
"0.58181196",
"0.5729332",
"0.56799835",
"0.56557316",
"0.5576355",
"0.5562009",
"0.5548635",
"0.5530569",
"0.5530569",
"0.5530569",
"0.54632413",
"0.54603094",
"0.54399",
"0.5374403",
"0.53504235",
"0.52688646",
"0.5256338",
"0.5245759",
"0.52405393",
"0.5226826",
"0.52200913",
"0.51879424",
"0.51837105",
"0.5166063",
"0.516539",
"0.5162931",
"0.51392525",
"0.50920504",
"0.5091559",
"0.50897366",
"0.5086923",
"0.5071917",
"0.50658035",
"0.5056809",
"0.5056498",
"0.50460654",
"0.50424254",
"0.50424254",
"0.50424254",
"0.50424254",
"0.50424254",
"0.50424254",
"0.50424254",
"0.50369626",
"0.5014706",
"0.4994648",
"0.49907276",
"0.49875337",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.498221",
"0.4977144",
"0.49545985",
"0.49484074",
"0.4944736",
"0.493588",
"0.493588",
"0.49289954",
"0.49242148",
"0.49227625",
"0.49109432",
"0.49085858",
"0.4899807",
"0.48990625",
"0.48965397",
"0.48958567",
"0.48928824",
"0.48909658",
"0.4885931",
"0.48815984",
"0.48743758",
"0.48697037",
"0.4867217",
"0.48639873",
"0.48569158",
"0.4853466",
"0.48409268",
"0.48348"
] | 0.6670368 | 0 |
Notifies iLO of the location of a virtual media diskette image. | def insert_virtual_media(self, url, device='FLOPPY'):
response, vm_device_uri = self._get_vm_device_status(device)
# Eject media if there is one. RIBCL was tolerant enough to overwrite
# existing media, RIS is not. This check is to take care of that
# assumption.
if response.get('Inserted', False):
self.eject_virtual_media(device)
# Update required property
vm_settings = {}
vm_settings['Image'] = url
# Perform the patch operation
status, headers, response = self._rest_patch(
vm_device_uri, None, vm_settings)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_volume_after_attached_to_vm(self, info, vms):\n path = info[0]['path']\n path_list = path.split(sep='/')\n machine_path_list = [\"~\", \"Home\"]\n machine_path_list.extend(path_list[3:])\n info[0]['machine_path'] = \"/\".join(machine_path_list)\n info[0]['AttachedToVm'] = vms\n info[0]['State'] = 'in-use'\n info[0]['time'] = datetime.datetime.now()\n return info",
"def test_view_volume(self, volume, volumes_steps):\n volumes_steps.view_volume(volume.name)",
"def test_disk(self):\n self.command.package = self.input_ovf\n self.command.file_id = \"file1\"\n self.command.run()\n self.command.finished()\n self.check_diff(\"\"\"\n <ovf:References>\n- <ovf:File ovf:href=\"input.vmdk\" ovf:id=\"file1\" ovf:size=\"{vmdk_size}\" />\n <ovf:File ovf:href=\"input.iso\" ovf:id=\"file2\" ovf:size=\"{iso_size}\" />\n...\n <ovf:Info>Virtual disk information</ovf:Info>\n- <ovf:Disk ovf:capacity=\"1\" ovf:capacityAllocationUnits=\"byte * 2^30\" \\\novf:diskId=\"vmdisk1\" ovf:fileRef=\"file1\" ovf:format=\"http://www.vmware.com/\\\ninterfaces/specifications/vmdk.html#streamOptimized\" />\n </ovf:DiskSection>\n...\n <rasd:AddressOnParent>0</rasd:AddressOnParent>\n- <rasd:ElementName>Hard Drive</rasd:ElementName>\n- <rasd:HostResource>ovf:/disk/vmdisk1</rasd:HostResource>\n- <rasd:InstanceID>6</rasd:InstanceID>\n- <rasd:Parent>3</rasd:Parent>\n- <rasd:ResourceType>17</rasd:ResourceType>\n- </ovf:Item>\n- <ovf:Item>\n- <rasd:AddressOnParent>0</rasd:AddressOnParent>\n <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>\n\"\"\".format(vmdk_size=self.FILE_SIZE['input.vmdk'],\n iso_size=self.FILE_SIZE['input.iso']))\n self.assertFalse(os.path.exists(os.path.join(self.temp_dir,\n \"input.vmdk\")),\n \"deleted file should not be exported\")",
"def command_photo(self, bot, update):\n\n self.send_message(bot, update, \"Not implemented yet.\")",
"def update_volume_after_detach(self, info, vms):\n info[0]['AttachedToVm'] = vms\n if len(vms) == 0:\n info[0]['machine_path'] = None\n info[0]['State'] = 'available'\n info[0]['time'] = datetime.datetime.now()\n return info",
"def eject_virtual_media(self, device='FLOPPY'):\n response, vm_device_uri = self._get_vm_device_status(device)\n\n # Check if virtual media is connected.\n if response.get('Inserted') is False:\n return\n\n # Update required property\n vm_settings = {}\n vm_settings['Image'] = None\n\n # perform the patch operation\n status, headers, response = self._rest_patch(\n vm_device_uri, None, vm_settings)\n\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def test_aws_service_api_volume_attachment_put(self):\n pass",
"def pv(self, *args, **kwargs):\n return _image.image_pv(self, *args, **kwargs)",
"def attach_volume(self, connection_info, instance, mountpoint):\n instance_name = instance['name']\n vm_ref = vm_util.get_vm_ref_from_name(self._session, instance_name)\n if vm_ref is None:\n raise exception.InstanceNotFound(instance_id=instance_name)\n # Attach Volume to VM\n LOG.debug(_(\"Attach_volume: %(connection_info)s, %(instance_name)s, \"\n \"%(mountpoint)s\") % locals())\n driver_type = connection_info['driver_volume_type']\n if driver_type not in ['iscsi']:\n raise exception.VolumeDriverNotFound(driver_type=driver_type)\n data = connection_info['data']\n mount_unit = volume_util.mountpoint_to_number(mountpoint)\n\n # Discover iSCSI Target\n device_name, uuid = self.discover_st(data)\n if device_name is None:\n raise volume_util.StorageError(_(\"Unable to find iSCSI Target\"))\n\n # Get the vmdk file name that the VM is pointing to\n hardware_devices = self._session._call_method(vim_util,\n \"get_dynamic_property\", vm_ref,\n \"VirtualMachine\", \"config.hardware.device\")\n vmdk_file_path, controller_key, adapter_type, disk_type, unit_number \\\n = vm_util.get_vmdk_path_and_adapter_type(hardware_devices)\n # Figure out the correct unit number\n if unit_number < mount_unit:\n unit_number = mount_unit\n else:\n unit_number = unit_number + 1\n self.attach_disk_to_vm(vm_ref, instance_name,\n adapter_type, disk_type=\"rdmp\",\n controller_key=controller_key,\n unit_number=unit_number,\n device_name=device_name)\n LOG.info(_(\"Mountpoint %(mountpoint)s attached to \"\n \"instance %(instance_name)s\") % locals())",
"def on_image(self, image):",
"def newMoteDetected(self, mote):\n if self._printSWAP == True:\n print \"New mote with address \" + str(mote.address) + \" : \" + mote.definition.product + \\\n \" (by \" + mote.definition.manufacturer + \")\"",
"def volumes(self):",
"def segment(self):\n warning = QErrorMessage()\n warning.setWindowModality(Qt.WindowModal)\n warning.showMessage('Warning: IVUS Phenotyping is currently only supported for 20MHz images. Interpret other images with extreme caution')\n warning.exec_()",
"def energy_use(update: 'Update', context: 'CallbackContext'):\n bot = context.bot\n chat_id = update.message.chat_id\n url = \"https://vloer.ko-lab.space/verbruikdag.png?random=\" + str(randint(1,9999))\n\n try:\n bot.send_photo(chat_id=chat_id, photo=url)\n except Exception as err:\n msg = \"Oops...something went wrong: {}\".format(err)\n print(msg)\n update.message.reply_text(msg)",
"def mount_root_vm(self):\n print \"montage de la partition root de %s\" % name_vm_dest\n self.exec_cmd(\"mount /dev/%s/root-%s %s\" % (vgname, name_vm_dest, self.rep_vhosts_vm))",
"def set_volume(self, target: int) -> None:\n self.media.set_volume(target)\n self.system.notify(f\"Jarvis::Volume has been set to: {self.media.get_volume()['volume']}%\")",
"def image(self,v):\n self.set('heightfield.image',v)\n #assert fileExists(environment.makeFilePath(v)), \"Warning: HeightField's image file, {}, not found in images folder.\".format(v) \n return self",
"def __mount_ebs_volume( self ):\n ebs_volume_size = self.instance_tag( 'ebs_volume_size' ) or '0'\n ebs_volume_size = int( ebs_volume_size )\n if ebs_volume_size:\n instance_name = self.instance_tag( 'Name' )\n cluster_ordinal = int( self.instance_tag( 'cluster_ordinal' ) )\n volume_name = '%s__%d' % (instance_name, cluster_ordinal)\n volume = EC2VolumeHelper( ec2=self.ec2,\n availability_zone=self.availability_zone,\n name=volume_name,\n size=ebs_volume_size,\n volume_type=\"gp2\" )\n # TODO: handle case where volume is already attached\n device_ext = '/dev/sdf'\n device = '/dev/xvdf'\n volume.attach( self.instance_id, device_ext )\n\n # Wait for inode to appear and make sure its a block device\n while True:\n try:\n assert stat.S_ISBLK( os.stat( device ).st_mode )\n break\n except OSError as e:\n if e.errno == errno.ENOENT:\n time.sleep( 1 )\n else:\n raise\n\n # Only format empty volumes\n volume_label = volume_label_hash( volume_name )\n if check_output( [ 'file', '-sL', device ] ).strip( ) == device + ': data':\n check_call( [ 'mkfs', '-t', 'ext4', device ] )\n check_call( [ 'e2label', device, volume_label ] )\n else:\n # If the volume is not empty, verify the file system label\n actual_label = check_output( [ 'e2label', device ] ).strip( )\n if actual_label != volume_label:\n raise AssertionError(\n \"Expected volume label '%s' (derived from '%s') but got '%s'\" %\n (volume_label, volume_name, actual_label) )\n current_mount_point = self.__mount_point( device )\n if current_mount_point is None:\n mkdir_p( self.persistent_dir )\n check_call( [ 'mount', device, self.persistent_dir ] )\n elif current_mount_point == self.persistent_dir:\n pass\n else:\n raise RuntimeError(\n \"Can't mount device %s on '%s' since it is already mounted on '%s'\" % (\n device, self.persistent_dir, current_mount_point) )\n else:\n # No persistent volume is attached and the root volume is off limits, so we will need\n # to place persistent data on the ephemeral volume.\n self.persistent_dir = self.ephemeral_dir",
"def test_upload_new_vdisk(self, mock_create_file):\n\n # traits are already set to use the REST API upload\n\n # First need to load in the various test responses.\n vg_orig = tju.load_file(UPLOAD_VOL_GRP_ORIG, self.adpt)\n vg_post_crt = tju.load_file(UPLOAD_VOL_GRP_NEW_VDISK, self.adpt)\n\n self.adpt.read.return_value = vg_orig\n self.adpt.update_by_path.return_value = vg_post_crt\n mock_create_file.return_value = self._fake_meta()\n\n n_vdisk, f_wrap = ts.upload_new_vdisk(\n self.adpt, self.v_uuid, self.vg_uuid, None, 'test2', 50,\n d_size=25, sha_chksum='abc123')\n\n # Ensure the create file was called\n mock_create_file.assert_called_once_with(\n self.adpt, 'test2', vf.FileType.DISK_IMAGE, self.v_uuid,\n f_size=50, tdev_udid='0300f8d6de00004b000000014a54555cd9.3',\n sha_chksum='abc123')\n\n # Ensure cleanup was called after the upload\n self.adpt.delete.assert_called_once_with(\n 'File', service='web',\n root_id='6233b070-31cc-4b57-99bd-37f80e845de9')\n self.assertIsNone(f_wrap)\n self.assertIsNotNone(n_vdisk)\n self.assertIsInstance(n_vdisk, stor.VDisk)",
"def update_volumes():\n print 'do something useful here'",
"def add_volume_info(self, vi):\n vol_num = vi.volume_number\n self.volume_info_dict[vol_num] = vi\n if self.fh:\n self.fh.write(vi.to_string() + \"\\n\")",
"def image_received(self, image_message):\n # Convert the image message to something usable by opencv\n # http://wiki.ros.org/cv_bridge/Tutorials/ConvertingBetweenROSImagesAndOpenCVImagesPython\n # Note that mono8 and bgr8 are the two image encodings expected by most OpenCV functions.\n cv_image = self.bridge.imgmsg_to_cv2(image_message, desired_encoding=\"bgr8\")\n image_data = extract_data(cv_image)\n linear_velocity, angular_velocity = self.clf.predict(image_data)\n self.cmd_vel = Twist(linear=Vector3(x=linear_velocity), angular=Vector3(z=angular_velocity))\n rospy.loginfo(self.cmd_vel)",
"def update_info(self):\n # Return if it is locked\n if self.lock:\n return\n # Hide again if it was shown due to an error message\n if self.was_hidden:\n self.was_hidden = False\n self.toggle()\n # Left side\n try:\n # Directory if library is focused\n if self.vimiv.library.treeview.is_focus():\n self.left_label.set_text(os.getcwd())\n # Position, name and thumbnail size in thumb mode\n elif self.vimiv.thumbnail.toggled:\n pos = self.vimiv.get_pos()\n name = os.path.basename(self.vimiv.paths[pos])\n message = \"{0}/{1} {2} {3}\". \\\n format(pos + 1, len(self.vimiv.paths),\n name, self.vimiv.thumbnail.size)\n self.left_label.set_text(message)\n # Image info in image mode\n else:\n name = os.path.basename(self.vimiv.paths[self.vimiv.index])\n message = \"{0}/{1} {2} [{3:.0f}%]\". \\\n format(self.vimiv.index + 1, len(self.vimiv.paths), name,\n self.vimiv.image.zoom_percent * 100)\n self.left_label.set_text(message)\n except:\n self.left_label.set_text(\"No open images\")\n # Center\n if not (self.vimiv.thumbnail.toggled or\n self.vimiv.library.treeview.is_focus()) and self.vimiv.paths:\n mark = \"[*]\" if self.vimiv.paths[self.vimiv.index] \\\n in self.vimiv.mark.marked else \"\"\n else:\n mark = \"\"\n if self.vimiv.slideshow.running:\n slideshow = \"[slideshow - {0:.1f}s]\".format(\n self.vimiv.slideshow.delay)\n else:\n slideshow = \"\"\n message = \"{0} {1}\".format(mark, slideshow)\n self.center_label.set_text(message)\n # Right side\n mode = self.get_mode()\n message = \"{0:15} {1:4}\".format(mode, self.vimiv.keyhandler.num_str)\n self.right_label.set_markup(message)\n # Window title\n try:\n name = os.path.basename(self.vimiv.paths[self.vimiv.index])\n self.vimiv.set_title(\"vimiv - \" + name)\n except:\n self.vimiv.set_title(\"vimiv\")\n # Size of statusbar for resizing image\n self.size = self.vimiv.statusbar.bar.get_allocated_height()",
"def test_items_are_mounted(self):\n response2 = self.client.get(\"/importer/design26/models.py\")\n self.assertEquals(response2.status_code, 200)",
"def on_station_admin_volume_host_path_added(\n self, func,\n ):\n self._set_event_handler(\"stations\")\n self._events.on_station_admin_volume_host_path_added(func)",
"def test_manage_volume_attachments(self, volume, instance, volumes_steps):\n volumes_steps.attach_instance(volume.name, instance.name)\n volumes_steps.detach_instance(volume.name, instance.name)",
"def get_image_path(self):\n\t\treturn call_sdk_function('PrlVmDev_GetImagePath', self.handle)",
"def drive_args(self, image, index):\n index_letter = chr(ord('a') + index)\n image_dir = \"%s/out/target/product/trusty\" % self.config.android\n return [\n \"-drive\",\n \"file=%s/%s.img,index=%d,if=none,id=hd%s,format=raw,snapshot=on\" %\n (image_dir, image, index, index_letter), \"-device\",\n \"virtio-blk-device,drive=hd%s\" % index_letter\n ]",
"def addSquareVignette(size,position,img):\n\n #img[position[1]:min(position[1]+size[1],img.shape[1]),position[0]:min(position[0]+size[0],img.shape[0])]*=0.5\n img[position[1]:position[1]+size[1],position[0]:position[0]+size[0]]*=0.5",
"def viewNMDinVMD(filename):\n\n vmd = pathVMD()\n if vmd:\n os.system('{0} -e {1}'.format(vmd, abspath(filename)))",
"def attach_volume(self, context, connection_info, instance, mountpoint,\n disk_bus=None, device_type=None, encryption=None):",
"def probe(self):\n log_method_call(self, self.name, exists=self.exists)\n if not self.exists or not self.disklabelSupported:\n return\n\n self._size = Size(self.partedPartition.getLength(unit=\"B\"))\n self.targetSize = self._size\n\n self._partType = self.partedPartition.type\n\n self._bootable = self.getFlag(parted.PARTITION_BOOT)",
"def plug_vifs(self, instance, network_info):\n raise NotImplementedError()",
"def generate_volume_info(self, NAME, path):\n info = {'tags': [], 'name': NAME, 'path': path, 'AttachedToVm': [],\n 'State': 'available', 'machine_path': None,\n 'time': datetime.datetime.now()}\n return info",
"def createLogicalVolume(self, vg, filesystem, name, mountpoint, size):\n lv = {}\n lv['command'] = 'create:logvol'\n lv['vg'] = vg\n lv['fs'] = filesystem\n lv['size'] = size - EXTENT_SIZE + 1\n lv['name'] = name\n lv['mountPoint'] = mountpoint\n lv['format'] = 'yes'\n\n return lv",
"def show(vol_path):\n name = \"qemu-img\"\n image = \"breqwatr/qemu-img:latest\"\n path = Path(vol_path)\n vol_abspath = path.absolute().__str__()\n run = f\"qemu-img info {vol_abspath}\"\n mount = f\"-v {vol_abspath}:{vol_abspath}\"\n cmd = f\"docker run --rm -it --name {name} {mount} {image} {run}\"\n shell(cmd)",
"def screeninfo(self):\n\t\tDevice().capture_screenshot()\n\t\tresolution = (self.width, self.height)\n\t\tdroid = AQMdroid('image.png', resolution, self.filename)\n\t\t\n\t\ttry:\n\t\t\tdroid.getorigin()\n\t\texcept Exception as e:\n\t\t\tScriptGen(self.filename).log_checker(self.log_handler)\n\t\t\tScriptGen(self.filename).log_checker(self.generate_log_file)\n\t\t\tprint \"\\nExit Point Triggered.\"\n\t\t\tsys.exit()",
"def test_attachment_deletion_allowed_vm_not_found(self, mock_get_server):\n mock_get_server.side_effect = nova.API.NotFound(404)\n attachment = self._get_attachment()\n self.volume_api.attachment_deletion_allowed(self.context, attachment)\n\n mock_get_server.assert_called_once_with(self.context, fake.INSTANCE_ID,\n fake.VOLUME_ID)",
"def shell_image_changed(self, image):\n self.set_image(image)",
"def shell_image_changed(self, image):\n self.set_image(image)",
"def initDevMsgImage(self):\n return",
"def tell(self):\n ...",
"async def async_locate(self, **kwargs: Any) -> None:\n await self._vacuum_bot.execute_command(PlaySound())",
"def tell(self):\n return self._upload_position",
"def test_upload_new_vdisk_coordinated(self, mock_create_file):\n\n # Override adapter's traits to use the coordinated local API\n self.adptfx.set_traits(fx.LocalPVMTraits)\n\n # First need to load in the various test responses.\n vg_orig = tju.load_file(UPLOAD_VOL_GRP_ORIG, self.adpt)\n vg_post_crt = tju.load_file(UPLOAD_VOL_GRP_NEW_VDISK, self.adpt)\n\n self.adpt.read.return_value = vg_orig\n self.adpt.update_by_path.return_value = vg_post_crt\n mock_create_file.return_value = self._fake_meta()\n\n n_vdisk, f_wrap = ts.upload_new_vdisk(\n self.adpt, self.v_uuid, self.vg_uuid, None, 'test2', 50,\n d_size=25, sha_chksum='abc123')\n\n # Ensure the create file was called\n mock_create_file.assert_called_once_with(\n self.adpt, 'test2', vf.FileType.DISK_IMAGE_COORDINATED,\n self.v_uuid, f_size=50,\n tdev_udid='0300f8d6de00004b000000014a54555cd9.3',\n sha_chksum='abc123')\n\n # Ensure cleanup was called after the upload\n self.adpt.delete.assert_called_once_with(\n 'File', service='web',\n root_id='6233b070-31cc-4b57-99bd-37f80e845de9')\n self.assertIsNone(f_wrap)\n self.assertIsNotNone(n_vdisk)\n self.assertIsInstance(n_vdisk, stor.VDisk)",
"def attach_volume(self, instance_name, device_path, mountpoint):\n return True",
"def show_il(self, update, context):\n\n # Send preliminary message\n msg = 'Some other message...'\n self.send_str(msg, update, context)\n\n # Send pic\n self.sendPic('il.png', update, context)",
"def on_station_admin_volume_host_path_removed(\n self, func,\n ):\n self._set_event_handler(\"stations\")\n self._events.on_station_admin_volume_host_path_removed(func)",
"def test_06_migrate_vm_live_attach_disk(self):\n \n global vm\n global data_disk_1\n data_disk_1 = self.helper.create_custom_disk(\n self.apiclient,\n {\"diskname\":\"StorPoolDisk\" },\n zoneid=self.zone.id,\n size = 5,\n miniops = 2000,\n maxiops = 5000,\n account=self.account.name,\n domainid=self.account.domainid,\n diskofferingid=self.disk_offerings.id,\n )\n\n self.debug(\"Created volume with ID: %s\" % data_disk_1.id)\n\n self.virtual_machine_live_migration_1.attach_volume(\n self.apiclient,\n data_disk_1\n )\n\n destinationHost, vol_list = self.helper.get_destination_pools_hosts(self.apiclient, vm, self.host)\n for v in vol_list:\n self.helper.check_storpool_volume_iops(self.spapi, v)\n vm = self.helper.migrateVm(self.apiclient, self.virtual_machine_live_migration_1, destinationHost)\n\n\n self.virtual_machine_live_migration_1.attach_volume(\n self.apiclient,\n self.volume\n )\n\n destinationHost, vol_list = self.helper.get_destination_pools_hosts(self.apiclient,vm, self.host)\n for v in vol_list:\n self.helper.check_storpool_volume_iops(self.spapi, v)\n vm = self.helper.migrateVm(self.apiclient, self.virtual_machine_live_migration_1, destinationHost)\n\n destinationHost, vol_list = self.helper.get_destination_pools_hosts(self.apiclient,vm, self.host)\n for v in vol_list:\n self.helper.check_storpool_volume_iops(self.spapi, v)",
"def driver(self):\n return '<static-vmedia>'",
"def test_11_migrate_vm_live_attach_disk_on_remote(self):\n \n global vm2\n global data_disk_2\n data_disk_2 = self.helper.create_custom_disk(\n self.apiclient,\n {\"diskname\":\"StorPoolDisk\" },\n zoneid=self.zone.id,\n size = 5,\n miniops = 2000,\n maxiops = 5000,\n account=self.account.name,\n domainid=self.account.domainid,\n diskofferingid=self.disk_offerings.id,\n )\n\n self.debug(\"Created volume with ID: %s\" % data_disk_2.id)\n\n self.virtual_machine_live_migration_2.attach_volume(\n self.apiclient,\n data_disk_2\n )\n\n destinationHost, vol_list = self.helper.get_destination_pools_hosts(self.apiclient, vm2, self.host_remote)\n for v in vol_list:\n self.helper.check_storpool_volume_iops(self.spapi, v)\n vm2 = self.helper.migrateVm(self.apiclient, self.virtual_machine_live_migration_2, destinationHost)\n\n\n self.virtual_machine_live_migration_2.attach_volume(\n self.apiclient,\n self.volume_2\n )\n\n destinationHost, vol_list = self.helper.get_destination_pools_hosts(self.apiclient, vm2, self.host_remote)\n for v in vol_list:\n self.helper.check_storpool_volume_iops(self.spapi, v)\n vm2 = self.helper.migrateVm(self.apiclient, self.virtual_machine_live_migration_2, destinationHost)",
"def imageFromFOV(self, view_x=None, view_y=None): # pragma: no cover\n # to be overloaded by the child class.\n return 0",
"def monitorVirtualMachine(self,node,vmid,command):\n post_data = {'command': str(command)}\n data = self.connect('post',\"nodes/%s/qemu/%s/monitor\" % (node,vmid), post_data)\n return data",
"def create_snapshot_helper(\n vm,\n target_version=None,\n drives=None,\n balloon=False,\n diff_snapshots=False,\n):\n if diff_snapshots is False:\n snapshot_type = SnapshotType.FULL\n else:\n # Version 0.24 and greater has Diff and balloon support.\n snapshot_type = SnapshotType.DIFF\n\n if balloon:\n # Add a memory balloon with stats enabled.\n vm.api.balloon.put(\n amount_mib=0, deflate_on_oom=True, stats_polling_interval_s=1\n )\n\n test_drives = [] if drives is None else drives\n\n # Add disks.\n for scratch in test_drives:\n # Add a scratch 64MB RW non-root block device.\n scratchdisk = drive_tools.FilesystemFile(tempfile.mktemp(), size=64)\n vm.add_drive(scratch, scratchdisk.path)\n\n # Workaround FilesystemFile destructor removal of file.\n scratchdisk.path = None\n\n for _ in range(4):\n vm.add_net_iface()\n\n vm.start()\n\n # Iterate and validate connectivity on all ifaces after boot.\n for i in range(4):\n exit_code, _, _ = vm.ssh_iface(i).run(\"sync\")\n assert exit_code == 0\n\n # Mount scratch drives in guest.\n for blk in test_drives:\n # Create mount point and mount each device.\n cmd = f\"mkdir -p /tmp/mnt/{blk} && mount /dev/{blk} /tmp/mnt/{blk}\"\n exit_code, _, _ = vm.ssh.run(cmd)\n assert exit_code == 0\n\n # Create file using dd using O_DIRECT.\n # After resume we will compute md5sum on these files.\n dd = f\"dd if=/dev/zero of=/tmp/mnt/{blk}/test bs=4096 count=10 oflag=direct\"\n exit_code, _, _ = vm.ssh.run(dd)\n assert exit_code == 0\n\n # Unmount the device.\n cmd = f\"umount /dev/{blk}\"\n exit_code, _, _ = vm.ssh.run(cmd)\n assert exit_code == 0\n\n snapshot = vm.make_snapshot(snapshot_type, target_version=target_version)\n print(\"========== Firecracker create snapshot log ==========\")\n print(vm.log_data)\n vm.kill()\n return snapshot",
"def test_aws_service_api_volume_patch(self):\n pass",
"def test_is_virtual0001(self, monkeypatch):\n\n def fake_collect_dmesg_lines(_):\n return [\n 'real mem = 17074860032 (16283MB)',\n 'avail mem = 16550350848 (15783MB)',\n 'virtio3 at pci0 dev 4 function 0 \"OpenBSD VMM Control\" rev 0x00',\n ]\n monkeypatch.setattr(OpenBSDPlatform, \"_collect_dmesg_lines\", fake_collect_dmesg_lines)\n platform = OpenBSDPlatform(None, None)\n assert platform.is_virtual()",
"def gdl_changed(self, signal_name, device_udi, *args):\n global copiant\n if not copiant: #If not filecopy started, regenerate partition_list\n if signal_name==\"DeviceAdded\":\n obj = self.bus.get_object(\"org.freedesktop.UDisks\", device_udi)\n dev = dbus.Interface(obj, 'org.freedesktop.UDisks.Device')\n if str(dev.GetPropertyStringList(\"info.capabilities\")).find(\"volume\")>=0: #If it's a volume\n self.llista_particions()\t#Reload partition list\n if signal_name==\"DeviceRemoved\":\n self.llista_particions()",
"def on_remote(self, action, state=False):\n action.set_state(state)\n self._remote_revealer.set_visible(state)\n self._remote_revealer.set_reveal_child(state)\n\n if state:\n self._http_api.send(HttpAPI.Request.VOL, \"state\", self.update_volume)",
"def do_disk_event(client, args):\n args.type = 'disk'\n do_event_show(client, args)",
"def current_image_changed(self, value):\n\n if (self._current_image_index != self._ui.imageFileNavigatorView.currentIndex()):\n self.all_models.current_id = self._file_table_model._filelist[self._ui.imageFileNavigatorView.currentIndex()]\n self._current_image_index = self._ui.imageFileNavigatorView.currentIndex()\n self._main_controller.update_models_from_file_table(self._file_table_model._data[self._current_image_index])\n #self._model.image_scale = float(self._file_table_model._data[self._current_image_index][3])\n self._ui.segmentationMaskFileDisplay.setText(self._model.segmentation_label)",
"def test_finish_resize_with_volumes(self):\n\n # create instance\n instance = self._create_fake_instance_obj()\n request_spec = objects.RequestSpec()\n\n # create volume\n volume = {'instance_uuid': None,\n 'device_name': None,\n 'id': uuids.volume,\n 'size': 200,\n 'attach_status': 'detached'}\n bdm = objects.BlockDeviceMapping(\n **{'context': self.context,\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': uuids.volume,\n 'instance_uuid': instance['uuid'],\n 'device_name': '/dev/vdc'})\n bdm.create()\n\n # stub out volume attach\n def fake_volume_get(self, context, volume_id, microversion=None):\n return volume\n self.stub_out('nova.volume.cinder.API.get', fake_volume_get)\n\n def fake_volume_check_availability_zone(self, context,\n volume_id, instance):\n pass\n self.stub_out('nova.volume.cinder.API.check_availability_zone',\n fake_volume_check_availability_zone)\n\n def fake_get_volume_encryption_metadata(self, context, volume_id):\n return {}\n self.stub_out('nova.volume.cinder.API.get_volume_encryption_metadata',\n fake_get_volume_encryption_metadata)\n\n orig_connection_data = {\n 'target_discovered': True,\n 'target_iqn': 'iqn.2010-10.org.openstack:%s.1' % uuids.volume_id,\n 'target_portal': '127.0.0.0.1:3260',\n 'volume_id': uuids.volume_id,\n }\n connection_info = {\n 'driver_volume_type': 'iscsi',\n 'data': orig_connection_data,\n }\n\n def fake_init_conn(self, context, volume_id, session):\n return connection_info\n self.stub_out('nova.volume.cinder.API.initialize_connection',\n fake_init_conn)\n\n def fake_attach(self, context, volume_id, instance_uuid, device_name,\n mode='rw'):\n volume['instance_uuid'] = instance_uuid\n volume['device_name'] = device_name\n self.stub_out('nova.volume.cinder.API.attach', fake_attach)\n\n # stub out virt driver attach\n def fake_get_volume_connector(*args, **kwargs):\n return {}\n self.stub_out('nova.virt.fake.FakeDriver.get_volume_connector',\n fake_get_volume_connector)\n\n def fake_attach_volume(*args, **kwargs):\n pass\n self.stub_out('nova.virt.fake.FakeDriver.attach_volume',\n fake_attach_volume)\n\n # attach volume to instance\n self.compute.attach_volume(self.context, instance, bdm)\n\n # assert volume attached correctly\n self.assertEqual(volume['device_name'], '/dev/vdc')\n disk_info = db.block_device_mapping_get_all_by_instance(\n self.context, instance.uuid)\n self.assertEqual(len(disk_info), 1)\n for bdm in disk_info:\n self.assertEqual(bdm['device_name'], volume['device_name'])\n self.assertEqual(bdm['connection_info'],\n jsonutils.dumps(connection_info))\n\n # begin resize\n flavor = self.default_flavor\n instance.task_state = task_states.RESIZE_PREP\n instance.save()\n self.compute.prep_resize(self.context, instance=instance,\n flavor=flavor,\n image={}, request_spec=request_spec,\n filter_properties={}, node=None,\n clean_shutdown=True, migration=None,\n host_list=[])\n\n # fake out detach for prep_resize (and later terminate)\n def fake_terminate_connection(self, context, volume, connector):\n connection_info['data'] = None\n self.stub_out('nova.volume.cinder.API.terminate_connection',\n fake_terminate_connection)\n\n migration = objects.Migration.get_by_instance_and_status(\n self.context.elevated(),\n instance.uuid, 'pre-migrating')\n self.compute.resize_instance(self.context, instance=instance,\n migration=migration, image={},\n # TODO(stephenfin): Why a JSON string?\n flavor=jsonutils.to_primitive(flavor),\n clean_shutdown=True, request_spec=request_spec)\n\n # assert bdm is unchanged\n disk_info 
= db.block_device_mapping_get_all_by_instance(\n self.context, instance.uuid)\n self.assertEqual(len(disk_info), 1)\n for bdm in disk_info:\n self.assertEqual(bdm['device_name'], volume['device_name'])\n cached_connection_info = jsonutils.loads(bdm['connection_info'])\n self.assertEqual(cached_connection_info['data'],\n orig_connection_data)\n # but connection was terminated\n self.assertIsNone(connection_info['data'])\n\n # stub out virt driver finish_migration\n def fake(*args, **kwargs):\n pass\n self.stub_out('nova.virt.fake.FakeDriver.finish_migration', fake)\n\n instance.task_state = task_states.RESIZE_MIGRATED\n instance.save()\n\n # new initialize connection\n new_connection_data = dict(orig_connection_data)\n new_iqn = 'iqn.2010-10.org.openstack:%s.2' % uuids.volume_id,\n new_connection_data['target_iqn'] = new_iqn\n\n def fake_init_conn_with_data(self, context, volume, session):\n connection_info['data'] = new_connection_data\n return connection_info\n self.stub_out('nova.volume.cinder.API.initialize_connection',\n fake_init_conn_with_data)\n\n self.compute.finish_resize(self.context,\n migration=migration,\n disk_info={}, image={}, instance=instance,\n request_spec=request_spec)\n\n # assert volume attached correctly\n disk_info = db.block_device_mapping_get_all_by_instance(\n self.context, instance['uuid'])\n self.assertEqual(len(disk_info), 1)\n for bdm in disk_info:\n self.assertEqual(bdm['connection_info'],\n jsonutils.dumps(connection_info))\n\n # stub out detach\n def fake_detach(self, context, volume_uuid):\n volume['device_path'] = None\n volume['instance_uuid'] = None\n self.stub_out('nova.volume.cinder.API.detach', fake_detach)\n\n # clean up\n self.compute.terminate_instance(self.context, instance, [])",
"def do_info (self, line) :\n\t\tprint\n\t\tprint get_info_string( self.__image )\n\t\tprint",
"def disk(self, disk):\n self._context[\"disk\"] = disk",
"def on_station_member_volume_host_path_added(\n self, func,\n ):\n self._set_event_handler(\"stations\")\n self._events.on_station_member_volume_host_path_added(func)",
"def test_upload_volume_to_image(self, volume, images_steps, volumes_steps):\n image_name = next(generate_ids('image', length=20))\n volumes_steps.upload_volume_to_image(volume.name, image_name)\n\n images_steps.page_images().table_images.row(\n name=image_name).wait_for_presence(30)\n images_steps.delete_image(image_name)",
"def test_ipython_robot_report_image(self):\n if PLATFORM == \"windows\":\n return\n\n self.activate_magic()\n\n with patch(\"jupyter_kernel_test.validate_message\", fake_validate):\n reply, outputs = self.execute_helper(code=MAGIC_IMAGE_TASK, timeout=60)\n assert reply[\"content\"][\"status\"] == \"ok\"\n assert any(\"image/png\" in output[\"content\"][\"data\"] for output in outputs)",
"def image_reference(self, image_id):\n info = self.image_info[image_id]\n if info[\"source\"] == \"vesicle\":\n return info[\"path\"]\n else:\n super(self.__class__, self).image_reference(image_id)",
"def loopattach(diskimg):\n result = subprocess.run(['losetup', '--find', diskimg], check=True)\n return loopdev(diskimg)",
"def _attach_volume(self):\n return []",
"def test_attachment_update_volume_in_error_state(self):\n volume_params = {'status': 'available'}\n\n vref = tests_utils.create_volume(self.context, **volume_params)\n aref = self.volume_api.attachment_create(self.context,\n vref,\n fake.UUID2)\n self.assertEqual(fake.UUID2, aref.instance_uuid)\n self.assertIsNone(aref.attach_time)\n self.assertEqual('reserved', aref.attach_status)\n self.assertEqual(vref.id, aref.volume_id)\n self.assertEqual({}, aref.connection_info)\n vref.status = 'error'\n vref.save()\n connector = {'fake': 'connector',\n 'host': 'somehost'}\n self.assertRaises(exception.InvalidVolume,\n self.volume_api.attachment_update,\n self.context,\n aref,\n connector)",
"def on_created(self, event):\n\t\tcurrentImage = event.src_path\n\n\t\t# Create array with directory information and file name\n\t\tfileInfo = event.src_path.split('/')\n\t\tprint fileInfo[2] + ' was ' + event.event_type + ' in ' + fileInfo[1]\n\n\t\tif fileInfo[1] == watchFolder:\n\t\t\tprint 'New image found. Processing...'\n\t\t\tself.handleImage(event)\n\t\telif fileInfo[1] == printFolder:\n\t\t\t# self.printFile(event) # Invoke printFile function with current image\n\t\t\tprint 'Placeholder text for image being printed. This line should call print function with image passed in.'",
"def test_ipam_vrfs_update(self):\n pass",
"def multimedia_path(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def launch_image_manager(self):\n \n self._image_manager_view = ImageManagerView(self._file_table_model, self._image_manager_controller)\n self._image_manager_view.show()",
"async def snapshot(self, msg, *args):\n if not Guard.has_permission(msg, 'attach_files'):\n await msg.channel.send(**{\n 'content': 'Cannot send images on this channel',\n 'reference': msg.to_reference(),\n 'mention_author': True,\n 'delete_after': 3,\n })\n return\n if not args:\n return\n args = list(args)\n if args[0] == 'world':\n include_world = True\n args.pop(0)\n else:\n include_world = False\n if args and args[0] == 'marker':\n show_marker = True\n args.pop(0)\n else:\n show_marker = False\n try:\n if len(args) == 2:\n lat, lng = map(float, args)\n zoom = 0\n elif len(args) == 3:\n lat, lng, zoom = map(float, args)\n else:\n return\n except:\n return\n if show_marker:\n map_controller = MapController(lat, lng, zoom, mlat=lat, mlng=lng)\n else:\n map_controller = MapController(lat, lng, zoom)\n if not map_controller.is_valid():\n await msg.channel.send(**{\n 'content': f'Invalid location {lat} {lng} {zoom}',\n 'reference': msg.to_reference(),\n 'mention_author': True,\n 'delete_after': 3,\n })\n return\n image = await map_controller.generate_snapshot(include_world=include_world)\n snapshot_id = map_controller.get_id().replace('_', ', ').replace('m', '')\n location_str = f'center at -{snapshot_id}'\n content = f'Here is a snapshot of that location ({location_str}).'\n await msg.channel.send(**{\n 'content': content,\n 'file': discord.File(image, filename=f'snapshot_{map_controller.get_id()}.png'),\n 'reference': msg.to_reference(),\n 'mention_author': True,\n })",
"def send_infected_file_list_to_admin():\n admins = User.objects.get_superusers()\n admin_emails = []\n for admin in admins:\n admin_emails.append(admin.email)\n c = {\n 'infected_files': list_of_infected_files,\n }\n send_html_email('Virus Detected',\n 'api3/sysadmin/virus_detected_files.html', c, None, admin_emails)",
"def get_os_virtual_hard_disk(self):\n if self.is_vm_image():\n return None\n i = self.virtual_environment[self.T_I]\n sa = self.virtual_environment[self.T_SA]\n c = self.virtual_environment[self.T_C]\n now = datetime.datetime.now()\n blob = self.BLOB_BASE % (i[self.T_I_N],\n str(now.year),\n str(now.month),\n str(now.day),\n str(now.hour),\n str(now.minute),\n str(now.second),\n str(current_thread().ident))\n media_link = self.MEDIA_BASE % (sa[self.T_SA_SN],\n sa[self.T_SA_UB],\n c,\n blob)\n os_virtual_hard_disk = OSVirtualHardDisk(i[self.T_I_N], media_link)\n return os_virtual_hard_disk",
"def vision_analysis(sender, **kwargs):\n instance = kwargs['instance']\n instance_query = Photo.objects.filter(pk=instance.pk)\n try:\n visual_report = get_visual_report(instance.image)\n instance_query.update(visual_report=visual_report)\n except:\n pass",
"def updateServePosition(self):\n pass",
"def vdisk_in_flashcopy(self, diskname):\n LOG.debug(\"Entering\")\n cmd = ''.join([\"svcinfo lsfcmap -filtervalue \",\n \"target_vdisk_name=%s -delim :\" % diskname])\n output = self._svc_command(cmd)[0]\n\n if len(output) != 2:\n return(100, None)\n\n header = output[0].split(':')\n values = output[1].split(':')\n index = header.index('progress')\n progress = values[index]\n index = header.index('id')\n map_id = values[index]\n\n LOG.debug(\"Exiting (progress = %s, map_id = %s)\" % (progress, map_id))\n return progress, map_id",
"def update_see(self):\n _LOGGER.debug(\"Updating device tracker: %s\", self._name)\n self._see(\n dev_id=self.dev_id,\n host_name=self.name,\n battery=self.battery,\n gps=(self.lat, self.lon),\n attributes={\n 'status': self.status,\n 'id': self.dev_id,\n 'name': self.name,\n CONF_ICON: self.icon,\n 'vendor': VENDOR,\n 'model': self.model})",
"def saveImage(self, event):\r\n fileWritten = self.image.writeFile()\r\n self.statusBar.SetStatusText(\"Saved {}\".format(fileWritten))",
"def set_image_path(self, sNewImagePath):\n\t\tcall_sdk_function('PrlVmDev_SetImagePath', self.handle, sNewImagePath)",
"def test_pvcvolume_attach(self):\n v = self.cs.volumes.get('pvcvolume')\n self.cs.volumes.attach(v, 1, '/dev/vdc')\n self.cs.assert_called('POST',\n '/volumes/pvcvolume/action')",
"def attach_volume(self):\n\n # Choose volume\n volume_id = self._choose_among_available_volumes()\n\n # Cancel\n if not volume_id:\n print 'Operation cancelled'\n return\n\n # Choose instance\n instance_id = self._choose_among_running_instances()\n\n # Cancel\n if not instance_id:\n print 'Operation cancelled'\n return\n\n # Attach the volume\n print '# Attaching volume \"%s\"!' % volume_id\n if self.compute.attach_volume(volume_id, instance_id):\n print 'The volume has been attached!'\n else:\n print 'The volume could not been attached'",
"def vf_nav(self, command):\n # Me.info_message(\"Current pose:\\n\" + str(self.current_pose.position))\n success = False\n try:\n # Pull new VF in case the location changed\n found = False\n while not found:\n found = \\\n self.get_inspection(client=self.clients[self.clients\n ['client']],\n inspection=self.inspection['name'])\n self.update_current_pose(string='robot')\n self.update_current_pose(string='marker')\n\n if 'pose' in command:\n Me.info_message(\"Pose command: \" + command + \" received.\")\n success = self.pose_nav(command=command.split(':')[1])\n elif 'path' in command:\n Me.info_message(\"Path command: \" + command + \" received.\")\n success = self.path_nav(command=command.split(':')[1])\n else:\n Me.error_message(\"Unknown pose or path command received.\")\n except IndexError:\n success = False\n except KeyboardInterrupt:\n raise KeyboardInterrupt\n return success",
"def handle_flight_received(self, event, sender, data):\n path = f'{self.media_directory}/tello-{datetime.datetime.now().strftime(self.date_fmt)}.jpg' \n with open(path, 'wb') as out_file:\n out_file.write(data)\n log.info('Saved photo to %s' % path)",
"def get_vm_image_name(self):\n return self.virtual_environment[self.T_I][self.T_I_N] if self.is_vm_image() else None",
"def _applyIVM(self,parlistentry):\n\n if parlistentry['ivmname'] != None:\n print \"Applying user supplied IVM files...\"\n\n #Parse the input file name to get the extension we are working on\n sciextn = parlistentry['image'].extn\n index = sciextn.find(',') \n extn = \"IVM,\"+sciextn[index+1:]\n \n #Open the mask image for updating and the IVM image\n mask = fileutil.openImage(parlistentry['image'].maskname,mode='update')\n ivm = fileutil.openImage(parlistentry['ivmname'],mode='readonly')\n\n ivmfile = fileutil.getExtn(ivm,extn)\n \n # Multiply the IVM file by the input mask in place. \n mask[0].data = ivmfile.data * mask[0].data\n\n mask.close()\n ivm.close()\n\n # Update 'wt_scl' parameter to match use of IVM file\n parlistentry['wt_scl'] = pow(parlistentry['exptime'],2)/pow(parlistentry['scale'],4)\n \n else:\n \n imageobj = parlistentry['image']\n \n print \"Automatically creating IVM files...\"\n # If no IVM files were provided by the user we will \n # need to automatically generate them based upon \n # instrument specific information.\n \n flat = imageobj.getflat()\n RN = imageobj.getReadNoiseImage()\n darkimg = imageobj.getdarkimg()\n skyimg = imageobj.getskyimg()\n \n ivm = (flat)**2/(darkimg+(skyimg*flat)+RN**2)\n\n #Open the mask image for updating\n mask = fileutil.openImage(parlistentry['image'].maskname,mode='update')\n \n # Multiply the IVM file by the input mask in place. \n mask[0].data = ivm * mask[0].data\n mask.close()\n \n # Update 'wt_scl' parameter to match use of IVM file\n parlistentry['wt_scl'] = pow(parlistentry['exptime'],2)/pow(parlistentry['scale'],4)",
"def update(self, image):\n indices = np.where(image[50:, :, 0] == 236)\n if len(indices[0]) > 0:\n x = (np.amin(indices[1]) + np.amax(indices[1])) // 2\n y = 50 + (np.amin(indices[0]) + np.amax(indices[0])) // 2\n self.velocity = (x - self.location[0], y - self.location[1])\n self.location = (x, y)",
"def test_pvresize_not_pv():\n pvdisplay = MagicMock(return_value=False)\n with patch(\"salt.modules.linux_lvm.pvdisplay\", pvdisplay):\n assert linux_lvm.pvresize(\"A\", override=False) == \"A is not a physical volume\"\n\n pvdisplay = MagicMock(return_value=False)\n with patch(\"salt.modules.linux_lvm.pvdisplay\", pvdisplay):\n assert linux_lvm.pvresize(\"A\") is True",
"def attachDiskToMinipad(self , disk):\n return",
"def add_known_image_points(self, point, latlonalt=None):\n if self.tracking:\n self.add_tracking_point(point)\n else:\n self.add_pnp_point(point, latlonalt)\n\n # Finally, draw the points.\n self.draw_known_points()",
"def imageinfo(self, *args, **kwargs):\n return self.logger.log(logging.INFO-1, *args, **kwargs)",
"def put(self, image_path):\n status = None\n try:\n current_app.mnt_mutex.acquire()\n mounted_disk = mount_image(image_path)\n\n if mounted_disk and mounted_disk.mountpoint is not None:\n current_app.logger.info(f\"Image mounted successfully: {image_path}\")\n current_app.mnt_mutex.release()\n return mounted_disk\n\n # TODO: refactor to not duplicate code in the mount_form in views.py\n except imagemounter.exceptions.SubsystemError:\n status = f\"Thumbtack was unable to mount {image_path} using the imagemounter Python library.\"\n except PermissionError:\n status = f\"Thumbtack does not have mounting privileges for {image_path}. Are you running as root?\"\n except UnexpectedDiskError:\n status = \"Unexpected number of disks. Thumbtack can only handle disk images that contain one disk.\"\n except NoMountableVolumesError:\n status = f\"No volumes in {image_path} were able to be mounted.\"\n except ImageNotInDatabaseError:\n status = f\"Cannot mount {image_path}. Image is not in Thumbtack database.\"\n\n current_app.mnt_mutex.release()\n current_app.logger.error(status)\n abort(400, message=str(status))",
"def virtual(**kwds):\n # get the virtual filesystem factory\n from .Filesystem import Filesystem\n\n # make one and return it\n return Filesystem(**kwds)",
"def flag_virtual(self, flag_virtual):\n self._flag_virtual = flag_virtual",
"def volume_type(self):\n return 'UNKNOWN'",
"def DirEV():\n\n target.BoundarySync()",
"def makeVolumeFromImage(self , imageid , initialconfig, instancename):\n self.initCreate(initialconfig)\n disk = self.createDisk(instancename)\n self.attachDiskToMinipad(disk )\n \n if self.startConversion(imageid , self.__server_ip , \"ImportVolume\") == False:\n return None\n\n self.detachDiskFromMinipad(disk)\n return str(disk)"
] | [
"0.53416073",
"0.50306785",
"0.49840182",
"0.49023584",
"0.48749703",
"0.48598105",
"0.48007807",
"0.4782172",
"0.47274348",
"0.46836528",
"0.46513668",
"0.46505046",
"0.4647543",
"0.46356696",
"0.4622129",
"0.46161428",
"0.46150172",
"0.46038243",
"0.4593726",
"0.4592042",
"0.45886222",
"0.45761573",
"0.45605978",
"0.45576236",
"0.4542211",
"0.45299044",
"0.45179075",
"0.45157865",
"0.44944018",
"0.44839922",
"0.44716975",
"0.447097",
"0.4470054",
"0.4466602",
"0.44621092",
"0.44436264",
"0.44427943",
"0.44411457",
"0.4431176",
"0.4431176",
"0.44306177",
"0.4429238",
"0.44279742",
"0.44218755",
"0.44168842",
"0.44121543",
"0.44099164",
"0.4407215",
"0.44019058",
"0.44018668",
"0.44005686",
"0.4400442",
"0.43985045",
"0.43977088",
"0.4394815",
"0.43866912",
"0.43842137",
"0.43749303",
"0.43709597",
"0.43707576",
"0.43660387",
"0.4360194",
"0.43592277",
"0.43572596",
"0.43502337",
"0.43479732",
"0.43478957",
"0.43469587",
"0.43444467",
"0.43382913",
"0.4328262",
"0.4327491",
"0.4325812",
"0.4325548",
"0.43220308",
"0.43206286",
"0.43198398",
"0.4313534",
"0.43100643",
"0.4308629",
"0.43067726",
"0.43049952",
"0.4297466",
"0.42964345",
"0.42892274",
"0.42779374",
"0.42777354",
"0.4274878",
"0.42706227",
"0.42702067",
"0.42652687",
"0.4262815",
"0.4262362",
"0.42588907",
"0.42581967",
"0.4255072",
"0.42510045",
"0.4250073",
"0.42481476",
"0.4247133"
] | 0.52932024 | 1 |
Ejects the Virtual Media image if one is inserted. | def eject_virtual_media(self, device='FLOPPY'):
response, vm_device_uri = self._get_vm_device_status(device)
# Check if virtual media is connected.
if response.get('Inserted') is False:
return
# Update required property
vm_settings = {}
vm_settings['Image'] = None
# perform the patch operation
status, headers, response = self._rest_patch(
vm_device_uri, None, vm_settings)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _handle_removed_media(self):\r\n if self.has_media():\r\n try:\r\n image = str(self.image)\r\n os.remove(image)\r\n except OSError:\r\n raise('Failure trying to remove image from filesystem.')\r\n return True",
"def eject_image(self, identity, device):\n device_info = self._get_device(identity, device)\n\n device_info['Image'] = ''\n device_info['ImageName'] = ''\n device_info['Inserted'] = False\n device_info['WriteProtected'] = False\n device_info['UserName'] = ''\n device_info['Password'] = ''\n\n self._devices.update({(identity, device): device_info})\n\n local_file = device_info.pop('_local_file', None)\n if local_file:\n try:\n os.unlink(local_file)\n\n self._logger.debug(\n 'Removed local file %(file)s for %(identity)s' % {\n 'identity': identity, 'file': local_file})\n except FileNotFoundError:\n # Ignore error as we are trying to remove the file anyway\n pass",
"def delete(self, *args, **kwargs):\n\t\tself.emo_img.delete(False)\n\t\tsuper(Emotion, self).delete(*args, **kwargs)",
"def __on_delete(self):\n self.image.delete()",
"def __on_delete(self):\n self.image.delete()",
"def eject_vmedia(self, device):\n device_name = VALID_VMEDIA_DEVICES.get(device)\n if not device_name:\n raise exception.InvalidInputError(\n \"Invalid device. Valid devices: cd0 or cd1 or hd0 or hd1.\")\n vmedia_partition_id = self.get_vmedia_device_uri(device_name)\n try:\n virtual_media_object = virtual_media.VirtualMedia(\n self._sushy._conn, vmedia_partition_id)\n virtual_media_object.eject_media()\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish System \"%(partition_id)s\" was '\n 'not found. Error %(error)s') %\n {'partition_id': vmedia_partition_id, 'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def clear_images(self):\r\n\r\n # audio = self.MutagenType(self['filename'])\r\n self.audio.pop(\"metadata_block_picture\", None)\r\n self.audio.pop(\"coverart\", None)\r\n self.audio.pop(\"coverartmime\", None)\r\n self.audio.save()",
"def clear_images(self):\r\n\r\n with translate_errors():\r\n self.audio.clear_pictures()\r\n self.audio.save()\r\n\r\n super().clear_images()",
"def clearImage(self):\n if self.hasImage():\n self.scene.removeItem(self._image)\n self._image = None",
"def delete(self, *args, **kwargs):\n self.image.storage.delete(self.image.name)\n delete(self.image)\n super().delete(*args, **kwargs)",
"def delete(self):\n\t\tif self.hasUdim:\n\t\t\tfor a in self.udimPaths:\n\t\t\t\ta.delete()\n\t\telse:\n\t\t\tsuper( textureFile, self ).delete()",
"def clean(self):\n if self.image:\n self.glance.images.delete(self.image['id'])\n\n if self.image_file:\n shutil.rmtree(self.download_path)",
"def kill_video(self):\n self.cap.truncate(0)\n cv2.destroyAllWindows()",
"def remove_image_file(sender, instance, **kwargs):\n # Pass false so ImageField doesn't save the model.\n instance.image.delete(False)",
"def __del__(self):\n if self.video:\n self.video.release()",
"def tearDown(self):\n self.image.delete()",
"def clearImage(self):\n if self.hasImage():\n self.scene.removeItem(self._pixmapHandle)\n self._pixmapHandle = None\n self.zoom=-1\n self.scene.clear()",
"def delete(self):\n os.remove(self.file_path)\n super(VideoFile, self).delete()",
"def destroy(self):\n url = \"/images/%s/destroy\" % (str(self.id))\n\n data = self._conn.request(url)\n\n log.debug(data)",
"def delete_image(self):\n Image.objects.get(id = self.id).delete()",
"def remove_image(self, imagename, del_img=False):\n os.system('rm -r {}.model'.format(imagename))\n os.system('rm -r {}.flux'.format(imagename))\n os.system('rm -r {}.psf'.format(imagename))\n os.system('rm -r {}.residual'.format(imagename))\n if del_img:\n os.system('rm -r {}.image'.format(imagename))",
"def __del__(self):\n self.vid.release()",
"def auto_delete_image_lecture_on_delete(sender, instance, **kwargs):\n if instance.file:\n instance.file.delete(save=False)",
"def photo_file_cleanup(sender, **kwargs):\n instance = kwargs.get('instance')\n filename = instance.path.url[1:]\n if os.path.exists(filename):\n os.remove(filename)",
"def delImg(img_name):\n img = Image.objects.raw({\"_id\": img_name}).first()\n img.delete()\n return",
"def clean(context):\n print(f\"Attempting to forcefully remove image {IMAGE_NAME}:{IMAGE_VER}\")\n context.run(f\"docker rmi {IMAGE_NAME}:{IMAGE_VER} --force\")\n print(f\"Successfully removed image {IMAGE_NAME}:{IMAGE_VER}\")",
"def removeScene(self):\n del self.scene, self.imgPixmapItem",
"def remove(self, done=False, verbose=True):\n return _image.image_remove(self, done, verbose)",
"def test_cambia_imagen_elimina_la_antigua(self):\n self.image_path = os.path.join(os.path.dirname(__file__), 'image_for_model2.jpg')\n image_path = self.image_obj.image.path\n self.image_obj.image = simple_uploaded_file(self.image_path)\n self.image_obj.save()\n\n self.assertNotEqual(image_path, self.image_obj.image.path)\n self.assertFalse(os.path.exists(image_path))",
"def __del__(self):\n self.video.release()",
"def delete(self, image_path=None):\n current_app.mnt_mutex.acquire()\n unmount_image(image_path)\n current_app.mnt_mutex.release()",
"def tearDown(self):\n self.recipe.image.delete()",
"def auto_delete_file_on_delete(sender, instance, **kwargs):\n if instance.img:\n if os.path.isfile(instance.img.path):\n os.remove(instance.img.path)",
"def clear_video(self):\n self.video_file = None\n self.video_parser = None\n\n self.video_box.delete(0, END)",
"def photo_edit_file_cleanup(sender, **kwargs):\n instance = kwargs.get('instance')\n filename = instance.upload.url[1:]\n if os.path.exists(filename):\n os.remove(filename)",
"def delAvatarImage(self, imgName = None): \n if imgName:\n self.window.remove_child(self.images[imgName])\n self.images[imgName].destroy()\n self.images[imgName] = None\n else:\n for key in self.images:\n if self.images[key]:\n self.window.remove_child(self.images[key])\n self.images[key].destroy()\n self.images[key] = None",
"def delete(self, *args, **kwargs):\n super(Image, self).delete(*args, **kwargs)",
"def delete(self, *args, **kwargs):\n self.image.delete()\n super(Recipe, self).delete(*args, **kwargs)",
"def unpropagateImage(self, dryrun):\n pass",
"def tearDown(self):\n if os.path.exists(settings.MEDIA_ROOT):\n shutil.rmtree(settings.MEDIA_ROOT)",
"def tearDown(self):\n if os.path.exists(settings.MEDIA_ROOT):\n shutil.rmtree(settings.MEDIA_ROOT)",
"def remove_images(self):\n hardware_components.log_method(self, \"remove_images\")\n communication_object = self._get_control_software().connection\n communication_object.remove_all()",
"def auto_delete_image_and_thumbnail_on_delete(sender, instance, **kwargs):\n if instance.image:\n if os.path.isfile(instance.image.path):\n os.remove(instance.image.path)\n\n if instance.thumbnail:\n if os.path.isfile(instance.thumbnail.path):\n os.remove(instance.thumbnail.path)\n\n return False",
"def _delete_image_volume(self,\n context: context.RequestContext,\n cache_entry: dict) -> None:\n volume = objects.Volume.get_by_id(context, cache_entry['volume_id'])\n\n # Delete will evict the cache entry.\n self.volume_api.delete(context, volume)",
"def delete(self, *args, **kwargs):\n\n user_n=str(self.sujeto.user.pk)\n img_name=str(self.sujeto.pk)\n \n file_path=settings.MEDIA_ROOT+self.path[len('/media'):]\n\n os.remove(file_path)\n super(img_to_show, self).delete(*args, **kwargs)",
"def delete_image(Name=None):\n pass",
"def del_image(self, name):\r\n if self.images is None or name not in self.images:\r\n return\r\n l = self.images\r\n self.images = None\r\n l.setdefault('/empties/', [])\r\n # push the number on the empties list\r\n l['/empties/'].append(l[name])\r\n del l[name]\r\n self.images = l",
"def test_delete_image(self):\n pass",
"async def eject(self) -> None:\n await self.dbus.Drive.call_eject(UDISKS2_DEFAULT_OPTIONS)",
"def test_remove_vm(self, instance_name):\n self.instances.pop(instance_name)",
"def detached(self, mind):\n self.remote = None\n players.remove(self)",
"def clean(self):\n tags = self.get_tags()\n for tag in tags:\n image_name = self.build_image_name(tag)\n try:\n self.client.images.remove(image_name, force=True)\n except Exception as ex:\n print('Cannot remove {}: {}'.format(tag, str(ex)))",
"def photo_delete(sender, instance, **kwargs):\n\tinstance.photo.delete(False)",
"def teardown():\n os.remove('green-dot.tif')\n os.remove('green-dot.jpg')\n os.remove('green-dot.png')",
"def remove(self):\n\t\tcall_sdk_function('PrlVmDevHdPart_Remove', self.handle)",
"def delete_video(self, video_ID): # WORKS\n try:\n self.cur.execute(\"DELETE FROM videos WHERE video_ID = \\\"{}\\\"\".format(video_ID))\n self.db.commit()\n os.remove('static/videos/' + str(video_ID) + '.mp4')\n os.remove('static/images/' + str(video_ID) + '.jpg')\n except:\n self.db.rollback()",
"def clean_up(self):\n cv2.destroyAllWindows()\n # self.vs.release()",
"def auto_delete_file_on_delete(sender, instance, **kwargs):\r\n if instance.image:\r\n if pathlib.Path(instance.image.path).is_file():\r\n pathlib.Path(instance.image.path).unlink()",
"def remove_stuff_post_error(self):\n os.system('rm %s' % self.destination)",
"def POST_delete_sr_img(self, res, name):\r\n # just in case we need to kill this feature from XSS\r\n if g.css_killswitch:\r\n return self.abort(403,'forbidden')\r\n c.site.del_image(name)\r\n c.site._commit()\r\n # hide the image and it's container\r\n res._hide(\"img-li_%s\" % name)\r\n # reset the status\r\n res._update('img-status', innerHTML = _(\"Deleted\"))",
"def update_path_image_on_remove(sender, **kwargs):\n instance = kwargs.pop('instance', None)\n action = kwargs.pop('action', None)\n pk_set = kwargs.pop('pk_set', None)\n if action == \"post_remove\" and len(instance.content.all()) != 0:\n content = Content.objects.get(pk=list(pk_set)[0])\n if instance.image == content.image or not instance.image:\n content = instance.content.all()[0]\n instance.image = content.image\n instance.save()",
"def clear_thumbnails(self):",
"def auto_delete_file_on_delete(sender, instance, **kwargs):\n if instance.img_png:\n if os.path.isfile(instance.img_png.path):\n os.remove(instance.img_png.path)",
"def test_upload_area_cleanup(self):\n vis2_uvid='urn:mrn:stm:service:instance:furuno:vis2'\n p = Path('import')\n files = list(p.glob('**/urn:mrn:s124:*'))\n for item in files:\n print(item)\n os.remove(str(item))\n pass",
"def do_command(self, args):\n imageops = dbops.Images()\n imageops.delete(args)",
"def delete_any_image(self, index):\n self.__accessed_image[index] = False\n self.__check_delete_images()",
"def test_deletion_of_user_photo_succeeds(self):\n\t\tself.name = 'media.png'\n\t\tself.image = File(open('static/img/media.png', 'rb'))\n\t\tself.created_image = UserPhoto(image=self.image, name=self.name, created_by=self.user)\n\t\tself.created_image.save()\t\t\t\n\t\tresponse = self.client.delete('/api/modify_photo/?id={}'.format(self.created_image.id))\n\t\tself.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)",
"def tearDown(self):\n Image.objects.all().delete()",
"def test_delete_collection_image(self):\n pass",
"def delete_file(sender, instance, *args, **kwargs):\n if instance.image:\n _delete_file(instance.image.path)",
"def auto_delete_file_on_delete(sender, instance, **kwargs):\n\n if instance.image:\n if os.path.isfile(instance.image.path):\n os.remove(instance.image.path)",
"def dropObject(player):\n for treasure in Treasure.List:\n if player.treasureCaptured:\n player.treasureCaptured = False\n treasure.x = player.x\n treasure.y = player.y\n treasure.img = pygame.image.load(Treasure.treasure_img[0])",
"def delete_image(self, image_id):\r\n self.vgbdtg.deleteObject(id=image_id)",
"def delete_shot(context, index):\n\n shot = context.scene.milkshake_shots[index]\n if shot.camera:\n bpy.data.cameras.remove(shot.camera)\n context.scene.milkshake_shots.remove(index)",
"def clear(self):\r\n if self.groundPath:\r\n self.groundPath.clearProjectTexture(self.stage)\r\n self.groundPath = None\r\n\r\n if self.lightPath:\r\n self.lightPath.detachNode()\r\n self.lightPath = None\r\n\r\n if self.cameraPath:\r\n self.cameraPath.detachNode()\r\n self.cameraPath = None\r\n self.camera = None\r\n self.lens = None\r\n\r\n if self.buffer:\r\n base.graphicsEngine.removeWindow(self.buffer)\r\n self.tex = None\r\n self.buffer = None",
"def CLS(self):\n\t\tself.video.clear()",
"def replay_delete(sender, instance, **kwargs):\n pass\n # Temporarily disabled\n\n #print(\"deleting file from S3\")\n # False so FileField doesn't save the model\n #instance.file.delete(False)",
"def remove_ssm(self, ssm_image):\n pass",
"def preview_file_cleanup(sender, **kwargs):\n\n instance = kwargs.get('instance')\n filename = instance.path.url[1:]\n if os.path.exists(filename):\n os.remove(filename)",
"def test_delete_image(self):\n # Upload the image first\n self.test_upload_image()\n im = ImageAttachment.objects.all()[0]\n r = post(self.client, 'upload.del_image_async', args=[im.id])\n\n eq_(200, r.status_code)\n json_r = json.loads(r.content)\n eq_('success', json_r['status'])\n eq_(0, ImageAttachment.objects.count())",
"def delete_thumbnail(self, thumbnail_name):",
"def onDestroy(self):\n\t\tDEBUG_MSG(\"Avatar::onDestroy: %i.\" % self.id)\n\n\t\tif self.accountEntity is not None:\n\t\t\tself.accountEntity.activeAvatar = None\n\t\t\tself.accountEntity = None",
"def clear_shots(context):\n\n for shot in context.scene.milkshake_shots:\n bpy.data.cameras.remove(shot.camera)\n context.scene.milkshake_shots.clear()",
"def test_clean_only_image(image):\n resource = models.MediaResource(image=image)\n\n resource.clean()",
"def remove():\n vbox = Vbox(env.vm_name)\n vbox.remove()",
"def uiClearImage(self):\n\n\t\traise foundations.exceptions.ProgrammingError(\n\t\t\"{0} | '{1}' attribute is not deletable!\".format(self.__class__.__name__, \"uiClearImage\"))",
"def remove(self, vPath):\n os.remove(self.ospath(vPath) )\n #notify(AssetRemoved(self, vpath))",
"async def remove_img(self, ctx: BBContext, url: str):\n\n con = await ctx.get_connection()\n query = f'DELETE FROM {TABLE_ARTS} WHERE url = $1'\n\n await con.execute(query, url)\n await ctx.tick(True)",
"def delete_from_inv(target_info, inventory, images_dir):\n target = inventory.get(target_info.get(\"target\"), {})\n target_name = target.get(\"target\")\n log(\"TRACE\", \"Removing contents of {} from inventory ({})\".format(\n target, target.get(\"contents\", [])))\n dirs_to_delete = []\n # Delete all of the files\n for image_fn in target.get(\"contents\", []):\n image_path = os.path.join(images_dir, image_fn)\n if os.path.isfile(image_path):\n os.remove(image_path)\n log(\"TRACE\", \"Deleted {} from inventory\".format(image_path))\n elif os.path.isdir(image_path):\n dirs_to_delete.append(image_fn)\n else: # File doesn't exist\n log(\"WARN\", \"File {} in inventory does not exist\".format(image_path))\n # Then delete all of the (empty) directories\n for dir_path in dirs_to_delete:\n try:\n if os.path.isdir(dir_path):\n os.removedirs(dir_path)\n except os.error as ex:\n log(\"ERROR\", \"Failed to delete dir: {}\".format(ex))\n inventory.pop(target_name, None)\n return True",
"def clear_unique_video(self):\n self.top_unique_video_entry.delete(0, END)\n self.top_unique_video_box.delete(0, END)\n self.unique_video_found = False\n self.missing_files_label.grid_remove()",
"def cleanup():\n cv2.release()\n cv2.destroyAllWindows()",
"def __del__(self):\n\t\tos.remove(self.imgpath)\n\t\tfor b in self.__class__.__bases__:\n\t\t\tb.__del__(self)",
"def delete_AllImgs(self):\n self.listImages.remove_all_imgs()",
"async def async_media_stop(self) -> None:\n await self._volumio.stop()",
"def delete(self, *args, **kwargs):\n self.file.delete(save=False)\n self.thumbnail.delete(save=False)\n\n super(File, self).delete(*args, **kwargs)",
"def test_recreate_deleted_item(self):\n v1 = make_video(media_id='1234', title='testing')\n set_resources_and_sync([v1])\n i1 = mpmodels.MediaItem.objects.filter(jwp__key=v1.key).first()\n self.assertIsNotNone(i1)\n self.assertEqual(i1.title, 'testing')\n i1.delete()\n\n set_resources_and_sync([v1])\n i1 = mpmodels.MediaItem.objects.filter(jwp__key=v1.key).first()\n self.assertIsNotNone(i1)\n self.assertEqual(i1.title, 'testing')",
"def tearDownClass(self):\n if (os.path.exists(MEDIA_ROOT+\"/gitload_test\")):\n shutil.rmtree(MEDIA_ROOT+\"/gitload_test\")",
"def remove_media_files_path(self):\n\n if dialog.MessageDialog(programName,\n (\"Removing the path of media files from the project file is irreversible.<br>\"\n \"Are you sure to continue?\"),\n [YES, NO]) == NO:\n return\n\n self.pj = project_functions.remove_media_files_path(self.pj)\n self.projectChanged = True",
"def removeTextureToOcc(self):\n\t\tshas = self._getShapes()\n\t\tfor sha in shas:\n\t\t\tif sha.a.texture_Occ.exists:\n\t\t\t\tsha.a.texture_Occ.delete()",
"def destroy(self):\n bullet_tools.tear_down_scene()"
] | [
"0.685616",
"0.6767739",
"0.6732249",
"0.66243017",
"0.66243017",
"0.6524107",
"0.63902587",
"0.6269782",
"0.61779577",
"0.60977596",
"0.60840386",
"0.6020783",
"0.5993817",
"0.5987346",
"0.5948459",
"0.59088904",
"0.5899906",
"0.5896522",
"0.5884364",
"0.58657366",
"0.5865728",
"0.58523566",
"0.5837543",
"0.5822411",
"0.57927376",
"0.5756274",
"0.57558244",
"0.5728832",
"0.5721798",
"0.57136536",
"0.5710271",
"0.5710109",
"0.56767774",
"0.56682444",
"0.5664822",
"0.56481355",
"0.5637406",
"0.5629608",
"0.5620289",
"0.56199086",
"0.56199086",
"0.5614244",
"0.5610177",
"0.560806",
"0.5605818",
"0.5604143",
"0.56016797",
"0.55739754",
"0.55630535",
"0.555973",
"0.55570346",
"0.5550535",
"0.55451953",
"0.5541036",
"0.55302787",
"0.55075395",
"0.5507531",
"0.5506922",
"0.5483512",
"0.5477761",
"0.54635507",
"0.545593",
"0.54522645",
"0.5440984",
"0.5437155",
"0.542762",
"0.5414299",
"0.5410484",
"0.5403032",
"0.53964615",
"0.53904134",
"0.53834015",
"0.5376746",
"0.5369489",
"0.5362186",
"0.5361712",
"0.53615594",
"0.5349179",
"0.53456646",
"0.53450525",
"0.5332728",
"0.53270185",
"0.5326085",
"0.53234965",
"0.5318717",
"0.5315524",
"0.53137124",
"0.53093445",
"0.5305735",
"0.5303386",
"0.5303096",
"0.5301982",
"0.5297461",
"0.5295176",
"0.5290674",
"0.5287381",
"0.5264454",
"0.52605563",
"0.5252053",
"0.52508336"
] | 0.7177198 | 0 |
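
The negatives in the record above cluster around one Django idiom: a `post_delete` signal receiver that unlinks image files left behind by a deleted model row. For illustration, a minimal self-contained sketch of that pattern — the `myapp` package and `Photo` model (with `image` and `thumbnail` FileFields) are hypothetical stand-ins, not names from the dataset:

import os

from django.db.models.signals import post_delete
from django.dispatch import receiver

from myapp.models import Photo  # hypothetical app and model


@receiver(post_delete, sender=Photo)
def auto_delete_files_on_delete(sender, instance, **kwargs):
    # Unlink both files from disk once the database row is gone; FieldFile
    # is falsy when no file was ever attached, so empty fields are skipped.
    for field_file in (instance.image, instance.thumbnail):
        if field_file and os.path.isfile(field_file.path):
            os.remove(field_file.path)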
Get details of persistent boot devices and their order | def _get_persistent_boot_devices(self):
    # Check if the BIOS resource exists.
headers_bios, bios_uri, bios_settings = self._check_bios_resource()
# Get the Boot resource.
boot_settings = self._get_bios_boot_resource(bios_settings)
# Get the BootSources resource
try:
boot_sources = boot_settings['BootSources']
except KeyError:
msg = ("BootSources resource not found.")
raise exception.IloError(msg)
try:
boot_order = boot_settings['PersistentBootConfigOrder']
except KeyError:
msg = ("PersistentBootConfigOrder resource not found.")
raise exception.IloCommandNotSupportedError(msg)
return boot_sources, boot_order | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def list_devices(self):\n return [x for x in self.devices.keys()]",
"def getbootinfo(self):\n self.mount()\n kernel = None\n inits = []\n for line in self.xlist(\"get-bootinfo\", IBASE)[1]:\n if line.startswith('+++'):\n kernel = line.split()[1]\n else:\n inits.append(line)\n self.unmount()\n if not inits:\n run_error(_(\"No initramfs found\"))\n return None\n if not kernel:\n run_error(_(\"GRUB problem:\\n\") + inits[0])\n return None\n return (kernel, inits)",
"def get_device_info(handle, timeout):\n device_info = dict()\n device_info['ls'] = ceph_mon_command(handle, 'device ls', timeout)\n\n return device_info",
"def load_devices():",
"def get_boot_record(disk):\n\n #TODO\n return \"Unknown\", \"Unknown\"",
"def get_device_file_dict():\n cmd = 'lshw -class disk'\n desc = \"description\"\n log_name = \"logical name\"\n serial = \"serial\"\n\n dev = []\n dev_list = []\n\n ret, output, err = run_gluster_command(cmd)\n output = output.decode('ASCII')\n dev_info = output.split('\\n')\n for line in dev_info:\n if re.search(desc, line):\n if dev:\n dev_list.append(dev)\n\n dev = []\n if re.search(log_name, line) or re.search(serial, line):\n temp = line.split(':')\n temp[1] = temp[1].strip(' ')\n dev.append(temp[1])\n dev_list.append(dev)\n for line in dev_list:\n print(line)",
"def get_devices(self):\n devices = self.get(\"event/device\")",
"def getDevices(self):\n\n devices = None\n\n for i in range(3):\n devices = subprocess.check_output(\"adb devices -l\", creationflags=self.createNoWindow)\n\n devices = devices.decode()\n deviceModel = re.findall(\"model:(.*) device\", devices)\n deviceID = re.findall(r\"(\\S+) {2}\", devices, flags=re.IGNORECASE)\n\n return deviceModel, deviceID",
"def load_devices(self):\n response = self.oauth.get(url=f'{self.base_url}/json/devices/list')\n\n result = response.json()['device']\n return [(device['id'], device['name'], device['state']) for device in result]",
"def test_get_bios_boot_mode_list(self):\n pass",
"def GetDeviceSerials(cls):\n cls._CheckAdb()\n adb_cmd = [cls._adb_command, _ADB_DEVICE]\n device_info = utils.CheckOutput(adb_cmd)\n serials = []\n # Skip the first line which is \"List of devices attached\". Each of the\n # following lines consists of the serial number, a tab character, and\n # the state. The last line is empty.\n for line in device_info.splitlines()[1:]:\n serial_state = line.split()\n if len(serial_state) > 1:\n serials.append(serial_state[0])\n return serials",
"def get_devices():\n devices, errors = [], []\n\n for path in hookenv.action_get('devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n errors.append('{}: Not absolute path.'.format(path))\n elif not os.path.exists(path):\n errors.append('{}: Device does not exist.'.format(path))\n else:\n devices.append(path)\n\n if errors:\n raise ZapDiskError(\", \".join(errors))\n\n return devices",
"def GetDeviceSerials(self):\n return self._device_serial_index.keys()",
"def get_devices(adb=DEFAULT_ADB):\n # Check that adb is running\n Device.__start_adb(adb)\n # Split by newline and remove first line (\"List of devices attached\")\n # TODO: surround with try/except?\n devices = subprocess.check_output(\n [adb, \"devices\", \"-l\"]).decode().split('\\n')[1:]\n tmp = {}\n for dev in devices:\n if dev:\n tmp[dev.split()[0]] = dev\n return tmp",
"def get_devices():\n devices = []\n for device_id in range(pm.lib.Pm_CountDevices()):\n devices.append(DeviceInfo(device_id))\n\n return devices",
"def get_devices(self):\n\n md_configstore = os.path.join(\n os.environ['VOLTTRON_HOME'],\n \"configuration_store/platform.driver.store\"\n )\n\n if not os.path.exists(md_configstore):\n _log.debug(\"No master driver currently on this platform.\")\n return {}\n\n statinfo = os.stat(md_configstore)\n\n if self._master_driver_stat_time is None or \\\n self._master_driver_stat_time != statinfo.st_mtime:\n self._master_driver_stat_time = statinfo.st_mtime\n\n # else no change in the md file and we have the same stat time.\n else:\n keys = list(self._devices.keys())\n\n for k in keys:\n new_key = self.get_renamed_topic(k)\n if new_key != k:\n self._devices[new_key] = self._devices[k]\n del self._devices[k]\n\n return self._devices\n\n _log.debug('Getting devices')\n config_list = self.vip.rpc.call(CONFIGURATION_STORE,\n 'manage_list_configs',\n 'platform.driver').get(timeout=5)\n\n _log.debug('Config list is: {}'.format(config_list))\n devices = defaultdict(dict)\n\n for cfg_name in config_list:\n # Skip as we are only looking to do devices in this call.\n if not cfg_name.startswith('devices/'):\n continue\n\n device_config = self.vip.rpc.call('config.store', 'manage_get',\n 'platform.driver',\n cfg_name,\n raw=False).get(timeout=5)\n _log.debug('DEVICE CONFIG IS: {}'.format(device_config))\n\n reg_cfg_name = device_config.get(\n 'registry_config')[len('config://'):]\n _log.debug('Reading registry_config file {}'.format(\n reg_cfg_name\n ))\n registry_config = self.vip.rpc.call('config.store',\n 'manage_get', 'platform.driver',\n reg_cfg_name,\n raw=False).get(timeout=5)\n _log.debug('Registry Config: {}'.format(registry_config))\n\n points = []\n for pnt in registry_config:\n points.append(pnt['Volttron Point Name'])\n\n devices[cfg_name]['points'] = points\n\n return devices",
"def device_info(self) -> Dict[str, any]:\n return {\n \"identifiers\": {(DOMAIN, self._config[CONF_SERIAL])},\n \"name\": self._config[CONF_NAME],\n \"manufacturer\": \"Bosch\",\n }",
"def get_persistent_boot_device(self):\n system = self._get_host_details()\n try:\n # Return boot device if it is persistent.\n if system['Boot']['BootSourceOverrideEnabled'] == 'Continuous':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n # Check if we are in BIOS boot mode.\n # There is no resource to fetch boot device order for BIOS boot mode\n if not self._is_boot_mode_uefi():\n return None\n\n # Get persistent boot device order for UEFI\n boot_sources, boot_devices = self._get_persistent_boot_devices()\n\n boot_string = \"\"\n try:\n for source in boot_sources:\n if (source[\"StructuredBootString\"] == boot_devices[0]):\n boot_string = source[\"BootString\"]\n break\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n if 'HP iLO Virtual USB CD' in boot_string:\n return 'CDROM'\n\n elif ('NIC' in boot_string or\n 'PXE' in boot_string or\n \"iSCSI\" in boot_string):\n return 'NETWORK'\n\n elif common.isDisk(boot_string):\n return 'HDD'\n\n else:\n return None",
"def devices_dict(self):\n return self.devices.dict",
"def devices(self):\n return self._recordings.keys()",
"def devices(self):\n return self.enumerate_devices()",
"def device_info(self):\n return {\n \"identifiers\": {(DOMAIN, self._uuid)},\n \"name\": self._device.device_data[self._uuid]['name'],\n \"manufacturer\": \"Nest Labs\",\n \"model\": self._device.device_data[self._uuid]['model'],\n }",
"def listDevices(self):\n count = 0\n for device in self:\n count += 1\n printLog(\"Device \" + str(count) + \": '%s %s (%s, %s, %s)'\" % (\n device.make, device.model, device.deviceId, device.androidVersion, device.operator))\n if device.idle:\n printLog(\"[Idle]\")\n else:\n printLog(\"[Busy]\")",
"def collect_existing_mounts():\n result = {}\n for mount in sh.mount().stdout.decode('utf-8').splitlines():\n tokens = mount.split()\n if tokens[1] == 'on' and tokens[0].startswith('/dev/'):\n device = tokens[0][5:]\n result[tokens[2]] = device\n return result",
"def list_devices():\n return _lib.SeaTeaseAPI().list_devices()",
"def get_devices():\n global managed_objects\n global devices_by_adr\n \n devices_by_adr = {}\n \n r = re.compile(\"\\/org\\/bluez\\/hci\\d*\\/dev\\_(.*)\")\n # e.g., match a string like this:\n # /org/bluez/hci0/dev_58_C9_35_2F_A1_EF\n \n for key, value in managed_objects.items():\n # print(\"key=\", key)\n m = r.match(key)\n if m is not None:\n dev_str = m.group(1) # we have a device string!\n # print(\"dev_str=\", dev_str)\n # let's flatten that dict a bit\n devices_by_adr[dev_str] = value[\"org.bluez.Device1\"]",
"def get_devices(self):\n return self.api_request('GET', self.url + '/device', {})",
"def device_info(self):\n return {\n \"identifiers\": {(DOMAIN, self.unique_id)},\n \"name\": self.name,\n \"manufacturer\": \"Brightech\",\n }",
"def get_device_map():\n ret = []\n vlist = subprocess.check_output(['ceph-volume', 'lvm', 'list',\n '--format=json'])\n for osd_id, data in json.loads(vlist.decode('utf8')).items():\n osd_id = normalize_osd_id(osd_id)\n for elem in data:\n for device in elem['devices']:\n ret.append({'id': osd_id, 'path': device})\n return ret",
"def getDevices(i):\n devices = Account['KTFLR'].devices('monpressprod')\n device = devices[i]\n return device",
"def get_device_info(self): # pylint: disable=no-self-use\r\n serial = get_serial_number()\r\n model = get_model()\r\n\r\n return {\r\n \"serial\": serial,\r\n \"model\": model,\r\n }",
"def get_detection_info(self):\n persistent_dict = self.props[\"persistent_identifiers\"]\n persistent_dict[\"model\"] = self._get_system_hardware()\n if persistent_dict[\"model\"] not in usb_config.CAMBRIONIX_PORT_MAP:\n raise errors.DeviceError(\n \"Model {} not supported. Supported models: {}\".format(\n persistent_dict[\"model\"],\n \",\".join(usb_config.CAMBRIONIX_PORT_MAP.keys())))\n persistent_dict[\"hub_port_name\"] = self.communication_address\n persistent_dict[\"console_port_name\"] = self.communication_address\n persistent_dict[\"total_ports\"] = self.total_ports\n persistent_dict[\n \"ftdi_serial_number\"] = usb_utils.get_serial_number_from_path(\n self.communication_address)\n\n # Cambrionix does not have a separate serial number from the one shown\n # in the /dev/serial/by-id/... name.\n persistent_dict[\"serial_number\"] = self.props[\"persistent_identifiers\"][\n \"ftdi_serial_number\"]\n\n self.props[\"options\"] = {}\n\n return persistent_dict, self.props[\"options\"]",
"def get_devices(self):\n devices = []\n for i in self.devices:\n devices.append(self.devices[i])\n\n return devices",
"def devices(self):\n return {k:v for k, v in self._data.items() \n if v[\"type\"] == \"DEVICE\"}",
"def get_info():\n\n global DISKINFO\n DISKINFO = {}\n\n #Run diskutil list to get disk names.\n runcmd = subprocess.Popen(\"diskutil list -plist\", stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)\n\n #Get the output.\n stdout = runcmd.communicate()[0]\n\n #Parse the plist (Property List).\n global PLIST\n\n PLIST = plistlib.loads(stdout)\n\n #Find the disks.\n for disk in PLIST[\"AllDisks\"]:\n #Run diskutil info to get disk info.\n runcmd = subprocess.Popen(\"diskutil info -plist \"+disk, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)\n stdout = runcmd.communicate()[0]\n\n #Parse the plist (Property List).\n PLIST = plistlib.loads(stdout)\n\n #Check if the disk is a partition.\n disk_is_partition = is_partition(disk)\n\n if not disk_is_partition:\n #These are devices.\n get_device_info(disk)\n\n else:\n #These are Partitions. Fix for disks w/ more than 9 partitions.\n host_disk = \"/dev/\"+disk.split(\"s\")[0]+\"s\"+disk.split(\"s\")[1]\n get_partition_info(disk, host_disk)\n\n #Check we found some disks.\n if not DISKINFO:\n raise RuntimeError(\"No Disks found!\")",
"async def get_discovered_device_names(self):\n json = self._api_call(\"app/monitors/%s/devices\" % self.sense_monitor_id)\n self._devices = await [entry[\"name\"] for entry in json]\n return self._devices",
"def list_devices():\r\n DeviceManagerCLI.BuildDeviceList()\r\n return DeviceManagerCLI.GetDeviceList()",
"def list_devices(self):\n response = self.oauth.get(url=f'{self.base_url}/json/devices/list')\n\n result = response.json()['device']\n for device in result:\n print(device)",
"async def find_devices() -> List[DeviceInfo]:\n return await Discovery.search_devices()",
"def device_info(self) -> Dict[str, any]:\n return {\n \"identifiers\": {(DOMAIN, self.config_entry.entry_id)},\n \"name\": NAME,\n \"model\": VERSION,\n \"manufacturer\": NAME,\n }",
"def getDeviceList(self):\n return defer.succeed(self.discovered)",
"def _get_device_list(self):\n if self.app.config.cloud_type == 'ec2':\n # c5/m5 on AWS mounts EBS volumes as NVMe:\n # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/nvme-ebs-volumes.html\n # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html\n for itype in ['c5', 'm5']:\n if itype in self.app.cloud_interface.get_type():\n return frozenset(glob('/dev/nvme[0-26]n1'))\n return frozenset(glob('/dev/*d[a-z]'))",
"def get_ceph_drv_info():\n disks_info = []\n stat = psutil.disk_io_counters(perdisk=True)\n for drv in get_ceph_disk():\n info = CEPHDiskInfo(drv)\n disk = basename(drv)\n if disk in stat:\n info.rd_cnt = stat[disk].read_count\n info.wr_cnt = stat[disk].write_count\n info.rd_bytes = stat[disk].read_bytes\n info.wr_bytes = stat[disk].write_bytes\n info.rd_time = stat[disk].read_time\n info.wr_time = stat[disk].write_time\n\n disks_info.append(info)\n\n return disks_info",
"def get_devices_information():\n global nipper_xml\n devices = {}\n\n for device in nipper_xml.findall('./information/devices/device'):\n if DEBUG:\n print \"\\t\" + note + \"Name: %s\" % device.get('name')\n print \"\\t\" + note + \"Type: %s\" % device.get('type')\n print \"\\t\" + note + \"OS: %s\" % device.get('os')\n print \"\\t\" + note + \"OS Version: %s\" % device.get('osversion')\n devices[device.attrib.get('name')] = {'name': device.get('name'),\n 'type': device.get('type'),\n 'os': device.get('os'),\n 'osversion': device.get('osversion')}\n if DEBUG:\n print info + \"Device Object:\"\n print devices\n raw_input(warn + \"Press enter to continue\")\n return devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def device_info(self):\n return {\n \"identifiers\": {(DOMAIN, self.coordinator.data[\"deviceID\"])},\n \"name\": self.coordinator.data[\"deviceName\"],\n \"manufacturer\": self.coordinator.data[\"deviceManufacturer\"],\n \"model\": self.coordinator.data[\"deviceModel\"],\n \"sw_version\": self.coordinator.data[\"appVersionName\"],\n }",
"def device_info(self):\n return {\n \"identifiers\": {(DOMAIN, self._room_id)},\n \"name\": self._room_name,\n \"manufacturer\": MANUFACTURER,\n \"model\": MODELS[self._module_type],\n }",
"def get_device_information(self):\n return self.mycam.devicemgmt.GetDeviceInformation()",
"def getDeviceInfo():\n url = \"https://api.roblox.com/reference/deviceinfo\"\n r = requests.get(url)\n j = json.loads(r.text)\n return j",
"def get_fields(self):\n return self._devices.keys()",
"async def async_get_attached_devices(self) -> list:\n if self._method_version == 1:\n return await self.hass.async_add_executor_job(\n self._api.get_attached_devices\n )\n\n return await self.hass.async_add_executor_job(self._api.get_attached_devices_2)",
"def get_devices():\n names = devices.list()\n if request.args.get('full') is not None:\n data = {d: devices.show(d) for d in names}\n else:\n data = names\n return jsonify({'devices': data})",
"def get_keys(self):\n with self.lock:\n return list(self.devices.keys())",
"def get_available_devices(self):\n available_devices = []\n try:\n out = self.get_output(\"devices\")\n except Exception as e:\n logger.error(e)\n else:\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n return available_devices",
"def get_available_devices(self):\n try:\n out = self.get_output(\"devices\")\n except BluetoothctlError, e:\n print(e)\n return None\n else:\n available_devices = []\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n\n return available_devices",
"async def async_get_devices(self) -> list[dict[str, Any]]:\n return await self.aiolivisi.async_get_devices()",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def listUsbHidDevices():\n \n for d in hid.enumerate():\n keys = list(d.keys())\n keys.sort()\n for key in keys:\n print(\"%s : %s\" % (key, d[key]))\n print()",
"def get_devices(self): \n devices = []\n \n # get all the keys from the dictionary\n keys = self.SCPI_Data.keys()\n \n # extract the device specifier\n dev_keys = [key.split(':')[0] for key in keys]\n \n # iterate through the devices\n for key in dev_keys:\n if (key not in devices) and (key != 'SUP'):\n # this is a unique device, add it to the list\n devices = devices + [key]\n # end if\n # end for\n \n devices = devices + ['SIM']\n \n # replace the GPS if present with its longer name\n devices = ['GPSRM' if device == 'GPS' else device \n for device in devices]\n return devices",
"def get_devices():\n devices = []\n for path in hookenv.action_get('osd-devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n raise Error('{}: Not absolute path.'.format(path))\n devices.append(path)\n return devices",
"def get_devices():\n try:\n with open(DEVICES, 'r') as f:\n data = json.load(f)['devices']\n except (IOError, ValueError) as err:\n raise SwiftlmCheckFailure('Failure opening %s: %s' % (DEVICES, err))\n\n devices = []\n for d in data:\n l = d.get('label', LABEL_CHECK_DISABLED)\n devices.append(Device(\n device=d['name'],\n mount=MOUNT_PATH+d['swift_drive_name'],\n label=l\n ))\n\n return devices",
"def get_devices_summary():\n\n # This function was created to replace get_devices_information\n # because it wasn't detecting virtual systems in Palo Alto Virtual Systems\n global nipper_xml\n devices = {}\n headings = []\n\n # Add the table headings to a list\n for h in nipper_xml.findall(\"./summary/table/[@ref='SCOPE.AUDITDEVICELIST.TABLE']/headings/heading\"):\n if h not in headings:\n headings.append(h.text)\n\n for device in nipper_xml.findall(\"./summary/table/[@ref='SCOPE.AUDITDEVICELIST.TABLE']/tablebody/tablerow\"):\n values = []\n for i in device.findall('./tablecell/item'):\n if i not in values:\n values.append(i.text)\n if DEBUG:\n print \"\\t\" + note + \"%s: %s\" % (headings[headings.index('Name')], values[headings.index('Name')])\n print \"\\t\" + note + \"%s: %s\" % (headings[headings.index('Device')], values[headings.index('Device')])\n print \"\\t\" + note + \"%s: %s\" % (headings[headings.index('OS')], values[headings.index('OS')].split(\" \")[0])\n print \"\\t\" + note + \"%s: %s\" % (headings[headings.index('OS')], values[headings.index('OS')].split(\" \")[1])\n devices[values[headings.index('Name')]] = {'name': values[headings.index('Name')],\n 'type': values[headings.index('Device')],\n 'os': values[headings.index('OS')].split(' ')[0],\n 'osversion': values[headings.index('OS')].split(' ')[1]\n }\n\n if DEBUG:\n print info + \"Device Object:\"\n print devices\n raw_input(warn + \"Press enter to continue\")\n return devices",
"async def get_discovered_device_data(self):\n json = self._api_call(\"monitors/%s/devices\" % self.sense_monitor_id)\n return await json",
"def test_get_pci_device_list(self):\n pass",
"def bt_get_discovered_devices(self):\n discovered_bluetooth_device = []\n try:\n self.bt_radio('on')\n if '8.1' in self.phone_info.os_version:\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n time.sleep(10)\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_discovered_device_list,\n 10)\n element_list = self.find_elements(self.driver.appium_driver,\n self.bluetooth_discovered_device_list,\n 1)\n\n # To add connected bluetooth device name in list\n for index in range(len(element_list)):\n discovered_bluetooth_device.append(\n str(element_list[index].text.replace('\\u200e', '')))\n logger.debug(\"List of Discovered Devices:\" + str(\n discovered_bluetooth_device))\n except Exception as e:\n self.take_screenshot(self.driver.appium_driver,\n '__retry_to_bt_connect')\n logger.error(\"No device are discoverable .\")\n logger.error(repr(e))\n return discovered_bluetooth_device",
"def GetAllDevices(self):\n\n return list(self.YieldAllDevices())",
"def get_boot_order(rfo, api=1, unit=1):\n\n url = f\"/redfish/v{api}/systems/{unit}/bios\"\n res = rfo.get(url)\n if res.status != 200:\n print(f\"Error: {res.status}: {res.read}\")\n return \"XXX\"\n booturl = res.dict['Oem']['Hpe']['Links']['Boot']['@odata.id']\n res = rfo.get(booturl)\n if res.status != 200:\n print(f\"HTTP Fail Status: {res.status} - {res.read}\")\n return \"XXX\"\n return res.dict['DefaultBootOrder']",
"def gather_metric(self):\n device_dict = {}\n # Delete first and last line of output of adb.\n output = self._shell.run(self.COMMAND).stdout\n\n # Example Line, Device Serial Num TAB Phone Status\n # 00bd977c7f504caf\toffline\n if output:\n for line in output.split('\\n'):\n spl_line = line.split('\\t')\n # spl_line[0] is serial, [1] is status. See example line.\n device_dict[spl_line[0]] = spl_line[1]\n\n return {self.DEVICES: device_dict}",
"def preferred(self):\n\t\t\t#1: sort by memory size\t- use better smarts when needed\n\t\t\tdevices = sorted(self.devices(), reverse=True)\n\t\t\treturn devices",
"def devices(self):\n\n return self.__devices",
"def discovered_drones(self):\n return self.devices",
"def get_device_info(disk):\n\n host_disk = \"/dev/\"+disk\n DISKINFO[host_disk] = {}\n DISKINFO[host_disk][\"Name\"] = host_disk\n DISKINFO[host_disk][\"Type\"] = \"Device\"\n DISKINFO[host_disk][\"HostDevice\"] = \"N/A\"\n DISKINFO[host_disk][\"Partitions\"] = []\n DISKINFO[host_disk][\"Vendor\"] = get_vendor(disk)\n DISKINFO[host_disk][\"Product\"] = get_product(disk)\n DISKINFO[host_disk][\"RawCapacity\"], DISKINFO[host_disk][\"Capacity\"] = get_capacity()\n DISKINFO[host_disk][\"Description\"] = get_description(disk)\n DISKINFO[host_disk][\"Flags\"] = get_capabilities(disk)\n DISKINFO[host_disk][\"Partitioning\"] = get_partitioning(disk)\n DISKINFO[host_disk][\"FileSystem\"] = \"N/A\"\n DISKINFO[host_disk][\"UUID\"] = \"N/A\"\n DISKINFO[host_disk][\"ID\"] = get_id(disk)\n DISKINFO[host_disk][\"BootRecord\"], DISKINFO[host_disk][\"BootRecordStrings\"] = get_boot_record(disk)\n\n return host_disk",
"def device_list():\n click.echo(\"\\nRetrieving the devices.\")\n\n url = base_url + \"/device\"\n\n response = requests.get(url=url, headers=header,verify=False)\n if response.status_code == 200:\n items = response.json()['data']\n else:\n print(\"Failed to get list of devices \" + str(response.text))\n exit()\n\n headers = [\"Host-Name\", \"Device Type\", \"Device ID\", \"System IP\", \"Site ID\", \"Version\", \"Device Model\"]\n table = list()\n\n for item in items:\n tr = [item.get('host-name'), item.get('device-type'), item.get('uuid'), item.get('system-ip'), item.get('site-id'), item.get('version'), item.get('device-model')]\n table.append(tr)\n try:\n click.echo(tabulate.tabulate(table, headers, tablefmt=\"fancy_grid\"))\n except UnicodeEncodeError:\n click.echo(tabulate.tabulate(table, headers, tablefmt=\"grid\"))",
"def device_info(self) -> Optional[Dict[str, Any]]:\n return {ATTR_NAME: self.name, \"identifiers\": {(DOMAIN, self._device.device_id)}}",
"def devices_json():\n return [\n {\n \"macAddress\": \"84:F3:EB:21:90:C4\",\n \"lastData\": {\n \"dateutc\": 1546889640000,\n \"baromrelin\": 30.09,\n \"baromabsin\": 24.61,\n \"tempinf\": 68.9,\n \"humidityin\": 30,\n \"date\": \"2019-01-07T19:34:00.000Z\",\n },\n \"info\": {\"name\": \"Home\", \"location\": \"Home\"},\n },\n {\n \"macAddress\": \"84:F3:EB:21:90:C4\",\n \"lastData\": {\n \"dateutc\": 1546889640000,\n \"baromrelin\": 30.09,\n \"baromabsin\": 24.61,\n \"tempinf\": 68.9,\n \"humidityin\": 30,\n \"date\": \"2019-01-06T19:34:00.000Z\",\n },\n \"info\": {\"name\": \"Home\", \"location\": \"Home\"},\n },\n ]",
"def list_devices(cls):\n # get all matching devices\n return usb.core.find(\n find_all=True,\n custom_match=lambda dev: (\n dev.idVendor == cls.vendor_id and dev.idProduct in cls.product_ids\n ),\n )",
"def getDevices():\n \n scannedDevices = list()\n \n proc = subprocess.Popen('bluetoothctl scan on', shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=8192, universal_newlines=True)\n \n time.sleep(10)\n \n proc.stdin.write('scan off')\n \n try:\n stdout, stderr = proc.communicate()\n except subprocess.TimeoutExpired:\n proc.kill()\n stdout, stderr = proc.communicate()\n\n ansiEscapePattern = re.compile(r'\\x1B[@-_][0-?]*[ -/]*[@-~]')\n stdout = ansiEscapePattern.sub('', stdout)\n \n #deviceNamePattern = re.compile('^\\[NEW\\] Device [A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2} ')\n \n for line in stdout.split('\\n'):\n if '[NEW] Device' in line:\n device = list()\n device.append(line[13:31])\n device.append(line[31:])\n scannedDevices.append(device)\n \n return scannedDevices",
"def device_info(self):\n return {\n \"name\": get_device_name(self._data, 0),\n \"identifiers\": {(DOMAIN, get_identifier(self._data, 0))},\n \"manufacturer\": MANUFACTURER,\n \"model\": self._data.wiserhub.system.product_type,\n \"sw_version\": self._data.wiserhub.system.firmware_version,\n \"via_device\": (DOMAIN, self._data.wiserhub.system.name),\n }",
"def device_info(node):\n\n if \"cpu\" in node and \"total_mbufs\" in node[\"cpu\"]:\n total_mbufs = node[\"cpu\"][\"total_mbufs\"]\n if total_mbufs != 0:\n print(\"Total Number of Buffers: {}\".format(total_mbufs))\n\n vpp = VppPCIUtil(node)\n vpp.get_all_devices()\n linkup_devs = vpp.get_link_up_devices()\n if len(linkup_devs):\n print(\"\\nDevices with link up (can not be used with VPP):\")\n vpp.show_vpp_devices(linkup_devs, show_header=False)\n # for dev in linkup_devs:\n # print (\" \" + dev)\n kernel_devs = vpp.get_kernel_devices()\n if len(kernel_devs):\n print(\"\\nDevices bound to kernel drivers:\")\n vpp.show_vpp_devices(kernel_devs, show_header=False)\n else:\n print(\"\\nNo devices bound to kernel drivers\")\n\n dpdk_devs = vpp.get_dpdk_devices()\n if len(dpdk_devs):\n print(\"\\nDevices bound to DPDK drivers:\")\n vpp.show_vpp_devices(dpdk_devs, show_interfaces=True, show_header=False)\n else:\n print(\"\\nNo devices bound to DPDK drivers\")\n\n other_devs = vpp.get_other_devices()\n if len(other_devs):\n print(\"\\nDevices not bound to Kernel or DPDK drivers:\")\n vpp.show_vpp_devices(other_devs, show_interfaces=True, show_header=False)\n else:\n print(\"\\nNo devices not bound to Kernel or DPDK drivers\")\n\n vpputl = VPPUtil()\n interfaces = vpputl.get_hardware(node)\n if interfaces == {}:\n return\n\n print(\"\\nDevices in use by VPP:\")\n\n if len(interfaces.items()) < 2:\n print(\"None\")\n return\n\n print(\n \"{:30} {:4} {:4} {:7} {:4} {:7}\".format(\n \"Name\", \"Numa\", \"RXQs\", \"RXDescs\", \"TXQs\", \"TXDescs\"\n )\n )\n for intf in sorted(interfaces.items()):\n name = intf[0]\n value = intf[1]\n if name == \"local0\":\n continue\n numa = rx_qs = rx_ds = tx_qs = tx_ds = \"\"\n if \"numa\" in value:\n numa = int(value[\"numa\"])\n if \"rx queues\" in value:\n rx_qs = int(value[\"rx queues\"])\n if \"rx descs\" in value:\n rx_ds = int(value[\"rx descs\"])\n if \"tx queues\" in value:\n tx_qs = int(value[\"tx queues\"])\n if \"tx descs\" in value:\n tx_ds = int(value[\"tx descs\"])\n\n print(\n \"{:30} {:>4} {:>4} {:>7} {:>4} {:>7}\".format(\n name, numa, rx_qs, rx_ds, tx_qs, tx_ds\n )\n )",
"def device_info(self):\n return {\n \"name\": get_device_name(self._data, self._actuator.id),\n \"identifiers\": {(DOMAIN, get_identifier(self._data, self._actuator.id))},\n \"via_device\": (DOMAIN, self._data.wiserhub.system.name),\n }",
"def get_devices(self):\n\n \"\"\"\n # Note: This code is no longer required with the latest spt updates.\n # But that said, leaving for now so I don't risk breaking folks!\n if not self._use_lsscsi:\n message = \"Find Number of IOM's\"\n command = \"lsscsi | fgrep enclo | egrep 'HGST|WDC' | wc -l\"\n pdata = self._run_command(command=command, message=message, logger=self._logger, shell=True)\n ioms = (int)(pdata['stdout'].strip())\n if ioms > 1:\n self._use_lsscsi = True\n if not self._use_lsscsi and os.path.exists('/etc/multipath.conf'):\n self._use_lsscsi = True\n \"\"\"\n # Allow above logic or options to override lsscsi vs. spt usage.\n if not self._use_lsscsi or self._force_spt:\n self.get_devices_spt()\n else:\n self.get_devices_lsscsi()\n return",
"def get_device_info(platform_path: str):\n device_name = os.path.basename(platform_path)\n try:\n platform_file = next(\n glob.iglob(os.path.join(glob.escape(platform_path), 'hw', f'*.[xd]sa')))\n except StopIteration as e:\n raise ValueError('cannot find platform file for %s' % device_name) from e\n with zipfile.ZipFile(platform_file) as platform:\n # platform_file must end with .xsa or .dsa, thus [:-4]\n with platform.open(os.path.basename(platform_file)[:-4] +\n '.hpfm') as metadata:\n platform_info = ET.parse(metadata).find('./xd:component/xd:platformInfo',\n XILINX_XML_NS)\n if platform_info is None:\n raise ValueError('cannot parse platform')\n clock_period = platform_info.find(\n \"./xd:systemClocks/xd:clock/[@xd:id='0']\", XILINX_XML_NS)\n if clock_period is None:\n raise ValueError('cannot find clock period in platform')\n part_num = platform_info.find('xd:deviceInfo', XILINX_XML_NS)\n if part_num is None:\n raise ValueError('cannot find part number in platform')\n return {\n 'clock_period':\n clock_period.attrib['{{{xd}}}period'.format(**XILINX_XML_NS)],\n 'part_num':\n part_num.attrib['{{{xd}}}name'.format(**XILINX_XML_NS)]\n }",
"def get_devices(self):\n return get_devices(self.api_key)",
"def device_info(self) -> Dict[str, any]:\n return {\n \"identifiers\": {(DOMAIN, self._device.unique_id)},\n \"name\": self._device.name,\n \"manufacturer\": \"Apple\",\n \"model\": self._device.device_model,\n }",
"def do_list(self, _):\n devices = []\n for source in self._target.devices:\n devices.append({\n 'name': source.device['name'],\n 'path': source.device['path'],\n })\n return devices",
"def device_information(self):\n return self._device_information",
"def device_info(self):\n return {\n \"identifiers\": {(DOMAIN, self.unique_id)},\n \"manufacturer\": \"Somfy\",\n \"name\": self.name,\n \"model\": self.tahoma_device.widget,\n \"sw_version\": self.tahoma_device.type,\n }",
"def find_stick():\n out = subprocess.check_output(\n \"gdbus introspect --system --dest org.freedesktop.UDisks \"\n \"--object-path /org/freedesktop/UDisks/devices --recurse \"\n \"--only-properties\".split())\n devs = zip(*((re.match(r\".* = '?(.*?)'?;\", x).group(1)\n for x in out.splitlines()\n if \"DriveConnectionInterface =\" in x\n or \"DeviceIsPartition =\" in x\n or \"DeviceFile = \" in x),)*3)\n try:\n return next(dev[2] for dev in devs if dev[0] == 'usb'\n and dev[1] == 'true')\n except StopIteration:\n return None",
"def device_info(self):\n return {\n \"identifiers\": {(DOMAIN, self.device_id)},\n \"name\": self.name,\n \"manufacturer\": self.manufacturer,\n \"model\": self._device.device_model,\n \"sw_version\": \"\",\n \"via_device\": (DOMAIN, self._controller_ip),\n }",
"def uuid_table():\n device_table = popen(\"blkid\").read().splitlines()\n devices = {}\n for device in device_table:\n dev = device.split(\":\")[0].split(\"/\")[2]\n uuid = device.split('UUID=\"')[1].split('\"')[0]\n devices[dev] = uuid\n return devices",
"def _get_data(self):\n devices = []\n try:\n if not self.router_client.login():\n self.hass.states.set(f\"{DOMAIN}.statusmsg\", self.router_client.statusmsg)\n _LOGGER.warning(\"Login failed: {0}:{1}@{2}\".format(self.router_client.username, self.router_client.password,self.router_client.host))\n self.router_client.logout()\n return devices\n\n devices_json = self.router_client.get_devices_response()\n finally:\n self.router_client.logout()\n\n self.hass.states.set(f\"{DOMAIN}.scanning\", devices_json != False)\n\n if devices_json != False:\n for device in devices_json:\n # _LOGGER.debug(\"Device: {0}\".format(device))\n dev = Device(\n device['HostName'].replace('未知设备', 'Unknown'),\n device['IPAddress'],\n device['MACAddress'],\n device['Active'],\n ICONS.get(device['IconType'])\n )\n # _LOGGER.debug(\"Device: {0}\".format(dev))\n devices.append(dev)\n return devices\n else:\n return []",
"def device_info(self):\n info = {\n \"identifiers\": {\n (\n DOMAIN,\n \"serial-number\",\n self._ctrl.data[\"routerboard\"][\"serial-number\"],\n \"switch\",\n \"Queue\",\n )\n },\n \"manufacturer\": self._ctrl.data[\"resource\"][\"platform\"],\n \"model\": self._ctrl.data[\"resource\"][\"board-name\"],\n \"name\": f\"{self._inst} Queue\",\n }\n return info",
"def mpt():\n lbl_drives = ['device','mountpoint','fstype']\n disks = [d[0:3] for d in psutil.disk_partitions()]\n drives = [dict(zip(lbl_drives,ds)) for ds in disks]\n return [d['mountpoint']for d in drives]",
"def device_info(self):\n info = {\n \"identifiers\": {\n (\n DOMAIN,\n \"serial-number\",\n self._ctrl.data[\"routerboard\"][\"serial-number\"],\n \"switch\",\n \"Scripts\",\n )\n },\n \"manufacturer\": self._ctrl.data[\"resource\"][\"platform\"],\n \"model\": self._ctrl.data[\"resource\"][\"board-name\"],\n \"name\": f\"{self._inst} Scripts\",\n }\n return info",
"def get_mbed_devices(self):\n upper_ven = [ven.upper() for ven in self.usb_vendor_list]\n mounts_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\MountedDevices')\n for point, label, _ in self.iter_vals(mounts_key):\n printable_label = label.decode('utf-16le', 'ignore')\n if ('DosDevices' in point and\n any(v in printable_label.upper() for v in upper_ven)):\n logger.debug(\"Found Mount point %s with usb ID %s\",point,\n printable_label)\n yield (point, printable_label)\n else:\n logger.debug(\"Skipping Mount point %r label %r\", point, label)",
"def onboot_names(self):\n ext_names = []\n for ext in self.extensions.values():\n if not ext.onboot:\n continue\n ext_names.append(ext.name)\n return ', '.join(sorted(ext_names))",
"def getTopDevices(self):\n logger.debug('Getting the list of Top Devices...')\n elements = get_elements_by_css(\"a[data-query-prepend='device eq']\")\n devices = []\n for element in elements:\n devices.append(get_text(element))\n return devices"
] | [
"0.65933275",
"0.64047486",
"0.63290364",
"0.62824804",
"0.627704",
"0.6253434",
"0.6251941",
"0.62083673",
"0.62029696",
"0.6141751",
"0.61345845",
"0.6098163",
"0.60664326",
"0.60396963",
"0.6038037",
"0.6015366",
"0.6013724",
"0.6004768",
"0.6000633",
"0.5989116",
"0.5956543",
"0.59317744",
"0.5927542",
"0.5926935",
"0.59190327",
"0.5915958",
"0.5910873",
"0.5909324",
"0.59072244",
"0.59046733",
"0.5866258",
"0.58635867",
"0.584618",
"0.58455527",
"0.58325577",
"0.5828837",
"0.58150417",
"0.58025724",
"0.5798326",
"0.5795508",
"0.5771192",
"0.5762079",
"0.5735956",
"0.5731544",
"0.57299405",
"0.57299405",
"0.57299405",
"0.57299405",
"0.57275087",
"0.57245404",
"0.5721453",
"0.5712881",
"0.5706563",
"0.56992364",
"0.5698556",
"0.56934977",
"0.56920755",
"0.56884974",
"0.5682456",
"0.56768686",
"0.5670175",
"0.5663443",
"0.5661399",
"0.56610507",
"0.56545573",
"0.5650122",
"0.564699",
"0.56406933",
"0.5637491",
"0.56265527",
"0.5621051",
"0.5614758",
"0.5610592",
"0.5607792",
"0.56057245",
"0.5603859",
"0.56003135",
"0.55980086",
"0.5588501",
"0.5577672",
"0.5574143",
"0.55639887",
"0.55434865",
"0.5542609",
"0.55411345",
"0.5541047",
"0.55340147",
"0.5532096",
"0.5531457",
"0.55217695",
"0.5518222",
"0.5514601",
"0.5514572",
"0.5512918",
"0.5512896",
"0.5509623",
"0.5495411",
"0.549059",
"0.5490533",
"0.5483219"
] | 0.78109515 | 0 |
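
For illustration, a hedged sketch of how the two structures returned by `_get_persistent_boot_devices` above can be correlated — resolving each `PersistentBootConfigOrder` entry to its human-readable `BootString`. The `ilo_client` argument is a hypothetical instance of whatever class defines the method; only the return contract shown in the document (a BootSources list and a persistent-order list) is assumed:

def describe_boot_order(ilo_client):
    # Returns [(structured_boot_string, boot_string), ...] in persistent order.
    boot_sources, boot_order = ilo_client._get_persistent_boot_devices()
    by_structured = {s.get("StructuredBootString"): s.get("BootString")
                     for s in boot_sources}
    return [(entry, by_structured.get(entry, "<unknown>"))
            for entry in boot_order]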
Get current persistent boot device set for the host | def get_persistent_boot_device(self):
system = self._get_host_details()
try:
# Return boot device if it is persistent.
if system['Boot']['BootSourceOverrideEnabled'] == 'Continuous':
device = system['Boot']['BootSourceOverrideTarget']
if device in DEVICE_RIS_TO_COMMON:
return DEVICE_RIS_TO_COMMON[device]
return device
except KeyError as e:
msg = "get_persistent_boot_device failed with the KeyError:%s"
raise exception.IloError((msg) % e)
# Check if we are in BIOS boot mode.
# There is no resource to fetch boot device order for BIOS boot mode
if not self._is_boot_mode_uefi():
return None
# Get persistent boot device order for UEFI
boot_sources, boot_devices = self._get_persistent_boot_devices()
boot_string = ""
try:
for source in boot_sources:
if (source["StructuredBootString"] == boot_devices[0]):
boot_string = source["BootString"]
break
except KeyError as e:
msg = "get_persistent_boot_device failed with the KeyError:%s"
raise exception.IloError((msg) % e)
if 'HP iLO Virtual USB CD' in boot_string:
return 'CDROM'
    elif ('NIC' in boot_string or
          'PXE' in boot_string or
          'iSCSI' in boot_string):
return 'NETWORK'
elif common.isDisk(boot_string):
return 'HDD'
else:
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_persistent_boot_devices(self):\n # Check if the BIOS resource if exists.\n headers_bios, bios_uri, bios_settings = self._check_bios_resource()\n\n # Get the Boot resource.\n boot_settings = self._get_bios_boot_resource(bios_settings)\n\n # Get the BootSources resource\n try:\n boot_sources = boot_settings['BootSources']\n except KeyError:\n msg = (\"BootSources resource not found.\")\n raise exception.IloError(msg)\n\n try:\n boot_order = boot_settings['PersistentBootConfigOrder']\n except KeyError:\n msg = (\"PersistentBootConfigOrder resource not found.\")\n raise exception.IloCommandNotSupportedError(msg)\n\n return boot_sources, boot_order",
"def get_boot_device(self):\n root_vol = None\n boot_vol = None\n for volume in self.volumes:\n if not volume.partitions:\n continue\n for partition in volume.partitions:\n if partition.mount_point == \"/\":\n root_vol = volume\n elif partition.mount_point == '/boot':\n boot_vol = volume\n\n if not boot_vol:\n return root_vol\n return boot_vol",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def get_ephemeral_dev(self):\n ephem_name = None\n dev_prefixs = ['s','v','xd','xvd']\n if not self.root_device_type == 'ebs':\n try:\n self.assertFilePresent('/dev/' + str(self.rootfs_device))\n return self.rootfs_device\n except:\n ephem_name = 'da'\n else:\n ephem_name = 'db'\n devs = self.get_dev_dir()\n for prefix in dev_prefixs:\n if str(prefix+ephem_name) in devs:\n return str('/dev/'+prefix+ephem_name)\n raise Exception('Could not find ephemeral device?')",
"def get_root_device():\r\n return utils.system_output('rootdev -s -d')",
"def get_boot_device(self):\n operation = 'get_boot_device'\n try:\n boot_device = self.sp_manager.get_boot_device()\n return boot_device\n except UcsException as ex:\n print(_(\"Cisco client exception: %(msg)s.\"), {'msg': ex})\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def get_devices(self):\n\n md_configstore = os.path.join(\n os.environ['VOLTTRON_HOME'],\n \"configuration_store/platform.driver.store\"\n )\n\n if not os.path.exists(md_configstore):\n _log.debug(\"No master driver currently on this platform.\")\n return {}\n\n statinfo = os.stat(md_configstore)\n\n if self._master_driver_stat_time is None or \\\n self._master_driver_stat_time != statinfo.st_mtime:\n self._master_driver_stat_time = statinfo.st_mtime\n\n # else no change in the md file and we have the same stat time.\n else:\n keys = list(self._devices.keys())\n\n for k in keys:\n new_key = self.get_renamed_topic(k)\n if new_key != k:\n self._devices[new_key] = self._devices[k]\n del self._devices[k]\n\n return self._devices\n\n _log.debug('Getting devices')\n config_list = self.vip.rpc.call(CONFIGURATION_STORE,\n 'manage_list_configs',\n 'platform.driver').get(timeout=5)\n\n _log.debug('Config list is: {}'.format(config_list))\n devices = defaultdict(dict)\n\n for cfg_name in config_list:\n # Skip as we are only looking to do devices in this call.\n if not cfg_name.startswith('devices/'):\n continue\n\n device_config = self.vip.rpc.call('config.store', 'manage_get',\n 'platform.driver',\n cfg_name,\n raw=False).get(timeout=5)\n _log.debug('DEVICE CONFIG IS: {}'.format(device_config))\n\n reg_cfg_name = device_config.get(\n 'registry_config')[len('config://'):]\n _log.debug('Reading registry_config file {}'.format(\n reg_cfg_name\n ))\n registry_config = self.vip.rpc.call('config.store',\n 'manage_get', 'platform.driver',\n reg_cfg_name,\n raw=False).get(timeout=5)\n _log.debug('Registry Config: {}'.format(registry_config))\n\n points = []\n for pnt in registry_config:\n points.append(pnt['Volttron Point Name'])\n\n devices[cfg_name]['points'] = points\n\n return devices",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def get_boot_driver(self):\n return self._boot_driver",
"def _get_device_list(self):\n if self.app.config.cloud_type == 'ec2':\n # c5/m5 on AWS mounts EBS volumes as NVMe:\n # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/nvme-ebs-volumes.html\n # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html\n for itype in ['c5', 'm5']:\n if itype in self.app.cloud_interface.get_type():\n return frozenset(glob('/dev/nvme[0-26]n1'))\n return frozenset(glob('/dev/*d[a-z]'))",
"def boot_configuration(self):\n bootconfs = self.get_logical_configuration(gdef.BOOT_LOG_CONF)\n if not bootconfs:\n return bootconfs\n assert len(bootconfs) == 1 # Only one boot configuration can exist for each device instance.\n return bootconfs[0]",
"def get_boot_record(disk):\n\n #TODO\n return \"Unknown\", \"Unknown\"",
"def getbootinfo(self):\n self.mount()\n kernel = None\n inits = []\n for line in self.xlist(\"get-bootinfo\", IBASE)[1]:\n if line.startswith('+++'):\n kernel = line.split()[1]\n else:\n inits.append(line)\n self.unmount()\n if not inits:\n run_error(_(\"No initramfs found\"))\n return None\n if not kernel:\n run_error(_(\"GRUB problem:\\n\") + inits[0])\n return None\n return (kernel, inits)",
"def device(self):\n hw = self.hw()\n if hw: return hw.device()",
"def get_device(l):\n if not l.device:\n l.device = find_device()\n setup_device(l.device)\n return l.device",
"def find_stick():\n out = subprocess.check_output(\n \"gdbus introspect --system --dest org.freedesktop.UDisks \"\n \"--object-path /org/freedesktop/UDisks/devices --recurse \"\n \"--only-properties\".split())\n devs = zip(*((re.match(r\".* = '?(.*?)'?;\", x).group(1)\n for x in out.splitlines()\n if \"DriveConnectionInterface =\" in x\n or \"DeviceIsPartition =\" in x\n or \"DeviceFile = \" in x),)*3)\n try:\n return next(dev[2] for dev in devs if dev[0] == 'usb'\n and dev[1] == 'true')\n except StopIteration:\n return None",
"def get_device(self):\n return self.parent.get_device()",
"def get_device_param(host_id):\n global sqlalche_obj\n sqlalche_obj.sql_alchemy_db_connection_open()\n device_list_param = []\n device_list_param = sqlalche_obj.session.query(\n Hosts.ip_address, Hosts.mac_address, Hosts.device_type_id, Hosts.config_profile_id).filter(Hosts.host_id == host_id).all()\n if device_list_param == None:\n device_list_param = []\n sqlalche_obj.sql_alchemy_db_connection_close()\n return device_list_param",
"def collect_existing_mounts():\n result = {}\n for mount in sh.mount().stdout.decode('utf-8').splitlines():\n tokens = mount.split()\n if tokens[1] == 'on' and tokens[0].startswith('/dev/'):\n device = tokens[0][5:]\n result[tokens[2]] = device\n return result",
"def device(self):\n return self._vars[0].device",
"def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]",
"def list_devices(self):\n return [x for x in self.devices.keys()]",
"def load_devices():",
"def finddevice():\n\n return next((device for device in [\"xpu\"] if hasattr(torch, device) and getattr(torch, device).is_available()), None)",
"def get_device_list_by_path(self):\n by_path_dir = \"/dev/disk/by-path/\"\n disk_list = os.listdir(by_path_dir)\n usb_set = set()\n for device in disk_list:\n if device.find(\"usb\") != -1:\n path = os.readlink(by_path_dir + device)\n abs_path = os.path.abspath(by_path_dir + path)\n usb_set.add(abs_path)\n return usb_set",
"def detect_host(self):\n try:\n host_list = run(f\"{self.path} devices\", capture_output=True).stdout.decode('utf-8').split('\\r\\n')[-3:0:-1]\n except Exception as e:\n raise ADBError(e)\n host_list = [host.replace('\\tdevice', '') for host in host_list]\n if not host_list:\n raise ADBError(f'Cannot detect any running emulator')\n else:\n return host_list",
"def get_devices():\n devices, errors = [], []\n\n for path in hookenv.action_get('devices').split(' '):\n path = path.strip()\n if not os.path.isabs(path):\n errors.append('{}: Not absolute path.'.format(path))\n elif not os.path.exists(path):\n errors.append('{}: Device does not exist.'.format(path))\n else:\n devices.append(path)\n\n if errors:\n raise ZapDiskError(\", \".join(errors))\n\n return devices",
"def device(self):\n return next(self.parameters()).device",
"def device(self):\n return next(self.parameters()).device",
"def device(self):\n return next(self.parameters()).device",
"def device(self):\n return next(self.parameters()).device",
"def device(self):\n return next(self.parameters()).device",
"def getCurrentRootPartition(disk):\n mountDir = mount(\"%s2\" % disk)\n if mountDir is None:\n return None\n\n path = os.path.join(mountDir, \"etc/yaboot.conf\")\n if not os.path.exists(path):\n return None\n\n fd = open(path)\n content = fd.read()\n fd.close()\n\n rootPartition = re.findall(\"root = .*\", content)[0].split(\"=\")[1].strip()\n\n if not umount(mountDir):\n return None\n\n return rootPartition",
"async def device_fixture(hass: HomeAssistant, ufp: MockUFPFixture):\n\n await init_entry(hass, ufp, [])\n\n device_registry = dr.async_get(hass)\n\n return list(device_registry.devices.values())[0]",
"def is_booted_storage_device(disk):\n cmdline = (\"grep -w /ahcexport /proc/mounts | cut -d ' ' -f 1 | \"\n \"sed -e 's/[0-9]*//g'\")\n if '/dev/' not in disk:\n disk = '/dev/%s' % disk\n grep_cmd = subprocess.Popen(cmdline,\n shell=True, stdout=subprocess.PIPE)\n for booted_disk in grep_cmd.stdout:\n booted_disk = booted_disk.decode(errors='ignore')\n booted_disk = booted_disk.rstrip('\\n').strip()\n if booted_disk == disk:\n return True\n return False",
"def get_devices(self):\n devices = self.get(\"event/device\")",
"def devices(self):\n return self._recordings.keys()",
"def main_device(self):\n return self._main_device",
"def get_device(self):\n raise NotImplementedError()",
"def devices_dict(self):\n return self.devices.dict",
"def device(self):\n\n\t\treturn self._device",
"def global_device():\n global _DEVICE\n return _DEVICE",
"def _get_device(node):\n\n vpp = VppPCIUtil(node)\n vpp.get_all_devices()\n\n # Save the device information\n node[\"devices\"] = {}\n node[\"devices\"][\"dpdk_devices\"] = vpp.get_dpdk_devices()\n node[\"devices\"][\"kernel_devices\"] = vpp.get_kernel_devices()\n node[\"devices\"][\"other_devices\"] = vpp.get_other_devices()\n node[\"devices\"][\"linkup_devices\"] = vpp.get_link_up_devices()",
"def get_device(self):\n addr = self.address\n servers = [server for server in pyrax.cloudservers.list()\n if addr in server.networks.get(\"private\", \"\")]\n try:\n return servers[0]\n except IndexError:\n return None",
"def get_devices(self):\n devices = []\n for i in self.devices:\n devices.append(self.devices[i])\n\n return devices",
"def _get_bios_boot_resource(self, data):\n try:\n boot_uri = data['links']['Boot']['href']\n except KeyError:\n msg = ('Boot resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, boot_settings = self._rest_get(boot_uri)\n\n if status != 200:\n msg = self._get_extended_error(boot_settings)\n raise exception.IloError(msg)\n\n return boot_settings",
"def get_boot_device(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).get_boot_device(task)\n else:\n return super(\n ipmitool.IPMIManagement, self).get_boot_device(task)",
"def load_device():",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def devices(self):\n return self._devices",
"def get_devices(self):\n return self.api_request('GET', self.url + '/device', {})",
"def devices(self):\n\n return self.__devices",
"def get_devices_spt(self):\n\n #import pdb; pdb.set_trace()\n if self._drives or self.firmware_version or self.product_name or self.vendor_name or \\\n self.serial_number or self.target_port:\n user_options = True\n else:\n user_options = False\n try:\n # Note: Extra logic to optimize spt device directory scanning.\n if not user_options:\n if self._include_enclosures:\n message = \"Find SCSI Devices\"\n command = \"{tool} show devices dtype=direct,hostmanaged,enclosure\".format(tool=self.tool)\n else:\n message = \"Find SCSI Disk Drives\"\n command = \"{tool} show devices dtype=direct,hostmanaged\".format(tool=self.tool)\n # Use common execute below.\n else:\n # Request enclosures separately.\n if self._include_enclosures:\n message = \"Find SCSI Enclosures\"\n command = \"{tool} show devices dtype=enclosure ofmt=json\".format(tool=self.tool)\n pdata = self._run_command(command=command, message=message,\n logger=self._logger, shell=False, expected_failure=True)\n if pdata['exit_code'] == self.EXIT_STATUS_SUCCESS and pdata['stdout']:\n devices = json.loads(pdata['stdout'])\n self.parse_devices_spt(devices)\n\n message = \"Find SCSI Disk Drives\"\n # Selective drives or all direct access (disk drives).\n if self._drives:\n command = \"{tool} show edt dtype=direct,hostmanaged devices={drives}\"\\\n .format(tool=self.tool, drives=\",\".join(self._drives))\n else:\n command = \"{tool} show devices dtype=direct,hostmanaged\".format(tool=self.tool)\n # Apply optional parameters.\n if self.product_name:\n command += \" pid={product}\".format(product=self.product_name)\n if self.vendor_name:\n command += \" vid={vendor}\".format(vendor=self.vendor_name)\n if self.serial_number:\n command += \" serial={serial}\".format(serial=self.serial_number)\n if self.target_port:\n command += \" tport={target}\".format(target=self.target_port)\n if self.firmware_version:\n command += \" fw_version={firmware}\".format(firmware=self.firmware_version)\n\n # Add common spt options, we want JSON output!\n if self._exclude:\n command += \" exclude={drives}\".format(drives=\",\".join(self._exclude))\n command += \" ofmt=json\"\n # Finally, execute spt and parse its' JSON output (if any).\n pdata = self._run_command(command=command, message=message,\n logger=self._logger, shell=False, expected_failure=True)\n # spt emits warning status (1) and no JSON output if no devices found.\n if pdata['exit_code'] == self.EXIT_STATUS_SUCCESS and pdata['stdout']:\n devices = json.loads(pdata['stdout'])\n self.parse_devices_spt(devices)\n\n except RuntimeError as exc:\n self._logger.error(\"Failed to acquire SCSI devices: {0}\".format(exc))\n raise exc\n\n except ValueError as exc:\n self._logger.error(\"Failed to parse spts' JSON output: {0}\".format(exc))\n raise exc",
"def create_boot_dev(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_CreateBootDev', self.handle))",
"def get_devices(adb=DEFAULT_ADB):\n # Check that adb is running\n Device.__start_adb(adb)\n # Split by newline and remove first line (\"List of devices attached\")\n # TODO: surround with try/except?\n devices = subprocess.check_output(\n [adb, \"devices\", \"-l\"]).decode().split('\\n')[1:]\n tmp = {}\n for dev in devices:\n if dev:\n tmp[dev.split()[0]] = dev\n return tmp",
"def udev(self):\n return self._udev",
"def get_device_file_dict():\n cmd = 'lshw -class disk'\n desc = \"description\"\n log_name = \"logical name\"\n serial = \"serial\"\n\n dev = []\n dev_list = []\n\n ret, output, err = run_gluster_command(cmd)\n output = output.decode('ASCII')\n dev_info = output.split('\\n')\n for line in dev_info:\n if re.search(desc, line):\n if dev:\n dev_list.append(dev)\n\n dev = []\n if re.search(log_name, line) or re.search(serial, line):\n temp = line.split(':')\n temp[1] = temp[1].strip(' ')\n dev.append(temp[1])\n dev_list.append(dev)\n for line in dev_list:\n print(line)",
"def device(self):\n return self.share.device",
"def get_disks():\n\n if system() != \"Windows\":\n raise OSError(\"For use with Windows platforms.\")\n\n logicaldisks=run(\n [\"wmic\", \"logicaldisk\", \"get\", \"name\"],\n capture_output=True\n )\n\n return findall(\"[A-Z]:\", str(logicaldisks.stdout))",
"def get_disks(self):\n result = {}\n\n exp = self.config['devices']\n reg = re.compile(exp)\n fs_types = set(self.config['fs_types'].split(','))\n\n try:\n fp = open('/proc/mounts')\n for line in fp:\n columns = line.split()\n device = columns[0].strip('/').replace('dev/','',1)\n mount_point = columns[1]\n fs_type = columns[2]\n\n if not reg.match(device):\n continue\n\n if fs_type not in fs_types:\n continue\n\n result[device] = mount_point\n except Exception as e:\n self.log.debug('Could not read /proc/mounts!')\n self.log.exception(e)\n finally:\n fp.close()\n return result",
"def get_devices():\n global managed_objects\n global devices_by_adr\n \n devices_by_adr = {}\n \n r = re.compile(\"\\/org\\/bluez\\/hci\\d*\\/dev\\_(.*)\")\n # e.g., match a string like this:\n # /org/bluez/hci0/dev_58_C9_35_2F_A1_EF\n \n for key, value in managed_objects.items():\n # print(\"key=\", key)\n m = r.match(key)\n if m is not None:\n dev_str = m.group(1) # we have a device string!\n # print(\"dev_str=\", dev_str)\n # let's flatten that dict a bit\n devices_by_adr[dev_str] = value[\"org.bluez.Device1\"]",
"def get_platform():\n query = {\n \"type\": \"config\",\n \"action\": \"get\",\n \"xpath\": \"/config/devices/entry[@name='localhost.localdomain']/platform\",\n }\n\n return __proxy__[\"panos.call\"](query)",
"def lv_devices(self):\n devs = set()\n return devs",
"def get_backend_disk(self, disk):\n backend_vm = self.get_backend_vm(disk.vm)\n for device in backend_vm.config.hardware.device:\n if (\n isinstance(device, vim.VirtualDisk)\n and str(device.key) == disk.backend_id\n ):\n return device",
"def set_one_time_boot(self, device, mac=None):\n self._update_persistent_boot([device], persistent=False, mac=mac)",
"def test_get_bios_boot_mode_list(self):\n pass",
"def device(self):\n return self._device",
"def devices(self):\n return {k:v for k, v in self._data.items() \n if v[\"type\"] == \"DEVICE\"}",
"def get_devices():\n devices = []\n for device_id in range(pm.lib.Pm_CountDevices()):\n devices.append(DeviceInfo(device_id))\n\n return devices",
"def GetBootDisk(self) -> 'AZComputeDisk':\n # pylint: disable=line-too-long\n disks = self.az_account.compute.ListDisks(\n resource_group_name=self.resource_group_name) # type: Dict[str, AZComputeDisk]\n # pylint: enable=line-too-long\n boot_disk_name = self.compute_client.virtual_machines.get(\n self.resource_group_name, self.name).storage_profile.os_disk.name\n if boot_disk_name not in disks:\n raise errors.ResourceNotFoundError(\n 'Boot disk not found for instance {0:s}'.format(self.resource_id),\n __name__)\n return disks[boot_disk_name]",
"def get_mbeds(self):\n mbeds = []\n for mbed in self.get_mbed_devices():\n mountpoint = re.match('.*\\\\\\\\(.:)$', mbed[0]).group(1)\n logger.debug('Registry mountpoint %s', mountpoint)\n\n if self._mount_point_exists(mountpoint):\n # TargetID is a hex string with 10-48 chars\n m = re.search('[&#]([0-9A-Za-z]{10,48})[&#]', mbed[1])\n if not m:\n continue\n tid = m.group(1)\n mbeds += [(mountpoint, tid)]\n logger.debug(\"get_mbeds mount_point %s usb_id %s\", mountpoint, tid)\n return mbeds",
"async def get_device_boottime_hostname(self):\n\n if self.transport == 'https':\n cmdlist = [\"show version\", \"show hostname\"]\n else:\n cmdlist = [\"show version|json\", \"show hostname|json\"]\n await self.exec_cmd(self._parse_boottime_hostname, cmdlist, None)",
"def get_device_info(disk):\n\n host_disk = \"/dev/\"+disk\n DISKINFO[host_disk] = {}\n DISKINFO[host_disk][\"Name\"] = host_disk\n DISKINFO[host_disk][\"Type\"] = \"Device\"\n DISKINFO[host_disk][\"HostDevice\"] = \"N/A\"\n DISKINFO[host_disk][\"Partitions\"] = []\n DISKINFO[host_disk][\"Vendor\"] = get_vendor(disk)\n DISKINFO[host_disk][\"Product\"] = get_product(disk)\n DISKINFO[host_disk][\"RawCapacity\"], DISKINFO[host_disk][\"Capacity\"] = get_capacity()\n DISKINFO[host_disk][\"Description\"] = get_description(disk)\n DISKINFO[host_disk][\"Flags\"] = get_capabilities(disk)\n DISKINFO[host_disk][\"Partitioning\"] = get_partitioning(disk)\n DISKINFO[host_disk][\"FileSystem\"] = \"N/A\"\n DISKINFO[host_disk][\"UUID\"] = \"N/A\"\n DISKINFO[host_disk][\"ID\"] = get_id(disk)\n DISKINFO[host_disk][\"BootRecord\"], DISKINFO[host_disk][\"BootRecordStrings\"] = get_boot_record(disk)\n\n return host_disk",
"def devices(self):\n return self.enumerate_devices()",
"def bootpart(disks):\n return path_to_partition(disks, '/boot/foo')",
"def get_available_devices(self):\n try:\n out = self.get_output(\"devices\")\n except BluetoothctlError, e:\n print(e)\n return None\n else:\n available_devices = []\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n\n return available_devices",
"def get_device_information(self):\n return self.mycam.devicemgmt.GetDeviceInformation()",
"def find_device():\n device = usb.core.find(\n idVendor=LuxaforFlag.DEVICE_VENDOR_ID,\n idProduct=LuxaforFlag.DEVICE_PRODUCT_ID\n )\n return device",
"def mpt():\n lbl_drives = ['device','mountpoint','fstype']\n disks = [d[0:3] for d in psutil.disk_partitions()]\n drives = [dict(zip(lbl_drives,ds)) for ds in disks]\n return [d['mountpoint']for d in drives]",
"def get_bootarch(self):\n return self._bootarch",
"def get_devices(self):\n return get_devices(self.api_key)",
"def devices(self):\n return self._sdk_dependencies.device_client",
"def getDeviceList(self):\n return defer.succeed(self.discovered)",
"def get_block_device_list(vars = {}, log = sys.stderr):\n\n # make sure we can access to the files/directories in /proc\n if not os.access(PROC_PARTITIONS_PATH, os.F_OK):\n return None\n\n # table with valid scsi/sata/ide/raid block device names\n valid_blk_names = {}\n # add in valid sd and hd block device names\n for blk_prefix in ('sd','hd'):\n for blk_num in map (\\\n lambda x: chr(x), range(ord('a'),ord('z')+1)):\n devicename=\"%s%c\" % (blk_prefix, blk_num)\n valid_blk_names[devicename]=None\n\n # add in valid scsi raid block device names\n for M in range(0,1+1):\n for N in range(0,7+1):\n devicename = \"cciss/c%dd%d\" % (M,N)\n valid_blk_names[devicename]=None\n\n for devicename in valid_blk_names.keys():\n # devfs under 2.4 (old boot cds) used to list partitions\n # in a format such as scsi/host0/bus0/target0/lun0/disc\n # and /dev/sda, etc. were just symlinks\n try:\n devfsname= os.readlink( \"/dev/%s\" % devicename )\n valid_blk_names[devfsname]=None\n except OSError:\n pass\n\n # only do this once every system boot\n if not os.access(DEVICES_SCANNED_FLAG, os.R_OK):\n\n # this is ugly. under devfs, device\n # entries in /dev/scsi/.. and /dev/ide/...\n # don't show up until you attempt to read\n # from the associated device at /dev (/dev/sda).\n # so, lets run sfdisk -l (list partitions) against\n # most possible block devices, that way they show\n # up when it comes time to do the install.\n devicenames = valid_blk_names.keys()\n devicenames.sort()\n for devicename in devicenames:\n os.system( \"sfdisk -l /dev/%s > /dev/null 2>&1\" % devicename )\n\n # touch file\n fb = open(DEVICES_SCANNED_FLAG,\"w\")\n fb.close()\n\n devicelist= {}\n\n partitions_file= file(PROC_PARTITIONS_PATH,\"r\")\n line_count= 0\n for line in partitions_file:\n line_count= line_count + 1\n\n # skip the first two lines always\n if line_count < 2:\n continue\n\n parts= string.split(line)\n\n if len(parts) < 4:\n continue\n\n device= parts[3]\n\n # skip and ignore any partitions\n if not valid_blk_names.has_key(device):\n continue\n\n try:\n major= int(parts[0])\n minor= int(parts[1])\n blocks= int(parts[2])\n except ValueError, err:\n continue\n\n gb_size= blocks/BLOCKS_PER_GB\n\n # check to see if the blk device is readonly\n try:\n # can we write to it?\n dev_name= \"/dev/%s\" % device\n fb = open(dev_name,\"w\")\n fb.close()\n readonly=False\n except IOError, e:\n # check if EROFS errno\n if errno.errorcode.get(e.errno,None) == 'EROFS':\n readonly=True\n else:\n # got some other errno, pretend device is readonly\n readonly=True\n\n devicelist[dev_name]= {'major': major,'minor': minor,'blocks': blocks, 'size': gb_size, 'readonly': readonly}\n return devicelist",
"def get_all_devices(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_GetAllDevices', self.handle))",
"def devmounted(device):\n ProcMounts.initialize()\n for mount in ProcMounts._mounts:\n if mount['fs_spec'] == device:\n return mount\n return None",
"def get_configuration(self):\n\t\tdevice = DeviceBase(self.name)\n\n\t\tif len(self.master_url) > 0:\n\t\t\tdevice.master_url = self.master_url\n\t\t\tr = requests.get(self.master_url + '/configuration/' + self.name)\n\n\t\t\tif r.status_code == 200:\n\t\t\t\ttry:\n\t\t\t\t\t#Request success\n\t\t\t\t\tconfig = json.loads(r.text)\n\t\t\t\t\tif config['deviceType'] == 1:\n\t\t\t\t\t\t\"\"\" HID Reader \"\"\"\n\t\t\t\t\t\tdevice = HIDReader(self.name)\n\t\t\t\t\tif config['deviceType'] == 2:\n\t\t\t\t\t\t\"\"\" ZK45Reader \"\"\"\n\t\t\t\t\t\tdevice = ZK45Reader(self.name)\n\t\t\t\t\tif config['deviceType'] == 4:\n\t\t\t\t\t\t\"\"\" ZFM20Reader \"\"\"\n\t\t\t\t\t\tdevice = ZFM20Reader(self.name)\n\t\t\t\t\tif config['deviceType'] == 5:\n\t\t\t\t\t\t\"\"\" IOEcho \"\"\"\n\t\t\t\t\t\tdevice = IOEcho(name=self.name, pin_and_label_matrix='')\n\t\t\t\t\telif config['deviceType'] == 0:\n\t\t\t\t\t\t\"\"\" None \"\"\"\n\t\t\t\t\t\tdevice = DeviceBase(name=self.name)\n\t\t\t\t\telse:\n\t\t\t\t\t\t\"\"\" Disable \"\"\"\n\t\t\t\t\t\tdevice = DeviceBase(self.name)\n\n\t\t\t\t\tdevice.zone_id = config['zone']\n\n\t\t\t\t\tdevice.is_zone_enabled = config['enabled']\n\t\t\t\t\tdevice.is_zone_day_time_only = config['dayTimeOnly']\n\t\t\t\t\tdevice.is_configuration_loaded = True\n\n\t\t\t\t\tdevice.master_secret = config['secret']\n\t\t\t\t\tdevice.master_url = self.master_url\n\n\t\t\t\t\tdevice.is_in_error = False\n\t\t\t\t\tdevice.error_status = \"OK\"\n\t\t\t\t\tdevice.type = config['deviceType']\n\n\t\t\t\t\tprint(\"Configuration loaded.\")\n\t\t\t\texcept Exception as e:\n\t\t\t\t\terror_message = \"Device type not supported by current platform. Configuration aborted. (\" + str(e) + \")\"\n\t\t\t\t\tprint(error_message)\n\t\t\t\t\tdevice.zone_id = 1\n\n\t\t\t\t\tdevice.is_zone_enabled = False\n\t\t\t\t\tdevice.is_zone_day_time_only = False\n\t\t\t\t\tdevice.is_in_error = True\n\t\t\t\t\tdevice.error_status = error_message\n\t\t\telse:\n\t\t\t\tprint(\"Configuration loading failed. (Server response : \" + str(r.status_code) + \")\")\n\t\t\t\tdevice.zone_id = 1\n\t\t\t\tdevice.is_zone_enabled = False\n\t\t\t\tdevice.is_zone_day_time_only = False\n\t\t\t\tdevice.is_in_error = True\n\t\t\t\tdevice.error_status = \"Configuration loading failed. (Server response : \" + str(r.status_code) + \")\"\n\t\telse:\n\t\t\tself.zone_id = 1\n\t\t\tself.is_zone_enabled = True\n\t\t\tself.is_zone_day_time_only = True\n\t\t\tdevice.is_in_error = True\n\t\t\tdevice.error_status = \"No master URL defined\"\n\n\t\tdevice.report_state()\n\t\treturn device",
"def select_host_characteristics(self):\n return IMPL.select_host_characteristics()",
"def set_boot_device(self, device, persistent=False):\n\n operation = \"set_boot_device\"\n try:\n self.sp_manager.create_boot_policy()\n self.sp_manager.set_boot_device(device)\n\n except UcsException as ex:\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def loopdev(diskimg):\n result = subprocess.run(\n ['losetup', '--all', '--list', '--json'], check=True, capture_output=True)\n for ld in json.loads(result.stdout.decode())['loopdevices']:\n if ld['back-file'] == diskimg:\n return ld['name']\n return None",
"def current(self):\n with driver.get_active_context() as ac:\n devnum = ac.devnum\n if devnum is not None:\n return self[devnum]",
"def trusted_devices(self):\n request = self.session.get(\n f\"{self.SETUP_ENDPOINT}/listDevices\", params=self.params\n )\n return request.json().get(\"devices\")",
"def getemu(self):\n return self.emu",
"def getDevices(i):\n devices = Account['KTFLR'].devices('monpressprod')\n device = devices[i]\n return device",
"def device_config(self):\n\t\ttry:\n\t\t\treturn self._dev\n\t\texcept:\n\t\t\treturn 0",
"def device():\n return G.DEVICE",
"def getDevices(self):\n\n devices = None\n\n for i in range(3):\n devices = subprocess.check_output(\"adb devices -l\", creationflags=self.createNoWindow)\n\n devices = devices.decode()\n deviceModel = re.findall(\"model:(.*) device\", devices)\n deviceID = re.findall(r\"(\\S+) {2}\", devices, flags=re.IGNORECASE)\n\n return deviceModel, deviceID",
"def get_my_mac():\n\n mac_set = get_my_mac_set(iface_filter=get_default_route()[1])\n return mac_set.pop()"
] | [
"0.7050934",
"0.6595496",
"0.63762164",
"0.63091874",
"0.62937915",
"0.6123704",
"0.6111399",
"0.60594696",
"0.5930392",
"0.5907175",
"0.5856273",
"0.57696676",
"0.574115",
"0.5735435",
"0.569995",
"0.5656506",
"0.56381845",
"0.56327116",
"0.56295407",
"0.5619154",
"0.5611484",
"0.558969",
"0.5581483",
"0.55683184",
"0.5529376",
"0.55206436",
"0.55151314",
"0.551312",
"0.551312",
"0.551312",
"0.551312",
"0.551312",
"0.55046946",
"0.5502852",
"0.5485409",
"0.5483247",
"0.5482557",
"0.5478948",
"0.54783106",
"0.5478077",
"0.54748404",
"0.5472111",
"0.5470991",
"0.54678524",
"0.5446326",
"0.5438904",
"0.5433738",
"0.5428476",
"0.54122335",
"0.54122335",
"0.54122335",
"0.54122335",
"0.54092693",
"0.5402858",
"0.53980404",
"0.5395681",
"0.5392518",
"0.5390345",
"0.53881276",
"0.53845185",
"0.5384188",
"0.5383734",
"0.53829455",
"0.53810775",
"0.5369885",
"0.53665346",
"0.53619844",
"0.53525275",
"0.5351262",
"0.53437126",
"0.53424084",
"0.5340769",
"0.5337119",
"0.5335177",
"0.5334186",
"0.5317124",
"0.5309832",
"0.5305273",
"0.53014994",
"0.5300276",
"0.52799916",
"0.52688223",
"0.5261522",
"0.5260528",
"0.52603877",
"0.52569294",
"0.5253903",
"0.5253594",
"0.52525043",
"0.5252106",
"0.52494514",
"0.52411383",
"0.5239091",
"0.5237199",
"0.52324486",
"0.52228826",
"0.52152866",
"0.52145493",
"0.52111536",
"0.52102077"
] | 0.65442973 | 2 |
Changes the persistent boot device order in BIOS boot mode for host | def _update_persistent_boot(self, device_type=[], persistent=False,
mac=None):
tenure = 'Once'
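# 'Once' applies the override to the next boot only; 'Continuous' (set below when persistent=True) survives reboots.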
new_device = device_type[0]
# If it is a standard device, we need to convert in RIS convention
if device_type[0].upper() in DEVICE_COMMON_TO_RIS:
new_device = DEVICE_COMMON_TO_RIS[device_type[0].upper()]
if persistent:
tenure = 'Continuous'
systems_uri = "/rest/v1/Systems/1"
# Need to set this option first if device is 'UefiTarget'
if new_device == 'UefiTarget':
if not mac:
msg = ('MAC address is needed for iSCSI UEFI boot')
raise exception.IloInvalidInputError(msg)
headers, bios_uri, bios_settings = self._check_bios_resource()
# Get the Boot resource and Mappings resource.
boot_settings = self._get_bios_boot_resource(bios_settings)
StructuredBootString = None
for boot_setting in boot_settings['BootSources']:
if(mac.upper() in boot_setting['UEFIDevicePath'] and
'iSCSI' in boot_setting['UEFIDevicePath']):
StructuredBootString = boot_setting['StructuredBootString']
break
if not StructuredBootString:
msg = ('MAC provided is invalid: "%s"' % mac)
raise exception.IloInvalidInputError(msg)
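# Patch the matched UEFI target first; the boot source override set below refers to it.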
new_boot_settings = {}
new_boot_settings['Boot'] = {'UefiTargetBootSourceOverride':
StructuredBootString}
status, headers, response = self._rest_patch(systems_uri, None,
new_boot_settings)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg)
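# Now patch the boot source override itself with the chosen device and tenure.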
new_boot_settings = {}
new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': tenure,
'BootSourceOverrideTarget': new_device}
status, headers, response = self._rest_patch(systems_uri, None,
new_boot_settings)
if status >= 300:
msg = self._get_extended_error(response)
raise exception.IloError(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_update_bios_boot_mode(self):\n pass",
"def test_patch_bios_boot_mode(self):\n pass",
"def set_boot_order(profile_obj):\n status = True\n logger._log_to_console_and_log_file(\"\")\n logger._log_to_console_and_log_file(\"### Testing the 'Boot Settings' session ###\")\n logger._log_to_console_and_log_file(\"- Select the 'Legacy BIOS' mode\")\n createprofile_elements = ProfileContainer(ProfileContainerType.ADD)\n __select_value_from_a_profile_combo_box(createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE, createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE_LIST % \"Legacy BIOS\")\n # Set invalid values\n logger._log_to_console_and_log_file(\"Testing using invalid values\")\n for profile in profile_obj:\n items = [[\"CD\", profile.cd], [\"USB\", profile.usb], [\"HardDisk\", profile.harddisk]]\n for data in items:\n ui_lib.wait_for_element_and_input_text(\"name=%s\" % data[0], data[1])\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_CREATE_SERVER_PROFILE_FORM)\n if data[0] == \"HardDisk\":\n data[0] = \"Hard Disk\"\n if ui_lib.wait_for_element_text(FusionServerProfilesPage.ID_BOOT_ORDER_POSITION % data[0], data[1], timeout=1):\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was not cleared to the default value and persisted as '\" + str(data[1]) + \"'\")\n status = False\n else:\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was correctly cleared to the default value\")\n return status",
"def set_boot_device(self, device, persistent=False):\n\n operation = \"set_boot_device\"\n try:\n self.sp_manager.create_boot_policy()\n self.sp_manager.set_boot_device(device)\n\n except UcsException as ex:\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def set_one_time_boot(self, device, mac=None):\n self._update_persistent_boot([device], persistent=False, mac=mac)",
"def boot(self):\n\n pass",
"def boot(self, boot):\n\n self._boot = boot",
"def set_boot_device(self, task, device, persistent=False):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified.\") % device)\n\n uefi_mode = (\n boot_mode_utils.get_boot_mode(task.node) == 'uefi')\n\n # disable 60 secs timer\n timeout_disable = \"0x00 0x08 0x03 0x08\"\n ipmitool.send_raw(task, timeout_disable)\n\n # note(naohirot):\n # Set System Boot Options : ipmi cmd '0x08', bootparam '0x05'\n #\n # $ ipmitool raw 0x00 0x08 0x05 data1 data2 0x00 0x00 0x00\n #\n # data1 : '0xe0' persistent + uefi\n # '0xc0' persistent + bios\n # '0xa0' next only + uefi\n # '0x80' next only + bios\n # data2 : boot device defined in the dict _BOOTPARAM5_DATA2\n\n bootparam5 = '0x00 0x08 0x05 %s %s 0x00 0x00 0x00'\n if persistent:\n data1 = '0xe0' if uefi_mode else '0xc0'\n else:\n data1 = '0xa0' if uefi_mode else '0x80'\n data2 = _BOOTPARAM5_DATA2[device]\n\n cmd8 = bootparam5 % (data1, data2)\n ipmitool.send_raw(task, cmd8)\n else:\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified. \"\n \"Current iRMC firmware condition doesn't support IPMI \"\n \"but Redfish.\") % device)\n super(ipmitool.IPMIManagement, self).set_boot_device(\n task, device, persistent)",
"def bootloader() -> NoReturn:",
"def step7(self):\n for indx, mr in enumerate(self.mrs):\n self.log.info(\"Set boot drive on controller:%d\"\n % (mr.ctrl_id))\n for vd in self.mr_vds[indx]:\n if (int(mr.cli.bootdrive_vd_get()) != vd):\n mr.cli.bootdrive_vd_set(vd_id=self.mr_vds[indx][indx],\n setting=\"On\")\n break",
"def _boot_using_bootmon(self, target):\n self.logger.debug('Booting using bootmon.')\n\n try:\n self._wait_for_vemsd_mount(target, timeout=20)\n except DeviceError:\n # OK, something's wrong. Reboot the board and try again.\n self.logger.debug('VEMSD not mounted, attempting to power cycle device.')\n target.sendline(' ')\n state = target.expect(['Cmd> ', self.config.bootmon_prompt, self.android_prompt]) # pylint: disable=E1101\n\n if state == 0 or state == 1:\n # Reboot - Bootmon\n target.sendline('reboot')\n target.expect('Powering up system...')\n elif state == 2:\n target.sendline('reboot -n')\n target.expect('Powering up system...')\n else:\n raise DeviceError('Unexpected board state {}; should be 0, 1 or 2'.format(state))\n\n self._wait_for_vemsd_mount(target)\n\n self._setup_before_reboot()\n\n # Reboot - Bootmon\n self.logger.debug('Rebooting into bootloader...')\n open(os.path.join(self.config.root_mount, 'reboot.txt'), 'a').close()\n target.expect('Powering up system...')\n target.expect(self.config.bootmon_prompt)\n\n # Wait for VEMSD to mount\n self._wait_for_vemsd_mount(target)\n\n #Boot Linux - Bootmon\n target.sendline('fl linux fdt ' + self.config.dtb)\n target.expect(self.config.bootmon_prompt)\n target.sendline('fl linux initrd ' + self.config.initrd)\n target.expect(self.config.bootmon_prompt)\n #Workaround TC2 bootmon serial issue for loading large initrd blob\n target.sendline(' ')\n target.expect(self.config.bootmon_prompt)\n target.sendline('fl linux boot ' + self.config.kernel + self.config.kernel_arguments)",
"def update_sdcard_boot_commands(device):\n mount_dir = mkdtemp()\n\n boot_partition = device.partitions(full_paths=True)[0]\n\n mount_command = ['sudo', 'mount', boot_partition, mount_dir]\n\n print(f'Mounting SD Card partition {boot_partition} to temp directory {mount_dir}')\n interactive_console(mount_command)\n\n # Note- this sed command is what the target mounts will look like\n # I'm not messing with the blk_ids of our devices as we know them\n # here.\n\n sed_command = [\n 'sudo',\n 'sed',\n '-i',\n '-E',\n 's#root=[^ ]+#root=/dev/sda2#',\n os.path.join(mount_dir, 'cmdline.txt')]\n console(sed_command)\n sed_command = [\n 'sudo',\n 'sed',\n '-i',\n 's# init=/usr/lib/raspi-config/init_resize.sh##',\n os.path.join(mount_dir, 'cmdline.txt')]\n\n print('Modifying init command line')\n console(sed_command)\n\n print('Successfully modified! Unmounting.')\n umount_command = ['sudo', 'umount', mount_dir]\n interactive_console(umount_command)\n\n print('Cleaning up mounted dir')\n os.rmdir(mount_dir)",
"def set_pending_boot_mode(self, boot_mode):\n boot_mode = boot_mode.lower()\n if boot_mode not in ['uefi', 'legacy']:\n msg = 'Invalid Boot mode specified'\n raise exception.IloInvalidInputError(msg)\n\n boot_properties = {'BootMode': boot_mode}\n\n if boot_mode == 'legacy':\n boot_properties['BootMode'] = 'LegacyBios'\n else:\n # If Boot Mode is 'Uefi' set the UEFIOptimizedBoot first.\n boot_properties['UefiOptimizedBoot'] = \"Enabled\"\n\n # Change the Boot Mode\n self._change_bios_setting(boot_properties)",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def uefi_reorder_loaders(grubcfg, target):\n if grubcfg.get('reorder_uefi', True):\n efi_output = util.get_efibootmgr(target)\n currently_booted = efi_output.get('current', None)\n boot_order = efi_output.get('order', [])\n if currently_booted:\n if currently_booted in boot_order:\n boot_order.remove(currently_booted)\n boot_order = [currently_booted] + boot_order\n new_boot_order = ','.join(boot_order)\n LOG.debug(\n \"Setting currently booted %s as the first \"\n \"UEFI loader.\", currently_booted)\n LOG.debug(\n \"New UEFI boot order: %s\", new_boot_order)\n with util.ChrootableTarget(target) as in_chroot:\n in_chroot.subp(['efibootmgr', '-o', new_boot_order])\n else:\n LOG.debug(\"Skipped reordering of UEFI boot methods.\")\n LOG.debug(\"Currently booted UEFI loader might no longer boot.\")",
"def _get_persistent_boot_devices(self):\n # Check if the BIOS resource if exists.\n headers_bios, bios_uri, bios_settings = self._check_bios_resource()\n\n # Get the Boot resource.\n boot_settings = self._get_bios_boot_resource(bios_settings)\n\n # Get the BootSources resource\n try:\n boot_sources = boot_settings['BootSources']\n except KeyError:\n msg = (\"BootSources resource not found.\")\n raise exception.IloError(msg)\n\n try:\n boot_order = boot_settings['PersistentBootConfigOrder']\n except KeyError:\n msg = (\"PersistentBootConfigOrder resource not found.\")\n raise exception.IloCommandNotSupportedError(msg)\n\n return boot_sources, boot_order",
"def power_on_post_boot(self):\n self.log.info(\"post-power-up for boot to flashed image\")\n with self.console_takeover() as (descr, log):\n e = pexpect.fdpexpect.fdspawn(descr, logfile = log, timeout = 20)\n ttbl.target.expect_send_sequence(\n self.log, e, self.kernel_boot_cmd_list)\n # From here on, the console returns to normal",
"def update_persistent_boot(self, device_type=[], mac=None):\n # Check if the input is valid\n for item in device_type:\n if item.upper() not in DEVICE_COMMON_TO_RIS:\n raise exception.IloInvalidInputError(\"Invalid input. Valid \"\n \"devices: NETWORK, HDD,\"\n \" ISCSI or CDROM.\")\n\n self._update_persistent_boot(device_type, persistent=True, mac=mac)",
"def make_BootSettings(order, manageBoot=False):\n return {'manageBoot': manageBoot,\n 'order': order\n }",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def setup_device(device):\n try:\n # Gets around \"Resource busy\" errors\n device.detach_kernel_driver(0)\n except Exception:\n pass\n device.set_configuration()",
"def test_get_bios_boot_mode_list(self):\n pass",
"def get_persistent_boot_device(self):\n system = self._get_host_details()\n try:\n # Return boot device if it is persistent.\n if system['Boot']['BootSourceOverrideEnabled'] == 'Continuous':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n # Check if we are in BIOS boot mode.\n # There is no resource to fetch boot device order for BIOS boot mode\n if not self._is_boot_mode_uefi():\n return None\n\n # Get persistent boot device order for UEFI\n boot_sources, boot_devices = self._get_persistent_boot_devices()\n\n boot_string = \"\"\n try:\n for source in boot_sources:\n if (source[\"StructuredBootString\"] == boot_devices[0]):\n boot_string = source[\"BootString\"]\n break\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n if 'HP iLO Virtual USB CD' in boot_string:\n return 'CDROM'\n\n elif ('NIC' in boot_string or\n 'PXE' in boot_string or\n \"iSCSI\" in boot_string):\n return 'NETWORK'\n\n elif common.isDisk(boot_string):\n return 'HDD'\n\n else:\n return None",
"def flashUboot(self):\n\t\tif self.settings.getKeyValue('flash.uboot?') == 'y':\n\t\t\tloadAddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\t\tcmd = self.settings.getKeyValue('u-boot.load.command')\n\t\t\tcmd = cmd.replace('<u-boot>', 'u-boot.bin.12x.2430')\n\t\t\tself.socket.send(cmd, 5)\n\t\t\t#self.socket.send('protect off 1:0-1\\r', 2)\n\t\t\t#self.socket.send('erase 1:0-1\\r', 2)\n\t\t\t#self.socket.send('cp.b 80000000 %s 2ffff\\r' % loadAddress)\n\t\t\treturn None\n\t\t\t#cmd = cmd.replace('<u-bootloadadress>', self.u-bootloadaddress)",
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def wait_boot(self, value: int) -> None:\n self._data[ATTR_WAIT_BOOT] = value",
"def bootpart(disks):\n return path_to_partition(disks, '/boot/foo')",
"def swap(name, persist=True, config=\"/etc/fstab\"):\n ret = {\"name\": name, \"changes\": {}, \"result\": True, \"comment\": \"\"}\n on_ = __salt__[\"mount.swaps\"]()\n\n if __salt__[\"file.is_link\"](name):\n real_swap_device = __salt__[\"file.readlink\"](name)\n if not real_swap_device.startswith(\"/\"):\n real_swap_device = \"/dev/{}\".format(os.path.basename(real_swap_device))\n else:\n real_swap_device = name\n\n if real_swap_device in on_:\n ret[\"comment\"] = \"Swap {} already active\".format(name)\n elif __opts__[\"test\"]:\n ret[\"result\"] = None\n ret[\"comment\"] = \"Swap {} is set to be activated\".format(name)\n else:\n __salt__[\"mount.swapon\"](real_swap_device)\n\n on_ = __salt__[\"mount.swaps\"]()\n\n if real_swap_device in on_:\n ret[\"comment\"] = \"Swap {} activated\".format(name)\n ret[\"changes\"] = on_[real_swap_device]\n else:\n ret[\"comment\"] = \"Swap {} failed to activate\".format(name)\n ret[\"result\"] = False\n\n if persist:\n device_key_name = \"device\"\n if \"AIX\" in __grains__[\"os\"]:\n device_key_name = \"dev\"\n if \"/etc/fstab\" == config:\n # Override default for AIX\n config = \"/etc/filesystems\"\n fstab_data = __salt__[\"mount.filesystems\"](config)\n else:\n fstab_data = __salt__[\"mount.fstab\"](config)\n if __opts__[\"test\"]:\n if name not in fstab_data and name not in [\n fstab_data[item][\"device\"] for item in fstab_data\n ]:\n ret[\"result\"] = None\n if name in on_:\n ret[\n \"comment\"\n ] = \"Swap {} is set to be added to the fstab and to be activated\".format(\n name\n )\n return ret\n\n if \"none\" in fstab_data:\n if (\n fstab_data[\"none\"][device_key_name] == name\n and fstab_data[\"none\"][\"fstype\"] != \"swap\"\n ):\n return ret\n\n if \"AIX\" in __grains__[\"os\"]:\n out = None\n ret[\"result\"] = False\n ret[\"comment\"] += \". swap not present in /etc/filesystems on AIX.\"\n return ret\n else:\n # present, new, change, bad config\n # Make sure the entry is in the fstab\n out = __salt__[\"mount.set_fstab\"](\n \"none\", name, \"swap\", [\"defaults\"], 0, 0, config\n )\n if out == \"present\":\n return ret\n if out == \"new\":\n ret[\"changes\"][\"persist\"] = \"new\"\n ret[\"comment\"] += \". Added new entry to the fstab.\"\n return ret\n if out == \"change\":\n ret[\"changes\"][\"persist\"] = \"update\"\n ret[\"comment\"] += \". Updated the entry in the fstab.\"\n return ret\n if out == \"bad config\":\n ret[\"result\"] = False\n ret[\"comment\"] += \". However, the fstab was not found.\"\n return ret\n return ret",
"def _turn_on_dev_mode(self):\n if self._device is not None:\n self._char_write(self._BLE_SERVICE_ANTI_DOS,\n [ord(c) for c in self._ANTI_DOS_MESSAGE])\n self._char_write(self._BLE_SERVICE_TX_POWER,\n [self._TX_POWER_VALUE])\n # Sending 0x01 to the wake service wakes the sphero.\n self._char_write(self._BLE_SERVICE_WAKE, [0x01])",
"def boot():\r\n print \"\"\"\r\n ###### ## ## ### ## ## ## ## ######## ########\r\n ## ## ## ## ## ## ### ## ## ## ## ## ##\r\n ## #### ## ## #### ## ## ## ## ## ##\r\n ## ## ## ## ## ## ## ## ## ######## ######\r\n ## ## ######### ## #### ## ## ## ## ##\r\n ## ## ## ## ## ## ### ## ## ## ## ##\r\n ###### ## ## ## ## ## ####### ## ## ########\r\n\r\n Version %s-%s\r\n\r\n Multi Purpose Artificial Inelegance Program\r\n Copyright (c) Alexandre Gauthier 2010-2011\r\n All Rights Reserved\r\n \"\"\" % ( constants.VERSION, constants.TAGNAME )\r\n\r\n # Initialize log\r\n # TODO: The values should be read from config file.\r\n log.init_log('cyanure.log', 'DEBUG')\r\n\r\n logger.info(\"Cyanure system init: Version %s (%s)\" % (\r\n constants.VERSION, constants.TAGNAME ))",
"def set_boot_options(self, image_name, **vendor_specifics):\n current_boot = self.show(\"show running-config | inc ^boot system \")\n file_system = vendor_specifics.get(\"file_system\")\n if file_system is None:\n file_system = self._get_file_system()\n\n file_system_files = self.show(f\"dir {file_system}\")\n if re.search(image_name, file_system_files) is None:\n log.error(\"Host %s: File not found error for image %s.\", self.host, image_name)\n raise NTCFileNotFoundError(\n # TODO: Update to use hostname\n hostname=self.host,\n file=image_name,\n directory=file_system,\n )\n\n current_images = current_boot.splitlines()\n commands_to_exec = [f\"no {image}\" for image in current_images]\n commands_to_exec.append(f\"boot system {file_system}/{image_name}\")\n self.config(commands_to_exec)\n\n self.save()\n if self.boot_options[\"sys\"] != image_name:\n log.error(\"Host %s: Setting boot command did not yield expected results\", self.host)\n raise CommandError(\n command=f\"boot system {file_system}/{image_name}\",\n message=\"Setting boot command did not yield expected results\",\n )\n\n log.info(\"Host %s: boot options have been set to %s\", self.host, image_name)",
"def bootup(debug_port, lines):\n lines.skip_until(\"Booting...\")\n lines.skip_until(\"Loading blocks...\")\n lines.skip_until(\"Starting user space\")\n authenticate(debug_port, lines)\n lines.expect_next(\"Enter command\")",
"def software_load(self, filename: str) -> None:\n pass # Most boards can use serialboot.",
"def boot_config():\n # quick check to grab a config file from /boot partition.\n # this function helps users who cannot SSH/access the Pi,\n # but can access the microSD card\n if os.path.exists(BOOT_CONFIG_PATH):\n print(\"Configuration loaded from /boot directory.\")\n with open(BOOT_CONFIG_PATH) as boot_file:\n with open(CONFIG_FILE_PATH, 'w+') as config_file:\n for line in boot_file:\n config_file.write(line)",
"def load_devices():",
"def default_device_names_for_instance(self,\n instance,\n root_device_name,\n *block_device_lists):\n self.prep_for_spawn(context=None, instance=instance)",
"def step4(self):\n for mr in self.mrs:\n self.log.info(\"Boot drive of controller: %d is %d\"\n % (mr.ctrl_id, mr.cli.bootdrive_vd_get()))",
"def set_bootloader_mode(self, mode):\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 'B')",
"def reboot_fpga(self):\n log.info(\"Booting FPGA from SPI prom\")\n self.set(\"FPGA_CTRL\", \"boot_fpga\", 1);",
"def magma_device_sync():\n\n _libmagma.magma_device_sync()",
"def create_boot_dev(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_CreateBootDev', self.handle))",
"def load_device():",
"def zap(dev):\n try:\n LOG.debug('Zapping partition table on %s', dev)\n\n # try to wipe out any GPT partition table backups. sgdisk\n # isn't too thorough.\n lba_size = 4096\n size = 33 * lba_size\n with file(dev, 'wb') as dev_file:\n dev_file.seek(-size, os.SEEK_END)\n dev_file.write(size*'\\0')\n\n command_check_call(\n [\n 'sgdisk',\n '--zap-all',\n '--',\n dev,\n ],\n )\n command_check_call(\n [\n 'sgdisk',\n '--clear',\n '--mbrtogpt',\n '--',\n dev,\n ],\n )\n\n update_partition('-d', dev, 'zapped')\n\n except subprocess.CalledProcessError as e:\n raise Error(e)",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def set_bios_bootmode_uefi(ip, login_account, login_password, system_id):\n result = {}\n login_host = \"https://\" + ip\n try:\n # Connect using the BMC address, account name, and password\n # Create a REDFISH object\n REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,\n password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)\n # Login into the server and create a session\n REDFISH_OBJ.login(auth=utils.g_AUTH)\n except:\n traceback.print_exc()\n result = {'ret': False, 'msg': \"Please check the username, password, IP is correct\"}\n return result\n\n # GET the ComputerSystem resource\n system = utils.get_system_url(\"/redfish/v1\", system_id, REDFISH_OBJ)\n if not system:\n result = {'ret': False, 'msg': \"This system id is not exist or system member is None\"}\n REDFISH_OBJ.logout()\n return result\n for i in range(len(system)):\n system_url = system[i]\n response_system_url = REDFISH_OBJ.get(system_url, None)\n if response_system_url.status != 200:\n error_message = utils.get_extended_error(response_system_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (system_url, response_system_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else:\n # Get the bios resource\n bios_url = response_system_url.dict['Bios']['@odata.id']\n response_bios_url = REDFISH_OBJ.get(bios_url, None)\n if response_bios_url.status != 200:\n error_message = utils.get_extended_error(response_bios_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (bios_url, response_bios_url.status, error_message)}\n REDFISH_OBJ.logout()\n return result\n else: # Get bios success\n # Seek boot mode from bios attributes\n attribute_bootmode = None\n attributes = response_bios_url.dict['Attributes']\n for attribute in attributes:\n if attribute == \"BootMode\" or attribute == \"SystemBootMode\":\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"SystemBootMode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n for attribute in attributes:\n if \"Boot\" in attribute and \"Mode\" in attribute:\n attribute_bootmode = attribute\n break\n if attribute_bootmode == None:\n result = {'ret': False, 'msg': \"Can not found BootMode attribute in response of url %s\" %(bios_url)}\n REDFISH_OBJ.logout()\n return result\n\n # Get boot mode setting guide from bios registry\n WarningText = None\n ValueName = None\n bios_registry_url = \"/redfish/v1/Registries/\" + response_bios_url.dict['AttributeRegistry']\n response_bios_registry_url = REDFISH_OBJ.get(bios_registry_url, None)\n if response_bios_registry_url.status == 200:\n locations = response_bios_registry_url.dict['Location']\n bios_regjson_url = None\n for location in locations:\n if 'en' in location['Language']:\n bios_regjson_url = location['Uri']\n break\n if bios_regjson_url:\n response_bios_regjson_url = REDFISH_OBJ.get(bios_regjson_url, None)\n if response_bios_regjson_url.status == 200:\n regattributes = response_bios_regjson_url.dict['RegistryEntries']['Attributes']\n for regattribute in regattributes:\n if regattribute['AttributeName'] == attribute_bootmode:\n if 'WarningText' in regattribute:\n WarningText = regattribute['WarningText']\n for value in regattribute['Value']:\n if 'legacy' in value['ValueName'].lower():\n continue\n if 'uefi' in value['ValueName'].lower():\n ValueName = 
value['ValueName']\n break\n ValueName = value['ValueName']\n break\n \n # Perform patch to set\n if ValueName == None:\n ValueName = \"UEFIMode\"\n pending_url = response_bios_url.dict['@Redfish.Settings']['SettingsObject']['@odata.id']\n parameter = {attribute_bootmode: ValueName}\n attribute = {\"Attributes\": parameter}\n headers = {\"If-Match\": '*'}\n response_pending_url = REDFISH_OBJ.patch(pending_url, body=attribute, headers=headers)\n if response_pending_url.status in [200,204]:\n if WarningText:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful. WarningText: %s'% (WarningText) }\n else:\n result = {'ret': True, 'msg': 'set bios bootmode uefi successful'}\n elif response_pending_url.status == 405:\n result = {'ret': False, 'msg': \"Resource not supported\"}\n else:\n error_message = utils.get_extended_error(response_pending_url)\n result = {'ret': False, 'msg': \"Url '%s' response Error code %s \\nerror_message: %s\" % (\n pending_url, response_pending_url.status, error_message)}\n\n # Logout of the current session\n try:\n REDFISH_OBJ.logout()\n except:\n pass\n return result",
"def test_get_bios_boot_mode_by_moid(self):\n pass",
"def set_bootloader_mode(self, mode):\n self.check_validity()\n\n mode = int(mode)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')",
"def set_boot_mode(self, task, mode):\n raise exception.UnsupportedDriverExtension(\n driver=task.node.driver, extension='set_boot_mode')",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version|json\", \"show hostname\"], None,\n 'mixed')",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"cat /proc/uptime\", \"hostnamectl\",\n \"cat /etc/os-release\"], None, 'text')",
"def last_boot(self, value: datetime) -> None:\n self._data[ATTR_LAST_BOOT] = value.isoformat()",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version\"], None)",
"def reset(self,bootloader=False):\n self.send_packet('\\xff' if bootloader else '\\xfe')",
"def unselect_and_select_boot_order():\n # Unselect and select the \"Manage boot order\" option\n selenium2lib = ui_lib.get_s2l()\n status = True\n logger._log_to_console_and_log_file(\"\")\n logger._log_to_console_and_log_file(\"Unselecting and selecting the 'Manage boot order' checkbox\")\n ui_lib.wait_for_checkbox_and_unselect(FusionServerProfilesPage.ID_CHKBOX_MANAGE_BOOT_ORDER)\n if not ui_lib.wait_for_element_visible(\"name=%s\" % \"CD\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"USB\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"HardDisk\"):\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were correctly hidden\")\n else:\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items are still being displayed\")\n selenium2lib.capture_page_screenshot()\n status = False\n ui_lib.wait_for_checkbox_and_select(FusionServerProfilesPage.ID_CHKBOX_MANAGE_BOOT_ORDER)\n if ui_lib.wait_for_element_visible(\"name=%s\" % \"CD\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"USB\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"HardDisk\"):\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were correctly displayed\")\n else:\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were NOT displayed\")\n selenium2lib.capture_page_screenshot()\n status = False\n return status",
"def usb_setup():\n print(\"Warning: using deprecated usb_setup routine!\")\n largest = largest_partition()\n medium = medium_partition()\n smallest = smallest_partition()\n\n print(\"Starting USB installation\")\n print(\"Using {} as archive storage\".format(largest))\n print(\"Using {} as volatile storage\".format(medium))\n print(\"Using {} as important storage\".format(smallest))\n\n lncm_usb = \"/usr/local/sbin/lncm-usb\"\n\n cli_invocation = [\n lncm_usb,\n largest,\n medium,\n smallest,\n get_uuid(largest),\n get_uuid(medium),\n get_uuid(smallest),\n str(largest_part_size()),\n ]\n\n call(cli_invocation)",
"def boot_linux(self, rootfs=None, bootargs=\"\"):\n common.print_bold(\"\\n===== Booting linux for %s =====\" % self.model)\n\n self.sendline('fdt addr $fdt_addr')\n self.expect(self.uprompt)\n self.sendline('fdt get value bcm_bootargs /chosen bootargs')\n self.expect(self.uprompt)\n\n self.sendline('setenv bootargs \"$bcm_bootargs %s\"' % bootargs)\n self.expect(self.uprompt)\n\n self.sendline(\n \"setenv bootcmd 'fatload mmc 0 ${kernel_addr_r} %s; bootm ${kernel_addr_r} - ${fdt_addr}; booti ${kernel_addr_r} - ${fdt_addr}'\"\n % getattr(self, 'kernel_file', 'uImage'))\n self.expect(self.uprompt)\n self.sendline('saveenv')\n self.expect(self.uprompt)\n self.sendline('boot')\n\n # Linux handles serial better ?\n self.delaybetweenchar = None",
"def test_list_drives_drive_firmware_update(self):\n pass",
"def expand_second_partition(device):\n\n print('Deleting the original boot partition from the thumb drive')\n _delete_partition(device, 1)\n\n print('Expanding the partition. Resizing isn\\'t worth it. Or obvious to do.')\n resize_command = ['sudo', 'parted', device.path, 'resizepart', '2', '\"-1s\"']\n interactive_console(resize_command)\n\n print('Fixing the nibbly bits for the partition itself')\n target_partition = device.partitions(full_paths=True)[0]\n interactive_console(['sudo', 'e2fsck', '-f', target_partition])\n\n print('Fixing ext4 so it goes all the way to the end')\n target_end = device.partition_specs(2)['End']\n interactive_console(['sudo', 'resize2fs', target_partition, target_end])\n\n print('Success!')",
"def get_pending_boot_mode(self):\n headers, uri, bios_settings = self._check_bios_resource(['BootMode'])\n _, _, settings = self._get_bios_settings_resource(bios_settings)\n boot_mode = settings.get('BootMode')\n if boot_mode == 'LegacyBios':\n boot_mode = 'legacy'\n return boot_mode.upper()",
"def prepare_device(self):\n self._get_pairing_status()\n self._update_system_clock()\n self._update_system()\n # Above will block during update process and kill this instance if\n # new software is installed\n\n if self.backend_down:\n self._notify_backend_down()\n else:\n self._display_skill_loading_notification()\n self.bus.emit(Message('mycroft.internet.connected'))\n self._ensure_device_is_paired()\n self._update_device_attributes_on_backend()",
"def do_configure_partition(cls, part, source_params, creator, cr_workdir,\n oe_builddir, bootimg_dir, kernel_dir,\n native_sysroot):\n if not cls.__imgBiosObj:\n cls.__instanciateBIOSClass()\n\n bootloader = creator.ks.bootloader\n\n if not bootloader.configfile:\n splash = os.path.join(cr_workdir, \"/hdd/boot/splash.jpg\")\n if os.path.exists(splash):\n splashline = \"menu background splash.jpg\"\n else:\n splashline = \"\"\n\n syslinux_conf = \"\"\n syslinux_conf += \"PROMPT 0\\n\"\n syslinux_conf += \"TIMEOUT \" + str(bootloader.timeout) + \"\\n\"\n syslinux_conf += \"\\n\"\n syslinux_conf += \"ALLOWOPTIONS 1\\n\"\n syslinux_conf += \"\\n\"\n if splashline:\n syslinux_conf += \"%s\\n\" % splashline\n\n syslinux_conf += \"DEFAULT boot\\n\"\n syslinux_conf += \"LABEL boot\\n\"\n syslinux_conf += \" KERNEL mboot.c32\\n\"\n\n # Split the bootloader args at '---' to separate the Xen args\n # from the Linux kernel args.\n # The Xen args here are defaults; overridden by bootloader append.\n xen_args = \"console=com1,vga com1=115200,8n1\"\n kernel_append = \"\"\n if bootloader.append:\n separator_pos = bootloader.append.find('---')\n if separator_pos != -1:\n xen_args = bootloader.append[:separator_pos]\n kernel_append = bootloader.append[separator_pos+3:]\n else:\n kernel_append = bootloader.append\n\n kernel_args = \"label=boot root=%s %s\" % \\\n (creator.rootdev, kernel_append)\n\n syslinux_conf += \" APPEND /xen.gz %s --- /vmlinuz %s\" % \\\n (xen_args, kernel_args)\n\n initrd = source_params.get('initrd')\n if initrd:\n initrds = initrd.split(';')\n for initrd_file in initrds:\n syslinux_conf += \" --- /%s\" % os.path.basename(initrd_file)\n syslinux_conf += \"\\n\"\n\n logger.debug(\"Writing syslinux config %s/hdd/boot/syslinux.cfg\",\n cr_workdir)\n\n hdddir = \"%s/hdd/boot\" % cr_workdir\n install_cmd = \"install -d %s\" % hdddir\n exec_cmd(install_cmd)\n\n cfg = open(\"%s/hdd/boot/syslinux.cfg\" % cr_workdir, \"w\")\n cfg.write(syslinux_conf)\n cfg.close()\n\n else:\n cls.__imgBiosObj.do_configure_partition(part, source_params,\n creator, cr_workdir,\n oe_builddir, bootimg_dir,\n kernel_dir, native_sysroot)",
"def default_bootmodules(self):\n # FIXME: clean up / separate platform-specific logic\n\n machine = self\n a = machine.get_bootarch()\n\n # set the kernel: elver on x86_64\n if a == \"x86_64\":\n kernel = \"elver\"\n elif a == \"armv7\" or a == \"armv8\":\n kernel = \"cpu_%s\" % machine.get_platform()\n else:\n kernel = \"cpu\"\n\n m = barrelfish.BootModules(machine, prefix=(\"%s/sbin/\" % a), kernel=kernel)\n m.add_kernel_args(machine.get_kernel_args())\n # default for all barrelfish archs\n # hack: cpu driver is not called \"cpu\" for ARMv7 builds\n if a == \"armv7\" :\n m.add_module(\"cpu_%s\" % machine.get_platform(), machine.get_kernel_args())\n elif a == \"armv8\" :\n # remove kernel\n m.set_kernel(None)\n # add cpu driver\n m.set_cpu_driver(kernel, machine.get_kernel_args())\n # add boot driver\n m.set_boot_driver(machine.get_boot_driver())\n else :\n m.add_module(\"cpu\", machine.get_kernel_args())\n\n m.add_module(\"init\")\n m.add_module(\"mem_serv\")\n m.add_module(\"monitor\")\n m.add_module(\"ramfsd\", [\"boot\"])\n m.add_module(\"skb\", [\"boot\"])\n m.add_module(\"proc_mgmt\", [\"boot\"])\n m.add_module(\"spawnd\", [\"boot\"])\n m.add_module(\"startd\", [\"boot\"])\n m.add_module(\"/eclipseclp_ramfs.cpio.gz\", [\"nospawn\"])\n m.add_module(\"/skb_ramfs.cpio.gz\", [\"nospawn\"])\n m.add_module(\"corectrl\", [\"auto\"])\n\n # armv8\n if a == \"armv8\" :\n if not machine._uboot: # no ACPI on U-Boot\n m.add_module(\"acpi\", [\"boot\"])\n m.add_module(\"kaluga\", [\"boot\"])\n\n # SKB and PCI are x86-only for the moment\n if a == \"x86_64\" or a == \"x86_32\":\n # Add acpi with machine-specific extra-arguments\n m.add_module(\"acpi\", [\"boot\"] + machine.get_acpi_args())\n m.add_module(\"routing_setup\", [\"boot\"])\n\n # Add pci with machine-specific extra-arguments\n m.add_module(\"pci\", [\"auto\"] + machine.get_pci_args())\n\n # Add kaluga with machine-specific bus:dev:fun triplet for eth0\n # interface\n m.add_module(\"kaluga\",\n [\"boot\", \"eth0=%d:%d:%d\" % machine.get_eth0()])\n\n # coreboot should work on armv7 now\n if a == \"armv7\":\n m.add_module(\"kaluga\", machine.get_kaluga_args())\n m.add_module(\"driverdomain_pl390\", [\"auto\"])\n m.add_module(\"serial_kernel\", [\"auto\"])\n m.add_module(\"serial_pl011\", [\"auto\"])\n m.add_module(\"int_route\", [])\n\n return m",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"cat /proc/uptime\", \"hostname\", \"show version\"], None, 'text')",
"def load_kernel_module(params) -> None:\n print(\"Loading kernel module...\")\n os.system(\"modprobe -r v4l2loopback >/dev/null 2>&1\")\n cmd = \"modprobe v4l2loopback devices=1 video_nr=\" + params['loopback_nr'] + \\\n \" card_label=\" + params['loopback_name'] + \\\n \" exclusive_caps=\" + params['loopback_exclusive'] + \" >/dev/null 2>&1\"\n if os.system(cmd) == 0:\n print(\" Success !\")\n else:\n print(\" Failure !\")",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version\", \"show run hostname\"], None)",
"def resetDeviceStates(self):",
"def inspect_boot_loader(g, device) -> inspect_pb2.InspectionResults:\n\n bios_bootable = False\n uefi_bootable = False\n root_fs = \"\"\n\n try:\n stream = os.popen('gdisk -l {}'.format(device))\n output = stream.read()\n print(output)\n if _inspect_for_hybrid_mbr(output):\n bios_bootable = True\n\n part_list = g.part_list('/dev/sda')\n for part in part_list:\n try:\n guid = g.part_get_gpt_type('/dev/sda', part['part_num'])\n # It covers both GPT \"EFI System\" and BIOS \"EFI (FAT-12/16/32)\".\n if guid == 'C12A7328-F81F-11D2-BA4B-00A0C93EC93B':\n uefi_bootable = True\n # TODO: detect root_fs (b/169245755)\n # It covers \"BIOS boot\", which make a protective-MBR bios-bootable.\n if guid == '21686148-6449-6E6F-744E-656564454649':\n bios_bootable = True\n except Exception:\n continue\n\n except Exception as e:\n print(\"Failed to inspect disk partition: \", e)\n\n return inspect_pb2.InspectionResults(\n bios_bootable=bios_bootable,\n uefi_bootable=uefi_bootable,\n root_fs=root_fs,\n )",
"def test_persistent_group3(dev):\n form(dev[0], dev[1])\n dev[1].request(\"BSS_FLUSH 0\")\n invite_from_cli(dev[0], dev[1])\n dev[1].request(\"BSS_FLUSH 0\")\n invite_from_go(dev[0], dev[1])",
"def is_bootable(self):\n return self.bootable_flag == 0x80",
"def set_bootmodules(self, modules):\n raise NotImplementedError",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def bootloader(self):\n\n self.space.write(assemble(SET, Y, 0xd000))\n self.space.write(assemble(SET, J, 0x5))\n self.space.write(assemble(SET, PC, [J]))\n\n # Allocate space for the address of QUIT.\n self.space.write(\"\\x00\\x00\")\n\n # Allocate space for STATE.\n self.STATE = self.space.tell()\n self.space.write(\"\\x00\\x00\")\n\n # And HERE.\n self.HERE = self.space.tell()\n self.space.write(\"\\x00\\x00\")\n\n # And LATEST, too.\n self.LATEST = self.space.tell()\n self.space.write(\"\\x00\\x00\")\n\n # Don't forget FB.\n self.FB = self.space.tell()\n self.space.write(\"\\x80\\x00\")\n\n # NEXT. Increment IP and move through it.\n ucode = assemble(ADD, J, 0x1)\n ucode += assemble(SET, PC, [J])\n self.prim(\"next\", ucode)\n\n # EXIT. Pop RSP into IP and then call NEXT.\n ucode = POPRSP(J)\n ucode += assemble(SET, PC, self.asmwords[\"next\"])\n self.prim(\"exit\", ucode)\n\n # ENTER. Save IP to RSP, dereference IP to find the caller, enter the\n # new word, call NEXT.\n ucode = PUSHRSP(J)\n ucode += assemble(SET, J, [J])\n ucode += assemble(SET, PC, self.asmwords[\"next\"])\n self.prim(\"enter\", ucode)",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show system uptime|display json\",\n \"show version\"], None, 'mixed')",
"def randomBoots():\n return random.choice(BOOTS)",
"def get_boot_mode():\n boot_mode = 'Legacy'\n try:\n reg_key = winreg.OpenKey(\n winreg.HKEY_LOCAL_MACHINE, r'System\\CurrentControlSet\\Control')\n reg_value = winreg.QueryValueEx(reg_key, 'PEFirmwareType')[0]\n if reg_value == 2:\n boot_mode = 'UEFI'\n except:\n boot_mode = 'Unknown'\n\n return boot_mode",
"def main():\n\n # a quick way to verify the version\n if getscriptversion:\n print('This script is running version: ' + scriptversion)\n exit(0)\n\n # verify that environmental and script requirements are met\n requirements()\n\n # pretty the screen up\n clear()\n # do the MD5 checksum\n checkmd5sum()\n if not user or not password:\n getcreds()\n\n # if device_file is provided parse the lines into a list of devices\n if device_file:\n with open(device_file) as line:\n devices = line.readlines()\n devices = [x.strip() for x in devices]\n else:\n devices = args.devices.split(\",\")\n\n for device in devices:\n\n device = Acos(device)\n print('')\n print('')\n print(dev_addr + ' ' + '{:*^100}'.format('Begin upgrade log for ' + dev_addr))\n print(dev_addr + ' ' + '{:*^100}'.format('Performing pre-upgrade checks'))\n\n # check if the device is online before running\n status = device.checkstatus()\n if status == 'FAIL':\n continue\n\n # authenticate to the device\n response = device.axapi_authenticate(user, password)\n if response == 'FAIL':\n continue\n # get the device hostname\n device.get_hostname()\n\n # get the currently running version\n version = device.get_running_ver()\n\n print(dev_addr + ' ' + '{:*^100}'.format(' Performing upgrade'))\n\n # if we are running 4.1.0 we have to use a different upgrade method\n if '4.1.0' in version:\n response = device.gui_upgrade(user, password)\n if response == 'FAIL':\n continue\n # for other versions just use the normal method\n else:\n response = device.upgrade()\n if response == 'FAIL':\n continue\n bootvar = device.get_bootvar()\n\n # if the user has specified they'd like to update the boot variable\n if updatebootvar:\n # why do work that we don't have to\n if partition in bootvar:\n print(dev_addr + ' Bootvar update requested, but not necessary, device already set to boot from ' + partition)\n # if you're not already set to boot from the partition we installed to, update the bootvar\n else:\n device.update_bootvar()\n # if the user wants to reboot to initialize the new code reboot the box\n if reboot:\n device.reboot()\n # if the user wants to speed up the script, then just skip monitoring them\n if dontwaitforreturn:\n print(dev_addr + ' Skipping post-upgrade verification at user request')\n continue\n # otherwise you probably want to make sure the box comes up first\n else:\n device.reboot_monitor()\n if not reboot:\n print(dev_addr + '{:*^100}'.format('NOTICE NOTICE NOTICE'))\n print(dev_addr + 'You have requested the device not reboot, in order to initialize the new code you will need to reboot the device')\n # if you install to a partition the device won't reboot to, we probably want to stop you from shooting yourself in the foot\n elif not partition in bootvar:\n print(dev_addr + '{:*^100}'.format('NOTICE NOTICE NOTICE'))\n print(dev_addr + ' You have chosen to install to the partition that the device does not currently boot from.')\n print(dev_addr + ' If you wish for the device to run the new code upon reboot you need to update the boot variable manually.')\n if reboot:\n print(dev_addr + ' You have also requested a reboot which will not invoke the new code, SKIPPING REBOOT')\n elif reboot:\n device.reboot()\n # if the user wants to speed up the script, then just skip monitoring them\n if dontwaitforreturn:\n print(dev_addr + ' Skipping post-upgrade verification at user request')\n continue\n # otherwise you probably want to make sure the box comes up first\n else:\n device.reboot_monitor()\n # technically we could still use the old 
AXAPI token, however for sake of code clarity we're going to do a quick log off then back on\n # the alternative would be having to shove the remaining steps below into each of the appropriate loops making this a bit more\n # spaghettish than it already is\n else:\n device.axapi_logoff()\n\n print(dev_addr + ' ' + '{:*^100}'.format(' Performing post-upgrade checks'))\n\n # since it is very likely the box has rebooted, and our old token is gone, lets get a new one\n response = device.axapi_authenticate(user, password)\n if response == 'FAIL':\n continue\n\n # find out where the device was booted from\n bootdefault = device.get_bootvar()\n\n # get the version of the currently booted partition\n device.get_ver(bootdefault)\n\n # get the current boot variable\n device.get_bootvar()\n\n # get the current running version\n device.get_running_ver()\n\n # log off\n device.axapi_logoff()\n print(dev_addr + ' ' + '{:*^100}'.format(' End upgrade log for ' + dev_addr))",
"def get_boot_mode(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def reboot(host=None):\r\n if host:\r\n host.reboot()",
"def invalidateBoot (self):\n if self.isBootValid(): \n self.mountBootPartition()\n installFilePath = self._getBootInstallationFilePath()\n if os.path.exists(installFilePath):\n os.remove(installFilePath)\n\n #self._runCommandRaiseIfFail(\"rm -rf %s\" % (self._getBootInstallationFilePath()))\n self._log(\"invalidate-boot\").notice(\"boot partition is invalidated\")\n else:\n self._log(\"invalidate-boot\").notice(\"boot partition is already invalid\")",
"def update_firmware(self):\n self.execute_command(CMD_UPDATE_FIRMWARE)",
"def set_console_xen(self):\n print \"\"\n self.exec_cmd(\"echo \\\"xvc0\\\" >> %s/etc/securetty\" % self.rep_vhosts_vm) \n if os.path.isfile(\"%s/etc/inittab\" % self.rep_vhosts_vm):\n self.exec_cmd(\"echo \\\"7:2345:respawn:/sbin/getty 38400 xvc0\\\" >> %s/etc/inittab\" % self.rep_vhosts_vm) \n\n if os.path.isfile(\"%s/etc/event.d/tty1\" % self.rep_vhosts_vm):\n self.exec_cmd(\"cp %s/etc/event.d/tty1 %s/etc/event.d/xvc0\" % (self.rep_vhosts_vm,self.rep_vhosts_vm))\n self.exec_cmd(\"sed -i \\\"s@tty1@xvc0@\\\" %s/etc/event.d/xvc0\" % self.rep_vhosts_vm)\n \n if os.path.isfile(\"%s/etc/init/tty1.conf\" % self.rep_vhosts_vm):\n self.exec_cmd(\"cp %s/etc/init/tty1.conf %s/etc/init/xvc0.conf\" % (self.rep_vhosts_vm,self.rep_vhosts_vm))\n self.exec_cmd(\"sed -i \\\"s@tty1@xvc0@\\\" %s/etc/init/xvc0.conf\" % self.rep_vhosts_vm)",
"def get_boot_order(rfo, api=1, unit=1):\n\n url = f\"/redfish/v{api}/systems/{unit}/bios\"\n res = rfo.get(url)\n if res.status != 200:\n print(f\"Error: {res.status}: {res.read}\")\n return \"XXX\"\n booturl = res.dict['Oem']['Hpe']['Links']['Boot']['@odata.id']\n res = rfo.get(booturl)\n if res.status != 200:\n print(f\"HTTP Fail Status: {res.status} - {res.read}\")\n return \"XXX\"\n return res.dict['DefaultBootOrder']",
"def setUbootFlashAddress(self):\n\t\tself.ubootflashaddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\treturn None",
"def boot(self, boot_node_request):\n return self.client.call('POST',\n self.name + 'boot', payload=boot_node_request)",
"def modif_fstab(self):\n print \"preparation du fichier fstab\"\n self.exec_cmd(\"cp %s/etc/fstab %s/etc/fstab.pre.p2v\" % (self.rep_vhosts_vm,self.rep_vhosts_vm))\n self.exec_cmd(\"cp %s/etc/fstab_without_uuid %s/etc/fstab\" % (self.rep_vhosts_vm,self.rep_vhosts_vm))\n line = open(\"/vhosts/\"+ name_vm_dest +\"/etc/fstab_without_uuid\",\"r\").read()\n for i in self.tri(partitions[type_p2v]):\n line = line.replace(partitions[type_p2v][i][0],\"/dev/%s\" % i,1)\n fichier = open(\"/vhosts/\"+ name_vm_dest +\"/etc/fstab\",\"w\")\n fichier.write(line)\n fichier.close()",
"def _change_secure_boot_settings(self, property, value):\n system = self._get_host_details()\n # find the BIOS URI\n if ('links' not in system['Oem']['Hp'] or\n 'SecureBoot' not in system['Oem']['Hp']['links']):\n msg = (' \"SecureBoot\" resource or feature is not '\n 'supported on this system')\n raise exception.IloCommandNotSupportedError(msg)\n\n secure_boot_uri = system['Oem']['Hp']['links']['SecureBoot']['href']\n\n # Change the property required\n new_secure_boot_settings = {}\n new_secure_boot_settings[property] = value\n\n # perform the patch\n status, headers, response = self._rest_patch(\n secure_boot_uri, None, new_secure_boot_settings)\n\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n # Change the bios setting as a workaround to enable secure boot\n # Can be removed when fixed for Gen9 snap2\n val = self._get_bios_setting('CustomPostMessage')\n val = val.rstrip() if val.endswith(\" \") else val+\" \"\n self._change_bios_setting({'CustomPostMessage': val})",
"def test_patch_pci_device(self):\n pass",
"def flash_uboot(self, uboot):\n common.print_bold(\"\\n===== Flashing bootloader (and u-boot) =====\\n\")\n filename = self.prepare_file(uboot)\n size = self.tftp_get_file_uboot(self.uboot_ddr_addr, filename)\n\n self.sendline('mmc part')\n # get offset of ext (83) partition after a fat (0c) partition\n self.expect(r'\\r\\n\\s+\\d+\\s+(\\d+)\\s+(\\d+).*0c( Boot)?\\r\\n')\n start = hex(int(self.match.groups()[0]))\n if (int(size) != int(self.match.groups()[1]) * 512):\n raise Exception(\"Partition size does not match, refusing to flash\")\n self.expect(self.uprompt)\n count = hex(int(size / 512))\n self.sendline('mmc erase %s %s' % (start, count))\n self.expect(self.uprompt)\n self.sendline('mmc write %s %s %s' %\n (self.uboot_ddr_addr, start, count))\n self.expect(self.uprompt, timeout=120)\n\n self.reset()\n self.wait_for_boot()\n self.setup_uboot_network()",
"def test_update_pci_device(self):\n pass",
"def _post_init(self):\n if WIN:\n self._find_devices_win()\n elif MAC:\n self._find_devices_mac()\n else:\n self._find_devices()\n self._update_all_devices()\n if NIX:\n self._find_leds()",
"def reboot(self, node):",
"def setup_usb(self):\n global DEVICE\n global epBulkWriter\n global epBulkReader\n global VID\n global PID\n\n DEVICE = usb.core.find(idVendor=0x2AB9,idProduct=0xFFFF)\n if DEVICE is None:#If not a LVPM, look for an HVPM.\n DEVICE = usb.core.find(idVendor=0x04d8,idProduct=0x000b)\n VID = '0x4d8'\n PID = '0xb'\n if \"Linux\" == platform.system():\n try:\n DEVICE.detach_kernel_driver(0)\n except:\n pass # already unregistered\n DEVICE.set_configuration()\n\n cfg = DEVICE.get_active_configuration()\n intf = cfg[(0,0)]\n\n epBulkWriter = usb.util.find_descriptor(\n intf,\n custom_match = \\\n lambda e: \\\n usb.util.endpoint_direction(e.bEndpointAddress) == \\\n usb.util.ENDPOINT_OUT)\n epBulkReader = usb.util.find_descriptor(\n intf,\n custom_match = \\\n lambda e: \\\n usb.util.endpoint_direction(e.bEndpointAddress) == \\\n usb.util.ENDPOINT_IN)",
"def get_boot_record(disk):\n\n #TODO\n return \"Unknown\", \"Unknown\"",
"def validateBoot (self):\n self.mountBootPartition()\n stateDictionary = self._createBootInstallationDictionary()\n self._writeDictionaryAsJson(stateDictionary, self._getBootInstallationFilePath())\n self._log(\"validate-boot\").notice(\"boot partition is validated\")",
"def reboot(self):\n raise NotImplementedError",
"def reboot_monitor(self):\n # define cross-platform /dev/null\n devnull = open(os.devnull, 'w')\n\n # if the OS is windows\n if os.name == 'nt':\n ping = ['ping', '-n', '1', self.device]\n\n # if the OS is posix\n else:\n ping = ['ping', '-c', '1', self.device]\n\n print(self.device + ' Waiting for device to finish rebooting, please wait', end='', flush=True)\n time.sleep(10)\n count = 1\n successcount = 0\n while count < 300:\n print('.', end='', flush=True)\n ping_call = subprocess.Popen(ping, stdout=devnull)\n returncode = ping_call.wait()\n # we need multiple successes to allow this to work, otherwise a single response while the box is still initializing can bite us\n if returncode == 0:\n successcount = successcount + 1\n if successcount == 5:\n break\n time.sleep(1)\n count = count + 1\n\n print('')\n if count == 300:\n print(self.device + ' Device has not responded to 300 pings, please manually check device')\n print(self.device + ' Exiting...')\n else:\n print(self.device + ' Device is now initializing')\n time.sleep(10)\n print(self.device + ' Device has finished rebooting')",
"def systemOff():\n # Updated 11/19/16\n I2C.write_byte_data(Valve_bus, pinOut_O, 0x00 )\n I2C.write_byte_data(Pump_Mag_bus, pinOut_O, 0x00)",
"def change_device(self):\n if self.state.ser:\n UsbHost.close_port(self.state.ser)\n device = self.CBDevices.currentText()\n if device:\n comport = self.devices[int(device)]\n self.state.ser = UsbHost.open_port(comport)\n if not self.state.ser:\n self.statusbar.showMessage(\"Выбранный порт более недоступен. Произведите повторный поиск\")\n return\n answer: str = self.UsbHost.send_command(self.state.ser, \"ping\", device)\n if answer in wrong_answers:\n error_message(\"Выбранный девайс не отвечает\")\n self.statusbar.showMessage(\"Выбранный порт более недоступен. Произведите повторный поиск\")\n return\n self.state.device_id = int(device)\n self.state.comport = comport\n self.create_message()\n self.set_controls_state(True)\n self.BtnL1.click()\n self.BtnAttenuate.click()\n self.SpinDACValue.setValue(35000)\n self.BtnSetDACValue.click()\n self.set_sw(\"0 1\")",
"def partition_session(self):\n if self.user['drive']['name'] is not None:\n\n # Set root size\n if self.user['root_freespace'] is True:\n self.user['root_size'] = 'freespace'\n\n # Set partition parameters\n self.user['partitions'] = {'name': ['boot', 'root'],\n 'size': [self.user['boot_size'],\n self.user['root_size']],\n 'filesystem': ['fat32', 'ext4'],\n 'mountpoint': ['/mnt/boot', '/mnt'],\n 'mountorder': [1, 0]}\n\n # Set swap size and filesystem\n if 'Swap' in self.user['optional_partitions']:\n self.user['partitions']['size'].insert(1, self.user['swap_size'])\n self.user['partitions']['filesystem'].insert(1, 'swap')\n\n # Set home size and filesystem\n if 'Home' in self.user['optional_partitions']:\n if self.user['home_freespace'] is True:\n self.user['home_size'] = 'freespace'\n self.user['partitions']['size'].append(self.user['home_size'])\n self.user['partitions']['filesystem'].append('ext4')\n\n # Custom partitions\n else:\n\n # Set partition parameters\n self.user['partitions'] = {\n 'name': ['boot', 'root'],\n 'drive_id': [self.user['boot_id'].split()[0],\n self.user['root_id'].split()[0]],\n 'mountpoint': ['/mnt/boot', '/mnt'],\n 'mountorder': [1, 0]}\n\n # Set swap drive ID\n if self.user['swap_id'] is not None:\n self.user['partitions']['drive_id'].insert(\n 1, self.user['swap_id'].split()[0])\n\n # Set home drive ID\n if self.user['home_id'] is not None:\n self.user['partitions']['drive_id'].append(\n self.user['home_id'].split()[0])\n\n # Set swap parameters\n if ('Swap' in self.user['optional_partitions']) or \\\n (self.user['swap_id'] is not None):\n self.user['partitions']['name'].insert(1, 'swap')\n self.user['partitions']['mountpoint'].insert(1, 'swap')\n self.user['partitions']['mountorder'].insert(1, 2)\n\n # Set home parameters\n if 'Home' in self.user['optional_partitions'] or \\\n (self.user['home_id'] is not None):\n self.user['partitions']['name'].append('home')\n self.user['partitions']['mountpoint'].append('/mnt/home')\n self.user['partitions']['mountorder'].append(3)"
] | [
"0.7011512",
"0.6966963",
"0.68623865",
"0.65245754",
"0.6446166",
"0.61577415",
"0.6123408",
"0.608467",
"0.5957088",
"0.59353226",
"0.5923807",
"0.5921193",
"0.5887735",
"0.5859409",
"0.58277696",
"0.58038384",
"0.5781227",
"0.5723276",
"0.57024425",
"0.56310064",
"0.55985206",
"0.5594711",
"0.5590981",
"0.5589681",
"0.5548229",
"0.55426663",
"0.5428197",
"0.5400307",
"0.53941864",
"0.5383077",
"0.53666264",
"0.5338894",
"0.5325629",
"0.5304778",
"0.52771014",
"0.5259521",
"0.52477187",
"0.52413666",
"0.5238524",
"0.5235995",
"0.5225717",
"0.5223994",
"0.5219495",
"0.52164865",
"0.52158827",
"0.51917577",
"0.5178385",
"0.51771116",
"0.5172786",
"0.5157806",
"0.51542026",
"0.515285",
"0.51308054",
"0.51283145",
"0.51277995",
"0.51212454",
"0.5099639",
"0.508112",
"0.507887",
"0.50786406",
"0.50761366",
"0.5058519",
"0.5057448",
"0.5040599",
"0.50375706",
"0.50371295",
"0.5023613",
"0.501615",
"0.500212",
"0.5001622",
"0.49941483",
"0.4976207",
"0.49650687",
"0.49531394",
"0.4951963",
"0.49514145",
"0.49499524",
"0.49490294",
"0.49466744",
"0.4936329",
"0.4933519",
"0.4896061",
"0.48880985",
"0.48758328",
"0.48562217",
"0.4854396",
"0.48526233",
"0.48363757",
"0.48334247",
"0.4831035",
"0.48309824",
"0.48276302",
"0.4819823",
"0.48190546",
"0.48180178",
"0.48109096",
"0.48088986",
"0.48081052",
"0.48038912",
"0.47868747"
] | 0.6388082 | 5 |
Changes the persistent boot device order for the host | def update_persistent_boot(self, device_type=[], mac=None):
# Check if the input is valid
for item in device_type:
if item.upper() not in DEVICE_COMMON_TO_RIS:
raise exception.IloInvalidInputError("Invalid input. Valid "
"devices: NETWORK, HDD,"
" ISCSI or CDROM.")
self._update_persistent_boot(device_type, persistent=True, mac=mac) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_one_time_boot(self, device, mac=None):\n self._update_persistent_boot([device], persistent=False, mac=mac)",
"def _update_persistent_boot(self, device_type=[], persistent=False,\n mac=None):\n tenure = 'Once'\n new_device = device_type[0]\n # If it is a standard device, we need to convert in RIS convention\n if device_type[0].upper() in DEVICE_COMMON_TO_RIS:\n new_device = DEVICE_COMMON_TO_RIS[device_type[0].upper()]\n\n if persistent:\n tenure = 'Continuous'\n\n systems_uri = \"/rest/v1/Systems/1\"\n # Need to set this option first if device is 'UefiTarget'\n if new_device is 'UefiTarget':\n if not mac:\n msg = ('Mac is needed for iscsi uefi boot')\n raise exception.IloInvalidInputError(msg)\n\n headers, bios_uri, bios_settings = self._check_bios_resource()\n # Get the Boot resource and Mappings resource.\n boot_settings = self._get_bios_boot_resource(bios_settings)\n StructuredBootString = None\n\n for boot_setting in boot_settings['BootSources']:\n if(mac.upper() in boot_setting['UEFIDevicePath'] and\n 'iSCSI' in boot_setting['UEFIDevicePath']):\n StructuredBootString = boot_setting['StructuredBootString']\n break\n if not StructuredBootString:\n msg = ('MAC provided is Invalid \"%s\"' % mac)\n raise exception.IloInvalidInputError(msg)\n\n new_boot_settings = {}\n new_boot_settings['Boot'] = {'UefiTargetBootSourceOverride':\n StructuredBootString}\n status, headers, response = self._rest_patch(systems_uri, None,\n new_boot_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n new_boot_settings = {}\n new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': tenure,\n 'BootSourceOverrideTarget': new_device}\n status, headers, response = self._rest_patch(systems_uri, None,\n new_boot_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def set_boot_device(self, device, persistent=False):\n\n operation = \"set_boot_device\"\n try:\n self.sp_manager.create_boot_policy()\n self.sp_manager.set_boot_device(device)\n\n except UcsException as ex:\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def test_update_bios_boot_mode(self):\n pass",
"def set_boot_order(profile_obj):\n status = True\n logger._log_to_console_and_log_file(\"\")\n logger._log_to_console_and_log_file(\"### Testing the 'Boot Settings' session ###\")\n logger._log_to_console_and_log_file(\"- Select the 'Legacy BIOS' mode\")\n createprofile_elements = ProfileContainer(ProfileContainerType.ADD)\n __select_value_from_a_profile_combo_box(createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE, createprofile_elements.ID_COMBO_PROFILE_BOOT_MODE_LIST % \"Legacy BIOS\")\n # Set invalid values\n logger._log_to_console_and_log_file(\"Testing using invalid values\")\n for profile in profile_obj:\n items = [[\"CD\", profile.cd], [\"USB\", profile.usb], [\"HardDisk\", profile.harddisk]]\n for data in items:\n ui_lib.wait_for_element_and_input_text(\"name=%s\" % data[0], data[1])\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_CREATE_SERVER_PROFILE_FORM)\n if data[0] == \"HardDisk\":\n data[0] = \"Hard Disk\"\n if ui_lib.wait_for_element_text(FusionServerProfilesPage.ID_BOOT_ORDER_POSITION % data[0], data[1], timeout=1):\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was not cleared to the default value and persisted as '\" + str(data[1]) + \"'\")\n status = False\n else:\n logger._log_to_console_and_log_file(\"- \" + \"'\" + data[0] + \"'\" + \" field was correctly cleared to the default value\")\n return status",
"def test_patch_bios_boot_mode(self):\n pass",
"def boot(self):\n\n pass",
"def update_sdcard_boot_commands(device):\n mount_dir = mkdtemp()\n\n boot_partition = device.partitions(full_paths=True)[0]\n\n mount_command = ['sudo', 'mount', boot_partition, mount_dir]\n\n print(f'Mounting SD Card partition {boot_partition} to temp directory {mount_dir}')\n interactive_console(mount_command)\n\n # Note- this sed command is what the target mounts will look like\n # I'm not messing with the blk_ids of our devices as we know them\n # here.\n\n sed_command = [\n 'sudo',\n 'sed',\n '-i',\n '-E',\n 's#root=[^ ]+#root=/dev/sda2#',\n os.path.join(mount_dir, 'cmdline.txt')]\n console(sed_command)\n sed_command = [\n 'sudo',\n 'sed',\n '-i',\n 's# init=/usr/lib/raspi-config/init_resize.sh##',\n os.path.join(mount_dir, 'cmdline.txt')]\n\n print('Modifying init command line')\n console(sed_command)\n\n print('Successfully modified! Unmounting.')\n umount_command = ['sudo', 'umount', mount_dir]\n interactive_console(umount_command)\n\n print('Cleaning up mounted dir')\n os.rmdir(mount_dir)",
"def boot(self, boot):\n\n self._boot = boot",
"def _get_persistent_boot_devices(self):\n # Check if the BIOS resource if exists.\n headers_bios, bios_uri, bios_settings = self._check_bios_resource()\n\n # Get the Boot resource.\n boot_settings = self._get_bios_boot_resource(bios_settings)\n\n # Get the BootSources resource\n try:\n boot_sources = boot_settings['BootSources']\n except KeyError:\n msg = (\"BootSources resource not found.\")\n raise exception.IloError(msg)\n\n try:\n boot_order = boot_settings['PersistentBootConfigOrder']\n except KeyError:\n msg = (\"PersistentBootConfigOrder resource not found.\")\n raise exception.IloCommandNotSupportedError(msg)\n\n return boot_sources, boot_order",
"def _boot_using_bootmon(self, target):\n self.logger.debug('Booting using bootmon.')\n\n try:\n self._wait_for_vemsd_mount(target, timeout=20)\n except DeviceError:\n # OK, something's wrong. Reboot the board and try again.\n self.logger.debug('VEMSD not mounted, attempting to power cycle device.')\n target.sendline(' ')\n state = target.expect(['Cmd> ', self.config.bootmon_prompt, self.android_prompt]) # pylint: disable=E1101\n\n if state == 0 or state == 1:\n # Reboot - Bootmon\n target.sendline('reboot')\n target.expect('Powering up system...')\n elif state == 2:\n target.sendline('reboot -n')\n target.expect('Powering up system...')\n else:\n raise DeviceError('Unexpected board state {}; should be 0, 1 or 2'.format(state))\n\n self._wait_for_vemsd_mount(target)\n\n self._setup_before_reboot()\n\n # Reboot - Bootmon\n self.logger.debug('Rebooting into bootloader...')\n open(os.path.join(self.config.root_mount, 'reboot.txt'), 'a').close()\n target.expect('Powering up system...')\n target.expect(self.config.bootmon_prompt)\n\n # Wait for VEMSD to mount\n self._wait_for_vemsd_mount(target)\n\n #Boot Linux - Bootmon\n target.sendline('fl linux fdt ' + self.config.dtb)\n target.expect(self.config.bootmon_prompt)\n target.sendline('fl linux initrd ' + self.config.initrd)\n target.expect(self.config.bootmon_prompt)\n #Workaround TC2 bootmon serial issue for loading large initrd blob\n target.sendline(' ')\n target.expect(self.config.bootmon_prompt)\n target.sendline('fl linux boot ' + self.config.kernel + self.config.kernel_arguments)",
"def load_devices():",
"def step7(self):\n for indx, mr in enumerate(self.mrs):\n self.log.info(\"Set boot drive on controller:%d\"\n % (mr.ctrl_id))\n for vd in self.mr_vds[indx]:\n if (int(mr.cli.bootdrive_vd_get()) != vd):\n mr.cli.bootdrive_vd_set(vd_id=self.mr_vds[indx][indx],\n setting=\"On\")\n break",
"def set_boot_device(self, task, device, persistent=False):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified.\") % device)\n\n uefi_mode = (\n boot_mode_utils.get_boot_mode(task.node) == 'uefi')\n\n # disable 60 secs timer\n timeout_disable = \"0x00 0x08 0x03 0x08\"\n ipmitool.send_raw(task, timeout_disable)\n\n # note(naohirot):\n # Set System Boot Options : ipmi cmd '0x08', bootparam '0x05'\n #\n # $ ipmitool raw 0x00 0x08 0x05 data1 data2 0x00 0x00 0x00\n #\n # data1 : '0xe0' persistent + uefi\n # '0xc0' persistent + bios\n # '0xa0' next only + uefi\n # '0x80' next only + bios\n # data2 : boot device defined in the dict _BOOTPARAM5_DATA2\n\n bootparam5 = '0x00 0x08 0x05 %s %s 0x00 0x00 0x00'\n if persistent:\n data1 = '0xe0' if uefi_mode else '0xc0'\n else:\n data1 = '0xa0' if uefi_mode else '0x80'\n data2 = _BOOTPARAM5_DATA2[device]\n\n cmd8 = bootparam5 % (data1, data2)\n ipmitool.send_raw(task, cmd8)\n else:\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified. \"\n \"Current iRMC firmware condition doesn't support IPMI \"\n \"but Redfish.\") % device)\n super(ipmitool.IPMIManagement, self).set_boot_device(\n task, device, persistent)",
"def magma_device_sync():\n\n _libmagma.magma_device_sync()",
"def setup_device(device):\n try:\n # Gets around \"Resource busy\" errors\n device.detach_kernel_driver(0)\n except Exception:\n pass\n device.set_configuration()",
"def load_device():",
"def uefi_reorder_loaders(grubcfg, target):\n if grubcfg.get('reorder_uefi', True):\n efi_output = util.get_efibootmgr(target)\n currently_booted = efi_output.get('current', None)\n boot_order = efi_output.get('order', [])\n if currently_booted:\n if currently_booted in boot_order:\n boot_order.remove(currently_booted)\n boot_order = [currently_booted] + boot_order\n new_boot_order = ','.join(boot_order)\n LOG.debug(\n \"Setting currently booted %s as the first \"\n \"UEFI loader.\", currently_booted)\n LOG.debug(\n \"New UEFI boot order: %s\", new_boot_order)\n with util.ChrootableTarget(target) as in_chroot:\n in_chroot.subp(['efibootmgr', '-o', new_boot_order])\n else:\n LOG.debug(\"Skipped reordering of UEFI boot methods.\")\n LOG.debug(\"Currently booted UEFI loader might no longer boot.\")",
"def power_on_post_boot(self):\n self.log.info(\"post-power-up for boot to flashed image\")\n with self.console_takeover() as (descr, log):\n e = pexpect.fdpexpect.fdspawn(descr, logfile = log, timeout = 20)\n ttbl.target.expect_send_sequence(\n self.log, e, self.kernel_boot_cmd_list)\n # From here on, the console returns to normal",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def default_device_names_for_instance(self,\n instance,\n root_device_name,\n *block_device_lists):\n self.prep_for_spawn(context=None, instance=instance)",
"def get_persistent_boot_device(self):\n system = self._get_host_details()\n try:\n # Return boot device if it is persistent.\n if system['Boot']['BootSourceOverrideEnabled'] == 'Continuous':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n # Check if we are in BIOS boot mode.\n # There is no resource to fetch boot device order for BIOS boot mode\n if not self._is_boot_mode_uefi():\n return None\n\n # Get persistent boot device order for UEFI\n boot_sources, boot_devices = self._get_persistent_boot_devices()\n\n boot_string = \"\"\n try:\n for source in boot_sources:\n if (source[\"StructuredBootString\"] == boot_devices[0]):\n boot_string = source[\"BootString\"]\n break\n except KeyError as e:\n msg = \"get_persistent_boot_device failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)\n\n if 'HP iLO Virtual USB CD' in boot_string:\n return 'CDROM'\n\n elif ('NIC' in boot_string or\n 'PXE' in boot_string or\n \"iSCSI\" in boot_string):\n return 'NETWORK'\n\n elif common.isDisk(boot_string):\n return 'HDD'\n\n else:\n return None",
"def prepare_device(self):\n self._get_pairing_status()\n self._update_system_clock()\n self._update_system()\n # Above will block during update process and kill this instance if\n # new software is installed\n\n if self.backend_down:\n self._notify_backend_down()\n else:\n self._display_skill_loading_notification()\n self.bus.emit(Message('mycroft.internet.connected'))\n self._ensure_device_is_paired()\n self._update_device_attributes_on_backend()",
"def resetDeviceStates(self):",
"def _update_device_attributes_on_backend(self):\n if self.is_paired:\n LOG.info('Sending updated device attributes to the backend...')\n try:\n api = DeviceApi()\n api.update_version()\n except Exception:\n self._notify_backend_down()",
"def _update_all_devices(self):\n self.all_devices = []\n self.all_devices.extend(self.keyboards)\n self.all_devices.extend(self.mice)\n self.all_devices.extend(self.gamepads)\n self.all_devices.extend(self.other_devices)",
"def bootloader() -> NoReturn:",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def _turn_on_dev_mode(self):\n if self._device is not None:\n self._char_write(self._BLE_SERVICE_ANTI_DOS,\n [ord(c) for c in self._ANTI_DOS_MESSAGE])\n self._char_write(self._BLE_SERVICE_TX_POWER,\n [self._TX_POWER_VALUE])\n # Sending 0x01 to the wake service wakes the sphero.\n self._char_write(self._BLE_SERVICE_WAKE, [0x01])",
"async def reload_platform(self) -> None:",
"def make_BootSettings(order, manageBoot=False):\n return {'manageBoot': manageBoot,\n 'order': order\n }",
"def step4(self):\n for mr in self.mrs:\n self.log.info(\"Boot drive of controller: %d is %d\"\n % (mr.ctrl_id, mr.cli.bootdrive_vd_get()))",
"def _import_devices(self) -> None:\n self._devices.clear()\n\n # Exctract all devices\n for device in self._udev.list_devices():\n # Skip devices without mapping\n if not device.device_node or self.helper.hide_virtual_device(device):\n continue\n self._devices[device.sys_name] = Device.import_udev(device)",
"def test_list_drives_drive_firmware_update(self):\n pass",
"def test_update_pci_device(self):\n pass",
"def test_patch_pci_device(self):\n pass",
"def set_pending_boot_mode(self, boot_mode):\n boot_mode = boot_mode.lower()\n if boot_mode not in ['uefi', 'legacy']:\n msg = 'Invalid Boot mode specified'\n raise exception.IloInvalidInputError(msg)\n\n boot_properties = {'BootMode': boot_mode}\n\n if boot_mode == 'legacy':\n boot_properties['BootMode'] = 'LegacyBios'\n else:\n # If Boot Mode is 'Uefi' set the UEFIOptimizedBoot first.\n boot_properties['UefiOptimizedBoot'] = \"Enabled\"\n\n # Change the Boot Mode\n self._change_bios_setting(boot_properties)",
"def main():\n\n # a quick way to verify the version\n if getscriptversion:\n print('This script is running version: ' + scriptversion)\n exit(0)\n\n # verify that environmental and script requirements are met\n requirements()\n\n # pretty the screen up\n clear()\n # do the MD5 checksum\n checkmd5sum()\n if not user or not password:\n getcreds()\n\n # if device_file is provided parse the lines into a list of devices\n if device_file:\n with open(device_file) as line:\n devices = line.readlines()\n devices = [x.strip() for x in devices]\n else:\n devices = args.devices.split(\",\")\n\n for device in devices:\n\n device = Acos(device)\n print('')\n print('')\n print(dev_addr + ' ' + '{:*^100}'.format('Begin upgrade log for ' + dev_addr))\n print(dev_addr + ' ' + '{:*^100}'.format('Performing pre-upgrade checks'))\n\n # check if the device is online before running\n status = device.checkstatus()\n if status == 'FAIL':\n continue\n\n # authenticate to the device\n response = device.axapi_authenticate(user, password)\n if response == 'FAIL':\n continue\n # get the device hostname\n device.get_hostname()\n\n # get the currently running version\n version = device.get_running_ver()\n\n print(dev_addr + ' ' + '{:*^100}'.format(' Performing upgrade'))\n\n # if we are running 4.1.0 we have to use a different upgrade method\n if '4.1.0' in version:\n response = device.gui_upgrade(user, password)\n if response == 'FAIL':\n continue\n # for other versions just use the normal method\n else:\n response = device.upgrade()\n if response == 'FAIL':\n continue\n bootvar = device.get_bootvar()\n\n # if the user has specified they'd like to update the boot variable\n if updatebootvar:\n # why do work that we don't have to\n if partition in bootvar:\n print(dev_addr + ' Bootvar update requested, but not necessary, device already set to boot from ' + partition)\n # if you're not already set to boot from the partition we installed to, update the bootvar\n else:\n device.update_bootvar()\n # if the user wants to reboot to initialize the new code reboot the box\n if reboot:\n device.reboot()\n # if the user wants to speed up the script, then just skip monitoring them\n if dontwaitforreturn:\n print(dev_addr + ' Skipping post-upgrade verification at user request')\n continue\n # otherwise you probably want to make sure the box comes up first\n else:\n device.reboot_monitor()\n if not reboot:\n print(dev_addr + '{:*^100}'.format('NOTICE NOTICE NOTICE'))\n print(dev_addr + 'You have requested the device not reboot, in order to initialize the new code you will need to reboot the device')\n # if you install to a partition the device won't reboot to, we probably want to stop you from shooting yourself in the foot\n elif not partition in bootvar:\n print(dev_addr + '{:*^100}'.format('NOTICE NOTICE NOTICE'))\n print(dev_addr + ' You have chosen to install to the partition that the device does not currently boot from.')\n print(dev_addr + ' If you wish for the device to run the new code upon reboot you need to update the boot variable manually.')\n if reboot:\n print(dev_addr + ' You have also requested a reboot which will not invoke the new code, SKIPPING REBOOT')\n elif reboot:\n device.reboot()\n # if the user wants to speed up the script, then just skip monitoring them\n if dontwaitforreturn:\n print(dev_addr + ' Skipping post-upgrade verification at user request')\n continue\n # otherwise you probably want to make sure the box comes up first\n else:\n device.reboot_monitor()\n # technically we could still use the old 
AXAPI token, however for sake of code clarity we're going to do a quick log off then back on\n # the alternative would be having to shove the remaining steps below into each of the appropriate loops making this a bit more\n # spaghettish than it already is\n else:\n device.axapi_logoff()\n\n print(dev_addr + ' ' + '{:*^100}'.format(' Performing post-upgrade checks'))\n\n # since it is very likely the box has rebooted, and our old token is gone, lets get a new one\n response = device.axapi_authenticate(user, password)\n if response == 'FAIL':\n continue\n\n # find out where the device was booted from\n bootdefault = device.get_bootvar()\n\n # get the version of the currently booted partition\n device.get_ver(bootdefault)\n\n # get the current boot variable\n device.get_bootvar()\n\n # get the current running version\n device.get_running_ver()\n\n # log off\n device.axapi_logoff()\n print(dev_addr + ' ' + '{:*^100}'.format(' End upgrade log for ' + dev_addr))",
"def reboot(host=None):\r\n if host:\r\n host.reboot()",
"def reset_secure_boot_keys(self):\n if self._is_boot_mode_uefi():\n self._change_secure_boot_settings('ResetToDefaultKeys', True)\n else:\n msg = ('System is not in UEFI boot mode. \"SecureBoot\" related '\n 'resources cannot be changed.')\n raise exception.IloCommandNotSupportedInBiosError(msg)",
"def reboot(self):\n raise NotImplementedError",
"def boot_config():\n # quick check to grab a config file from /boot partition.\n # this function helps users who cannot SSH/access the Pi,\n # but can access the microSD card\n if os.path.exists(BOOT_CONFIG_PATH):\n print(\"Configuration loaded from /boot directory.\")\n with open(BOOT_CONFIG_PATH) as boot_file:\n with open(CONFIG_FILE_PATH, 'w+') as config_file:\n for line in boot_file:\n config_file.write(line)",
"def modify_devices(self):\n\n for i in self._nodes.items():\n node = i[1]\n devices = node[\"devices\"]\n other_devices = devices[\"other_devices\"]\n kernel_devices = devices[\"kernel_devices\"]\n dpdk_devices = devices[\"dpdk_devices\"]\n\n if other_devices:\n self._modify_other_devices(\n node, other_devices, kernel_devices, dpdk_devices\n )\n\n # Get the devices again for this node\n self._get_device(node)\n devices = node[\"devices\"]\n kernel_devices = devices[\"kernel_devices\"]\n dpdk_devices = devices[\"dpdk_devices\"]\n\n klen = len(kernel_devices)\n if klen > 0:\n print(\"\\nThese devices are safe to be used with VPP.\\n\")\n VppPCIUtil.show_vpp_devices(kernel_devices)\n question = (\n \"\\nWould you like to use any of these \" \"device(s) for VPP [y/N]? \"\n )\n answer = self._ask_user_yn(question, \"n\")\n if answer == \"y\":\n vppd = {}\n for dit in kernel_devices.items():\n dvid = dit[0]\n device = dit[1]\n question = \"Would you like to use device {} \".format(dvid)\n question += \"for VPP [y/N]? \"\n answer = self._ask_user_yn(question, \"n\")\n if answer == \"y\":\n vppd[dvid] = device\n for dit in vppd.items():\n dvid = dit[0]\n device = dit[1]\n if (\n \"unused\" in device\n and len(device[\"unused\"]) != 0\n and device[\"unused\"][0] != \"\"\n ):\n driver = device[\"unused\"][0]\n question = \"Would you like to bind the driver {} for {} [y/N]? \".format(\n driver, dvid\n )\n answer = self._ask_user_yn(question, \"n\")\n if answer == \"y\":\n logging.debug(\n \"Binding device {} to driver {}\".format(\n dvid, driver\n )\n )\n ret = VppPCIUtil.bind_vpp_device(node, driver, dvid)\n if ret:\n logging.debug(\n \"Could not bind device {}\".format(dvid)\n )\n dpdk_devices[dvid] = device\n del kernel_devices[dvid]\n\n dlen = len(dpdk_devices)\n if dlen > 0:\n print(\"\\nThese device(s) are already using DPDK.\\n\")\n VppPCIUtil.show_vpp_devices(dpdk_devices, show_interfaces=False)\n question = \"\\nWould you like to remove any of \"\n question += \"these device(s) [y/N]? \"\n answer = self._ask_user_yn(question, \"n\")\n if answer == \"y\":\n vppdl = {}\n for dit in dpdk_devices.items():\n dvid = dit[0]\n device = dit[1]\n question = \"Would you like to remove {} [y/N]? \".format(dvid)\n answer = self._ask_user_yn(question, \"n\")\n if answer == \"y\":\n vppdl[dvid] = device\n for dit in vppdl.items():\n dvid = dit[0]\n device = dit[1]\n if (\n \"unused\" in device\n and len(device[\"unused\"]) != 0\n and device[\"unused\"][0] != \"\"\n ):\n driver = device[\"unused\"][0]\n logging.debug(\n \"Binding device {} to driver {}\".format(dvid, driver)\n )\n ret = VppPCIUtil.bind_vpp_device(node, driver, dvid)\n if ret:\n logging.debug(\"Could not bind device {}\".format(dvid))\n else:\n kernel_devices[dvid] = device\n del dpdk_devices[dvid]\n\n interfaces = {}\n for dit in dpdk_devices.items():\n dvid = dit[0]\n device = dit[1]\n VppPCIUtil.vpp_create_interface(interfaces, dvid, device)\n node[\"interfaces\"] = interfaces\n\n self._update_auto_config()\n self.updateconfig()",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def reboot(self, node):",
"def invalidateBoot (self):\n if self.isBootValid(): \n self.mountBootPartition()\n installFilePath = self._getBootInstallationFilePath()\n if os.path.exists(installFilePath):\n os.remove(installFilePath)\n\n #self._runCommandRaiseIfFail(\"rm -rf %s\" % (self._getBootInstallationFilePath()))\n self._log(\"invalidate-boot\").notice(\"boot partition is invalidated\")\n else:\n self._log(\"invalidate-boot\").notice(\"boot partition is already invalid\")",
"def wait_boot(self, value: int) -> None:\n self._data[ATTR_WAIT_BOOT] = value",
"async def async_init_single_device(dev: Device) -> None:\n await dev.async_added_to_hass()\n dev.async_write_ha_state()",
"def usb_setup():\n print(\"Warning: using deprecated usb_setup routine!\")\n largest = largest_partition()\n medium = medium_partition()\n smallest = smallest_partition()\n\n print(\"Starting USB installation\")\n print(\"Using {} as archive storage\".format(largest))\n print(\"Using {} as volatile storage\".format(medium))\n print(\"Using {} as important storage\".format(smallest))\n\n lncm_usb = \"/usr/local/sbin/lncm-usb\"\n\n cli_invocation = [\n lncm_usb,\n largest,\n medium,\n smallest,\n get_uuid(largest),\n get_uuid(medium),\n get_uuid(smallest),\n str(largest_part_size()),\n ]\n\n call(cli_invocation)",
"def last_boot(self, value: datetime) -> None:\n self._data[ATTR_LAST_BOOT] = value.isoformat()",
"def do_device(self, args):\n self.device_command.cmdloop(\"Enter to device mode\")",
"def reboot_monitor(self):\n # define cross-platform /dev/null\n devnull = open(os.devnull, 'w')\n\n # if the OS is windows\n if os.name == 'nt':\n ping = ['ping', '-n', '1', self.device]\n\n # if the OS is posix\n else:\n ping = ['ping', '-c', '1', self.device]\n\n print(self.device + ' Waiting for device to finish rebooting, please wait', end='', flush=True)\n time.sleep(10)\n count = 1\n successcount = 0\n while count < 300:\n print('.', end='', flush=True)\n ping_call = subprocess.Popen(ping, stdout=devnull)\n returncode = ping_call.wait()\n # we need multiple successes to allow this to work, otherwise a single response while the box is still initializing can bite us\n if returncode == 0:\n successcount = successcount + 1\n if successcount == 5:\n break\n time.sleep(1)\n count = count + 1\n\n print('')\n if count == 300:\n print(self.device + ' Device has not responded to 300 pings, please manually check device')\n print(self.device + ' Exiting...')\n else:\n print(self.device + ' Device is now initializing')\n time.sleep(10)\n print(self.device + ' Device has finished rebooting')",
"def _new_device(device):\n key = f\"{DOMAIN}.{device.name}\"\n hass.data[DOMAIN][key] = device\n ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)\n discovery.load_platform(\n hass,\n ent_platform,\n DOMAIN,\n discovered={ATTR_NEW: [key]},\n hass_config=base_config,\n )",
"def bootup(debug_port, lines):\n lines.skip_until(\"Booting...\")\n lines.skip_until(\"Loading blocks...\")\n lines.skip_until(\"Starting user space\")\n authenticate(debug_port, lines)\n lines.expect_next(\"Enter command\")",
"def setup_platform(hass, config, add_devices, discovery_info=None):\n hub.update()\n\n for vacbot in hub.vacbots:\n add_devices([DeebotMopAttachedBinarySensor(vacbot, \"mop_attached\")], True)",
"def change_device(self):\n if self.state.ser:\n UsbHost.close_port(self.state.ser)\n device = self.CBDevices.currentText()\n if device:\n comport = self.devices[int(device)]\n self.state.ser = UsbHost.open_port(comport)\n if not self.state.ser:\n self.statusbar.showMessage(\"Выбранный порт более недоступен. Произведите повторный поиск\")\n return\n answer: str = self.UsbHost.send_command(self.state.ser, \"ping\", device)\n if answer in wrong_answers:\n error_message(\"Выбранный девайс не отвечает\")\n self.statusbar.showMessage(\"Выбранный порт более недоступен. Произведите повторный поиск\")\n return\n self.state.device_id = int(device)\n self.state.comport = comport\n self.create_message()\n self.set_controls_state(True)\n self.BtnL1.click()\n self.BtnAttenuate.click()\n self.SpinDACValue.setValue(35000)\n self.BtnSetDACValue.click()\n self.set_sw(\"0 1\")",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version|json\", \"show hostname\"], None,\n 'mixed')",
"def test_update_device(self):\n pass",
"def test_update_device(self):\n pass",
"def expand_second_partition(device):\n\n print('Deleting the original boot partition from the thumb drive')\n _delete_partition(device, 1)\n\n print('Expanding the partition. Resizing isn\\'t worth it. Or obvious to do.')\n resize_command = ['sudo', 'parted', device.path, 'resizepart', '2', '\"-1s\"']\n interactive_console(resize_command)\n\n print('Fixing the nibbly bits for the partition itself')\n target_partition = device.partitions(full_paths=True)[0]\n interactive_console(['sudo', 'e2fsck', '-f', target_partition])\n\n print('Fixing ext4 so it goes all the way to the end')\n target_end = device.partition_specs(2)['End']\n interactive_console(['sudo', 'resize2fs', target_partition, target_end])\n\n print('Success!')",
"def flashUboot(self):\n\t\tif self.settings.getKeyValue('flash.uboot?') == 'y':\n\t\t\tloadAddress = self.settings.getKeyValue('u-boot.flash.address')\n\t\t\tcmd = self.settings.getKeyValue('u-boot.load.command')\n\t\t\tcmd = cmd.replace('<u-boot>', 'u-boot.bin.12x.2430')\n\t\t\tself.socket.send(cmd, 5)\n\t\t\t#self.socket.send('protect off 1:0-1\\r', 2)\n\t\t\t#self.socket.send('erase 1:0-1\\r', 2)\n\t\t\t#self.socket.send('cp.b 80000000 %s 2ffff\\r' % loadAddress)\n\t\t\treturn None\n\t\t\t#cmd = cmd.replace('<u-bootloadadress>', self.u-bootloadaddress)",
"def initialize_devices(self):\n if not self.loaded_devices:\n raise Exception('Devices have to be loaded before being initialized.')\n\n for dev in self.devices:\n d = self.devices[dev]['dev'] # This is the Device instance\n d.initialize_driver()",
"def setup_devices(self, devices):\n \n self.devices = devices\n \n barrier = ReusableBarrier(len(devices))\n lock = Lock()\n aux_dict = {}\n\n for device in devices:\n device.barrier = barrier\n device.global_lock = lock\n for location in device.sensor_data: \n if location not in aux_dict:\n aux_dict[location] = Semaphore() \n \n for device in devices:\n device.device_semaphores = aux_dict\n\n self.setup_master_thread()",
"async def async_setup_entry(hass, config_entry, async_add_devices):\n loxconfig = hass.data[DOMAIN]['loxconfig']\n identify = loxconfig['msInfo']['serialNr']\n\n devices = []\n all_dimmers = []\n all_light_controller_dimmers = []\n all_color_picker = []\n all_switches = []\n all_dimmers = get_all_dimmer(loxconfig)\n\n for light_controller in get_all_light_controller(loxconfig):\n light_controller.update({'room': get_room_name_from_room_uuid(loxconfig, light_controller.get('room', '')),\n 'cat': get_cat_name_from_cat_uuid(loxconfig, light_controller.get('cat', '')),\n 'async_add_devices': async_add_devices\n })\n new_light_controller = LoxonelightcontrollerV2(**light_controller)\n\n if 'subControls' in light_controller:\n if len(light_controller['subControls']) > 0:\n for sub_controll in light_controller['subControls']:\n if light_controller['subControls'][sub_controll]['type'] == \"Dimmer\":\n light_controller['subControls'][sub_controll]['room'] = light_controller.get('room', '')\n light_controller['subControls'][sub_controll]['cat'] = light_controller.get('cat', '')\n light_controller['subControls'][sub_controll][\n 'lightcontroller_id'] = new_light_controller.unique_id\n all_light_controller_dimmers.append(light_controller['subControls'][sub_controll])\n\n elif light_controller['subControls'][sub_controll]['type'] == \"Switch\":\n light_controller['subControls'][sub_controll]['room'] = light_controller.get('room', '')\n light_controller['subControls'][sub_controll]['cat'] = light_controller.get('cat', '')\n light_controller['subControls'][sub_controll][\n 'lightcontroller_id'] = new_light_controller.unique_id\n all_switches.append(light_controller['subControls'][sub_controll])\n\n elif light_controller['subControls'][sub_controll]['type'] == \"ColorPickerV2\":\n light_controller['subControls'][sub_controll]['room'] = light_controller.get('room', '')\n light_controller['subControls'][sub_controll]['cat'] = light_controller.get('cat', '')\n light_controller['subControls'][sub_controll][\n 'lightcontroller_id'] = new_light_controller.unique_id\n all_color_picker.append(light_controller['subControls'][sub_controll])\n\n hass.bus.async_listen(EVENT, new_light_controller.event_handler)\n devices.append(new_light_controller)\n\n _ = all_dimmers + all_light_controller_dimmers\n\n for dimmer in _:\n if dimmer in all_light_controller_dimmers:\n dimmer.update({'room': get_room_name_from_room_uuid(loxconfig, light_controller.get('room', '')),\n 'cat': get_cat_name_from_cat_uuid(loxconfig, light_controller.get('cat', '')),\n 'async_add_devices': async_add_devices\n })\n else:\n dimmer.update({'room': get_room_name_from_room_uuid(loxconfig, dimmer.get('room', '')),\n 'cat': get_cat_name_from_cat_uuid(loxconfig, dimmer.get('cat', '')),\n 'async_add_devices': async_add_devices\n })\n\n new_dimmer = LoxoneDimmer(**dimmer)\n hass.bus.async_listen(EVENT, new_dimmer.event_handler)\n devices.append(new_dimmer)\n\n for switch in all_switches:\n switch.update({'room': get_room_name_from_room_uuid(loxconfig, light_controller.get('room', '')),\n 'cat': get_cat_name_from_cat_uuid(loxconfig, light_controller.get('cat', '')),\n 'async_add_devices': async_add_devices\n })\n new_switch = LoxoneLight(**switch)\n hass.bus.async_listen(EVENT, new_switch.event_handler)\n devices.append(new_switch)\n\n for color_picker in all_color_picker:\n color_picker.update({'room': get_room_name_from_room_uuid(loxconfig, light_controller.get('room', '')),\n 'cat': get_cat_name_from_cat_uuid(loxconfig, light_controller.get('cat', 
'')),\n 'async_add_devices': async_add_devices\n })\n new_color_picker = LoxoneColorPickerV2(**color_picker)\n hass.bus.async_listen(EVENT, new_color_picker.event_handler)\n devices.append(new_color_picker)\n\n async_add_devices(devices, True)\n return True",
"def update(self):\n _LOGGER.debug(\"Updating Warmup devices\")\n self._warmup.update_all_devices()",
"def _post_init(self):\n if WIN:\n self._find_devices_win()\n elif MAC:\n self._find_devices_mac()\n else:\n self._find_devices()\n self._update_all_devices()\n if NIX:\n self._find_leds()",
"def zap(dev):\n try:\n LOG.debug('Zapping partition table on %s', dev)\n\n # try to wipe out any GPT partition table backups. sgdisk\n # isn't too thorough.\n lba_size = 4096\n size = 33 * lba_size\n with file(dev, 'wb') as dev_file:\n dev_file.seek(-size, os.SEEK_END)\n dev_file.write(size*'\\0')\n\n command_check_call(\n [\n 'sgdisk',\n '--zap-all',\n '--',\n dev,\n ],\n )\n command_check_call(\n [\n 'sgdisk',\n '--clear',\n '--mbrtogpt',\n '--',\n dev,\n ],\n )\n\n update_partition('-d', dev, 'zapped')\n\n except subprocess.CalledProcessError as e:\n raise Error(e)",
"def _post_init(self):\n self._led_type_code = self.manager.get_typecode('LED')\n self.device_path = os.path.realpath(os.path.join(self.path, 'device'))\n if '::' in self.name:\n chardev, code_name = self.name.split('::')\n if code_name in self.manager.codes['LED_type_codes']:\n self.code = self.manager.codes['LED_type_codes'][code_name]\n try:\n event_number = chardev.split('input')[1]\n except IndexError:\n print(\"Failed with\", self.name)\n raise\n else:\n self._character_device_path = '/dev/input/event' + event_number\n self._match_device()",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"cat /proc/uptime\", \"hostnamectl\",\n \"cat /etc/os-release\"], None, 'text')",
"def boot():\r\n print \"\"\"\r\n ###### ## ## ### ## ## ## ## ######## ########\r\n ## ## ## ## ## ## ### ## ## ## ## ## ##\r\n ## #### ## ## #### ## ## ## ## ## ##\r\n ## ## ## ## ## ## ## ## ## ######## ######\r\n ## ## ######### ## #### ## ## ## ## ##\r\n ## ## ## ## ## ## ### ## ## ## ## ##\r\n ###### ## ## ## ## ## ####### ## ## ########\r\n\r\n Version %s-%s\r\n\r\n Multi Purpose Artificial Inelegance Program\r\n Copyright (c) Alexandre Gauthier 2010-2011\r\n All Rights Reserved\r\n \"\"\" % ( constants.VERSION, constants.TAGNAME )\r\n\r\n # Initialize log\r\n # TODO: The values should be read from config file.\r\n log.init_log('cyanure.log', 'DEBUG')\r\n\r\n logger.info(\"Cyanure system init: Version %s (%s)\" % (\r\n constants.VERSION, constants.TAGNAME ))",
"def reboot_fpga(self):\n log.info(\"Booting FPGA from SPI prom\")\n self.set(\"FPGA_CTRL\", \"boot_fpga\", 1);",
"def swap(name, persist=True, config=\"/etc/fstab\"):\n ret = {\"name\": name, \"changes\": {}, \"result\": True, \"comment\": \"\"}\n on_ = __salt__[\"mount.swaps\"]()\n\n if __salt__[\"file.is_link\"](name):\n real_swap_device = __salt__[\"file.readlink\"](name)\n if not real_swap_device.startswith(\"/\"):\n real_swap_device = \"/dev/{}\".format(os.path.basename(real_swap_device))\n else:\n real_swap_device = name\n\n if real_swap_device in on_:\n ret[\"comment\"] = \"Swap {} already active\".format(name)\n elif __opts__[\"test\"]:\n ret[\"result\"] = None\n ret[\"comment\"] = \"Swap {} is set to be activated\".format(name)\n else:\n __salt__[\"mount.swapon\"](real_swap_device)\n\n on_ = __salt__[\"mount.swaps\"]()\n\n if real_swap_device in on_:\n ret[\"comment\"] = \"Swap {} activated\".format(name)\n ret[\"changes\"] = on_[real_swap_device]\n else:\n ret[\"comment\"] = \"Swap {} failed to activate\".format(name)\n ret[\"result\"] = False\n\n if persist:\n device_key_name = \"device\"\n if \"AIX\" in __grains__[\"os\"]:\n device_key_name = \"dev\"\n if \"/etc/fstab\" == config:\n # Override default for AIX\n config = \"/etc/filesystems\"\n fstab_data = __salt__[\"mount.filesystems\"](config)\n else:\n fstab_data = __salt__[\"mount.fstab\"](config)\n if __opts__[\"test\"]:\n if name not in fstab_data and name not in [\n fstab_data[item][\"device\"] for item in fstab_data\n ]:\n ret[\"result\"] = None\n if name in on_:\n ret[\n \"comment\"\n ] = \"Swap {} is set to be added to the fstab and to be activated\".format(\n name\n )\n return ret\n\n if \"none\" in fstab_data:\n if (\n fstab_data[\"none\"][device_key_name] == name\n and fstab_data[\"none\"][\"fstype\"] != \"swap\"\n ):\n return ret\n\n if \"AIX\" in __grains__[\"os\"]:\n out = None\n ret[\"result\"] = False\n ret[\"comment\"] += \". swap not present in /etc/filesystems on AIX.\"\n return ret\n else:\n # present, new, change, bad config\n # Make sure the entry is in the fstab\n out = __salt__[\"mount.set_fstab\"](\n \"none\", name, \"swap\", [\"defaults\"], 0, 0, config\n )\n if out == \"present\":\n return ret\n if out == \"new\":\n ret[\"changes\"][\"persist\"] = \"new\"\n ret[\"comment\"] += \". Added new entry to the fstab.\"\n return ret\n if out == \"change\":\n ret[\"changes\"][\"persist\"] = \"update\"\n ret[\"comment\"] += \". Updated the entry in the fstab.\"\n return ret\n if out == \"bad config\":\n ret[\"result\"] = False\n ret[\"comment\"] += \". However, the fstab was not found.\"\n return ret\n return ret",
"def software_load(self, filename: str) -> None:\n pass # Most boards can use serialboot.",
"async def async_setup(self) -> None:\n await self.hass.async_add_executor_job(self._setup)\n\n # set already known devices to away instead of unavailable\n device_registry = dr.async_get(self.hass)\n devices = dr.async_entries_for_config_entry(device_registry, self.entry_id)\n for device_entry in devices:\n if device_entry.via_device_id is None:\n continue # do not add the router itself\n\n device_mac = dict(device_entry.connections).get(dr.CONNECTION_NETWORK_MAC)\n self.devices[device_mac] = {\n \"mac\": device_mac,\n \"name\": device_entry.name,\n \"active\": False,\n \"last_seen\": dt_util.utcnow() - timedelta(days=365),\n \"device_model\": None,\n \"device_type\": None,\n \"type\": None,\n \"link_rate\": None,\n \"signal\": None,\n \"ip\": None,\n }\n\n await self.async_update_device_trackers()\n self.entry.async_on_unload(\n async_track_time_interval(\n self.hass, self.async_update_device_trackers, SCAN_INTERVAL\n )\n )\n\n async_dispatcher_send(self.hass, self.signal_device_new)",
"def refreshPartitionTable(device):\n\n try:\n fd = os.open(device, os.O_RDONLY)\n except EnvironmentError, (error, strerror):\n print 'Could not open device %s. Reason: %s.'%(device, strerror)\n sys.exit(-1)\n\n # Sync and wait for Sync to complete\n os.system(PATH_SYNC)\n sleep(2)\n\n # Call required ioctl to re-read partition table\n try:\n ioctl(fd, BLKRRPART())\n except EnvironmentError, (error, message):\n # Attempt ioctl call twice in case an older kernel (1.2.x) is being used\n os.system(PATH_SYNC)\n sleep(2)\n\n try:\n ioctl(fd, BLKRRPART())\n except EnvironmentError, (error, strerror):\n print 'IOCTL Error: %s for device %s.'%(strerror, device)\n sys.exit(-1)\n\n print 'Successfully re-read partition table on device %s.'%(device)\n # Sync file buffers\n os.fsync(fd)\n os.close(fd)\n\n # Final sync\n print \"Syncing %s ... \" % (device),\n os.system(PATH_SYNC)\n sleep(4) # for sync()\n print \"Done.\"",
"def set_boot_options(self, image_name, **vendor_specifics):\n current_boot = self.show(\"show running-config | inc ^boot system \")\n file_system = vendor_specifics.get(\"file_system\")\n if file_system is None:\n file_system = self._get_file_system()\n\n file_system_files = self.show(f\"dir {file_system}\")\n if re.search(image_name, file_system_files) is None:\n log.error(\"Host %s: File not found error for image %s.\", self.host, image_name)\n raise NTCFileNotFoundError(\n # TODO: Update to use hostname\n hostname=self.host,\n file=image_name,\n directory=file_system,\n )\n\n current_images = current_boot.splitlines()\n commands_to_exec = [f\"no {image}\" for image in current_images]\n commands_to_exec.append(f\"boot system {file_system}/{image_name}\")\n self.config(commands_to_exec)\n\n self.save()\n if self.boot_options[\"sys\"] != image_name:\n log.error(\"Host %s: Setting boot command did not yield expected results\", self.host)\n raise CommandError(\n command=f\"boot system {file_system}/{image_name}\",\n message=\"Setting boot command did not yield expected results\",\n )\n\n log.info(\"Host %s: boot options have been set to %s\", self.host, image_name)",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version\", \"show run hostname\"], None)",
"def _update_device_types(self):\n device_types = self.adapter.device_types()\n for device_type in device_types.items:\n key = device_type.id\n self._make_up_to_date('/device_types', key, device_type)",
"def device(self, serial):\n self._devices = []",
"def default_bootmodules(self):\n # FIXME: clean up / separate platform-specific logic\n\n machine = self\n a = machine.get_bootarch()\n\n # set the kernel: elver on x86_64\n if a == \"x86_64\":\n kernel = \"elver\"\n elif a == \"armv7\" or a == \"armv8\":\n kernel = \"cpu_%s\" % machine.get_platform()\n else:\n kernel = \"cpu\"\n\n m = barrelfish.BootModules(machine, prefix=(\"%s/sbin/\" % a), kernel=kernel)\n m.add_kernel_args(machine.get_kernel_args())\n # default for all barrelfish archs\n # hack: cpu driver is not called \"cpu\" for ARMv7 builds\n if a == \"armv7\" :\n m.add_module(\"cpu_%s\" % machine.get_platform(), machine.get_kernel_args())\n elif a == \"armv8\" :\n # remove kernel\n m.set_kernel(None)\n # add cpu driver\n m.set_cpu_driver(kernel, machine.get_kernel_args())\n # add boot driver\n m.set_boot_driver(machine.get_boot_driver())\n else :\n m.add_module(\"cpu\", machine.get_kernel_args())\n\n m.add_module(\"init\")\n m.add_module(\"mem_serv\")\n m.add_module(\"monitor\")\n m.add_module(\"ramfsd\", [\"boot\"])\n m.add_module(\"skb\", [\"boot\"])\n m.add_module(\"proc_mgmt\", [\"boot\"])\n m.add_module(\"spawnd\", [\"boot\"])\n m.add_module(\"startd\", [\"boot\"])\n m.add_module(\"/eclipseclp_ramfs.cpio.gz\", [\"nospawn\"])\n m.add_module(\"/skb_ramfs.cpio.gz\", [\"nospawn\"])\n m.add_module(\"corectrl\", [\"auto\"])\n\n # armv8\n if a == \"armv8\" :\n if not machine._uboot: # no ACPI on U-Boot\n m.add_module(\"acpi\", [\"boot\"])\n m.add_module(\"kaluga\", [\"boot\"])\n\n # SKB and PCI are x86-only for the moment\n if a == \"x86_64\" or a == \"x86_32\":\n # Add acpi with machine-specific extra-arguments\n m.add_module(\"acpi\", [\"boot\"] + machine.get_acpi_args())\n m.add_module(\"routing_setup\", [\"boot\"])\n\n # Add pci with machine-specific extra-arguments\n m.add_module(\"pci\", [\"auto\"] + machine.get_pci_args())\n\n # Add kaluga with machine-specific bus:dev:fun triplet for eth0\n # interface\n m.add_module(\"kaluga\",\n [\"boot\", \"eth0=%d:%d:%d\" % machine.get_eth0()])\n\n # coreboot should work on armv7 now\n if a == \"armv7\":\n m.add_module(\"kaluga\", machine.get_kaluga_args())\n m.add_module(\"driverdomain_pl390\", [\"auto\"])\n m.add_module(\"serial_kernel\", [\"auto\"])\n m.add_module(\"serial_pl011\", [\"auto\"])\n m.add_module(\"int_route\", [])\n\n return m",
"def device_reset(self):\n\t\tlogger.info('Device Reset')\n\t\tself.spi.writebytes([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff])\n\t\tprint(DELIMITER*'-')",
"def init_devices(self):\n self.hp_nb = int(self.rs_nb* self.hp_proportion/(1- self.hp_proportion))\n self.defense_cost = self.hp_nb * self.hp_unit_cost\n rs_devices = [True for i in range(self.rs_nb)] #rs --> True\n hp_devices = [False for i in range(self.hp_nb)] #hp --> False\n self.devices = rs_devices + hp_devices\n shuffle(self.devices)",
"async def init_boot_time(self):\n await self.exec_cmd(self._parse_boottime_hostname,\n [\"show version\"], None)",
"def reset_device_bridge(self, client, device_type):\r\n client.resetDeviceBridgeOS(device_type)",
"def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n host = config.get(CONF_HOST)\n name = config.get(CONF_NAME)\n token = config.get('token')\n\n add_devices_callback([MiroboSwitch(name, host, token)])",
"def test_update_pci_switch(self):\n pass",
"def load_kernel_module(params) -> None:\n print(\"Loading kernel module...\")\n os.system(\"modprobe -r v4l2loopback >/dev/null 2>&1\")\n cmd = \"modprobe v4l2loopback devices=1 video_nr=\" + params['loopback_nr'] + \\\n \" card_label=\" + params['loopback_name'] + \\\n \" exclusive_caps=\" + params['loopback_exclusive'] + \" >/dev/null 2>&1\"\n if os.system(cmd) == 0:\n print(\" Success !\")\n else:\n print(\" Failure !\")",
"def initialize_devices(self):\n for k in self.devices:\n dev = self.devices[k]\n print('Starting %s' % dev.properties['name'])\n dev.initialize_driver()\n # print('Error initializing %s' % dev.properties['name'])\n if 'defaults' in dev.properties:\n defaults_file = dev.properties['defaults']\n defaults = from_yaml_to_dict(defaults_file)[dev.properties['name']]\n dev.apply_values(defaults)\n if dev.properties['type'] == 'daq':\n self.daqs[dev.properties['name']] = {'input': [],\n 'output': [],\n 'monitor': [], } # Creates an entry for every different DAQ.",
"def test_persistent_group3(dev):\n form(dev[0], dev[1])\n dev[1].request(\"BSS_FLUSH 0\")\n invite_from_cli(dev[0], dev[1])\n dev[1].request(\"BSS_FLUSH 0\")\n invite_from_go(dev[0], dev[1])",
"def bootpart(disks):\n return path_to_partition(disks, '/boot/foo')",
"def _update(self, device=None):\n self._attr_available = True\n self.schedule_update_ha_state(True)",
"def reorder_udev_rules(self):\n self.udev.reorder_rules()",
"def unselect_and_select_boot_order():\n # Unselect and select the \"Manage boot order\" option\n selenium2lib = ui_lib.get_s2l()\n status = True\n logger._log_to_console_and_log_file(\"\")\n logger._log_to_console_and_log_file(\"Unselecting and selecting the 'Manage boot order' checkbox\")\n ui_lib.wait_for_checkbox_and_unselect(FusionServerProfilesPage.ID_CHKBOX_MANAGE_BOOT_ORDER)\n if not ui_lib.wait_for_element_visible(\"name=%s\" % \"CD\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"USB\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"HardDisk\"):\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were correctly hidden\")\n else:\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items are still being displayed\")\n selenium2lib.capture_page_screenshot()\n status = False\n ui_lib.wait_for_checkbox_and_select(FusionServerProfilesPage.ID_CHKBOX_MANAGE_BOOT_ORDER)\n if ui_lib.wait_for_element_visible(\"name=%s\" % \"CD\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"USB\") and ui_lib.wait_for_element_visible(\"name=%s\" % \"HardDisk\"):\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were correctly displayed\")\n else:\n logger._log_to_console_and_log_file(\"- 'Manage boot order' items were NOT displayed\")\n selenium2lib.capture_page_screenshot()\n status = False\n return status",
"def boot(self, boot_node_request):\n return self.client.call('POST',\n self.name + 'boot', payload=boot_node_request)",
"def _SetDeviceSerial(self, device_serial):\n self._device_address = (\"127.0.0.1:%s\" % self._adb_port if\n self._adb_port else \"\")\n self._device_serial = (device_serial if device_serial else\n self._device_address)",
"def update_firmware(self):\n self.execute_command(CMD_UPDATE_FIRMWARE)",
"def test_config_device_reset(get_config, monkeypatch):\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', {'test': 'conf'})\n cfg = get_config(DeviceConfig, base_config)\n cfg.save()\n cfg.write_default()\n new_conf = cfg.load()\n\n assert cfg.data == cfg.minimal_essential_conf, 'failed to apply default config'\n assert new_conf == cfg.minimal_essential_conf, 'failed to load default config'",
"def create_boot_dev(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_CreateBootDev', self.handle))",
"def update_device(self, dev_dict):\n # Note(jprabh1x): added bus,slot,function into fields dict as \n # seperate fields.\n no_changes = ('status', 'instance_uuid', 'id', 'extra_info', 'workload')\n map(lambda x: dev_dict.pop(x, None),\n [key for key in no_changes])\n\n # Note(jprabh1x): populating values for bus,slot,function from address in dev_dict.\n if dev_dict.has_key(\"address\"):\n \t\taddress = pci_utils.parse_address(dev_dict[\"address\"])\n \t\tdev_dict.update({'bus':str(address[1]), 'slot':str(address[2]), 'function':str(address[3])})\n for k, v in dev_dict.items():\n if k in self.fields.keys():\n self[k] = v\n else:\n extra_info = self.extra_info\n extra_info.update({k: str(v)})\n self.extra_info = extra_info"
] | [
"0.6520707",
"0.6508391",
"0.6398044",
"0.623242",
"0.6228627",
"0.6185939",
"0.6038498",
"0.59571797",
"0.58889556",
"0.5886491",
"0.5790621",
"0.5788118",
"0.5766241",
"0.5762834",
"0.57031375",
"0.5653588",
"0.5599488",
"0.55963147",
"0.5501934",
"0.54900634",
"0.5485925",
"0.5477025",
"0.5457647",
"0.54468477",
"0.54135704",
"0.53657985",
"0.53639513",
"0.53071654",
"0.52887815",
"0.5278719",
"0.5270356",
"0.52571756",
"0.5233908",
"0.520998",
"0.51927716",
"0.5179826",
"0.51759",
"0.51343936",
"0.5128883",
"0.5121244",
"0.51127297",
"0.5107306",
"0.5103555",
"0.50930816",
"0.508734",
"0.50845796",
"0.5082704",
"0.5082632",
"0.50821453",
"0.5080129",
"0.506031",
"0.50504684",
"0.50468063",
"0.50467235",
"0.5044444",
"0.5041082",
"0.5038342",
"0.5037935",
"0.50374687",
"0.50374687",
"0.5027853",
"0.50206506",
"0.50205547",
"0.5020164",
"0.5001667",
"0.50004476",
"0.49989086",
"0.49891815",
"0.498364",
"0.49825695",
"0.49824864",
"0.4980418",
"0.49706316",
"0.49703446",
"0.494817",
"0.49351344",
"0.49224606",
"0.49216548",
"0.4921404",
"0.49197304",
"0.4918945",
"0.49164325",
"0.49157315",
"0.49103293",
"0.49063888",
"0.49053016",
"0.4890816",
"0.48873535",
"0.48805755",
"0.48772427",
"0.48717412",
"0.4866505",
"0.48618588",
"0.4860068",
"0.48524386",
"0.485228",
"0.48492372",
"0.48483875",
"0.48442203",
"0.48437023"
] | 0.5921619 | 8 |
Configures a single boot from a specific device. | def set_one_time_boot(self, device, mac=None):
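        # Delegate to the persistent-boot helper, overriding the boot device
        # for the next boot only (persistent=False).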
self._update_persistent_boot([device], persistent=False, mac=mac) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setup_device(device):\n try:\n # Gets around \"Resource busy\" errors\n device.detach_kernel_driver(0)\n except Exception:\n pass\n device.set_configuration()",
"def _configure_device():\n vendor_id = 0x04D8 # These ids are microchip's libusb based device\n product_id = 0x0204 # ids\n dev = usb.core.find(idVendor=vendor_id, idProduct = product_id)\n try:\n dev.set_configuration()\n return dev\n except:\n return None",
"def set_boot_device(self, device, persistent=False):\n\n operation = \"set_boot_device\"\n try:\n self.sp_manager.create_boot_policy()\n self.sp_manager.set_boot_device(device)\n\n except UcsException as ex:\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def boot(self, boot):\n\n self._boot = boot",
"def set_boot_device(self, task, device, persistent=False):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified.\") % device)\n\n uefi_mode = (\n boot_mode_utils.get_boot_mode(task.node) == 'uefi')\n\n # disable 60 secs timer\n timeout_disable = \"0x00 0x08 0x03 0x08\"\n ipmitool.send_raw(task, timeout_disable)\n\n # note(naohirot):\n # Set System Boot Options : ipmi cmd '0x08', bootparam '0x05'\n #\n # $ ipmitool raw 0x00 0x08 0x05 data1 data2 0x00 0x00 0x00\n #\n # data1 : '0xe0' persistent + uefi\n # '0xc0' persistent + bios\n # '0xa0' next only + uefi\n # '0x80' next only + bios\n # data2 : boot device defined in the dict _BOOTPARAM5_DATA2\n\n bootparam5 = '0x00 0x08 0x05 %s %s 0x00 0x00 0x00'\n if persistent:\n data1 = '0xe0' if uefi_mode else '0xc0'\n else:\n data1 = '0xa0' if uefi_mode else '0x80'\n data2 = _BOOTPARAM5_DATA2[device]\n\n cmd8 = bootparam5 % (data1, data2)\n ipmitool.send_raw(task, cmd8)\n else:\n if device not in self.get_supported_boot_devices(task):\n raise exception.InvalidParameterValue(_(\n \"Invalid boot device %s specified. \"\n \"Current iRMC firmware condition doesn't support IPMI \"\n \"but Redfish.\") % device)\n super(ipmitool.IPMIManagement, self).set_boot_device(\n task, device, persistent)",
"def config_device(self, cfg):\n\n\t\tif self.host is not None:\n\t\t\tself.tell(\"Configuring device\")\n\n\t\tself._dev = cfg\n\n\t\tself.do_checklist([])",
"def configure_device(cls, params, device_id, token):\n # Meta information to be published along with device actuation message\n meta = {\n 'service': ''\n }\n invalid_attrs = []\n\n meta['service'] = init_tenant_context(token, db)\n\n meta['timestamp'] = int(time.time() * 1000)\n\n orm_device = assert_device_exists(device_id)\n full_device = serialize_full_device(orm_device, meta['service'])\n LOGGER.debug(f\" Full device: {json.dumps(full_device)}\")\n\n data = params.get('data')\n payload = json.loads(data)\n LOGGER.debug(f' Parsed request payload: {json.dumps(payload)}')\n\n payload['id'] = orm_device.id\n\n for attr in payload['attrs']:\n if find_attribute(full_device, attr, 'actuator') is None:\n invalid_attrs.append(attr)\n\n if not invalid_attrs:\n LOGGER.debug(f' Sending configuration message through Kafka.')\n kafka_handler_instance = cls.kafka.getInstance(cls.kafka.kafkaNotifier)\n kafka_handler_instance.configure(payload, meta)\n LOGGER.debug(f' Configuration sent.')\n result = {f' status': 'configuration sent to device'}\n else:\n LOGGER.warning(f' invalid attributes detected in command: {invalid_attrs}')\n result = {\n 'status': 'some of the attributes are not configurable',\n 'attrs': invalid_attrs\n }\n raise HTTPRequestError(403, result)\n\n return result",
"def _new_device(device):\n key = f\"{DOMAIN}.{device.name}\"\n hass.data[DOMAIN][key] = device\n ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)\n discovery.load_platform(\n hass,\n ent_platform,\n DOMAIN,\n discovered={ATTR_NEW: [key]},\n hass_config=base_config,\n )",
"def boot_config():\n # quick check to grab a config file from /boot partition.\n # this function helps users who cannot SSH/access the Pi,\n # but can access the microSD card\n if os.path.exists(BOOT_CONFIG_PATH):\n print(\"Configuration loaded from /boot directory.\")\n with open(BOOT_CONFIG_PATH) as boot_file:\n with open(CONFIG_FILE_PATH, 'w+') as config_file:\n for line in boot_file:\n config_file.write(line)",
"def set_device(self, device):\n self.device = device",
"def __init__(self, device):\n self.device = device\n self.device.get_active_configuration()",
"def _update_persistent_boot(self, device_type=[], persistent=False,\n mac=None):\n tenure = 'Once'\n new_device = device_type[0]\n # If it is a standard device, we need to convert in RIS convention\n if device_type[0].upper() in DEVICE_COMMON_TO_RIS:\n new_device = DEVICE_COMMON_TO_RIS[device_type[0].upper()]\n\n if persistent:\n tenure = 'Continuous'\n\n systems_uri = \"/rest/v1/Systems/1\"\n # Need to set this option first if device is 'UefiTarget'\n if new_device is 'UefiTarget':\n if not mac:\n msg = ('Mac is needed for iscsi uefi boot')\n raise exception.IloInvalidInputError(msg)\n\n headers, bios_uri, bios_settings = self._check_bios_resource()\n # Get the Boot resource and Mappings resource.\n boot_settings = self._get_bios_boot_resource(bios_settings)\n StructuredBootString = None\n\n for boot_setting in boot_settings['BootSources']:\n if(mac.upper() in boot_setting['UEFIDevicePath'] and\n 'iSCSI' in boot_setting['UEFIDevicePath']):\n StructuredBootString = boot_setting['StructuredBootString']\n break\n if not StructuredBootString:\n msg = ('MAC provided is Invalid \"%s\"' % mac)\n raise exception.IloInvalidInputError(msg)\n\n new_boot_settings = {}\n new_boot_settings['Boot'] = {'UefiTargetBootSourceOverride':\n StructuredBootString}\n status, headers, response = self._rest_patch(systems_uri, None,\n new_boot_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n new_boot_settings = {}\n new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': tenure,\n 'BootSourceOverrideTarget': new_device}\n status, headers, response = self._rest_patch(systems_uri, None,\n new_boot_settings)\n if status >= 300:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)",
"def boot(self):\n\n pass",
"def load_device():",
"def _SetDeviceSerial(self, device_serial):\n self._device_address = (\"127.0.0.1:%s\" % self._adb_port if\n self._adb_port else \"\")\n self._device_serial = (device_serial if device_serial else\n self._device_address)",
"async def async_step_configure_device(self, user_input=None):\n errors = {}\n dev_id = self.selected_device\n if user_input is not None:\n try:\n self.device_data = user_input.copy()\n if dev_id is not None:\n # self.device_data[CONF_PRODUCT_KEY] = self.devices[\n # self.selected_device\n # ][\"productKey\"]\n cloud_devs = self.hass.data[DOMAIN][DATA_CLOUD].device_list\n if dev_id in cloud_devs:\n self.device_data[CONF_MODEL] = cloud_devs[dev_id].get(\n CONF_PRODUCT_NAME\n )\n if self.editing_device:\n self.device_data.update(\n {\n CONF_DEVICE_ID: dev_id,\n CONF_DPS_STRINGS: self.dps_strings,\n CONF_ENTITIES: [],\n }\n )\n if user_input[CONF_ENTITIES]:\n entity_ids = [\n int(entity.split(\":\")[0])\n for entity in user_input[CONF_ENTITIES]\n ]\n device_config = self.config_entry.data[CONF_DEVICES][dev_id]\n self.entities = [\n entity\n for entity in device_config[CONF_ENTITIES]\n if entity[CONF_ID] in entity_ids\n ]\n return await self.async_step_configure_entity()\n\n self.dps_strings = await validate_input(self.hass, user_input)\n return await self.async_step_pick_entity_type()\n except CannotConnect:\n errors[\"base\"] = \"cannot_connect\"\n except InvalidAuth:\n errors[\"base\"] = \"invalid_auth\"\n except EmptyDpsList:\n errors[\"base\"] = \"empty_dps\"\n except Exception: # pylint: disable=broad-except\n _LOGGER.exception(\"Unexpected exception\")\n errors[\"base\"] = \"unknown\"\n\n defaults = {}\n if self.editing_device:\n # If selected device exists as a config entry, load config from it\n defaults = self.config_entry.data[CONF_DEVICES][dev_id].copy()\n schema = schema_defaults(options_schema(self.entities), **defaults)\n placeholders = {\"for_device\": f\" for device `{dev_id}`\"}\n else:\n defaults[CONF_PROTOCOL_VERSION] = \"3.3\"\n defaults[CONF_HOST] = \"\"\n defaults[CONF_DEVICE_ID] = \"\"\n defaults[CONF_LOCAL_KEY] = \"\"\n defaults[CONF_FRIENDLY_NAME] = \"\"\n if dev_id is not None:\n # Insert default values from discovery and cloud if present\n device = self.discovered_devices[dev_id]\n defaults[CONF_HOST] = device.get(\"ip\")\n defaults[CONF_DEVICE_ID] = device.get(\"gwId\")\n defaults[CONF_PROTOCOL_VERSION] = device.get(\"version\")\n cloud_devs = self.hass.data[DOMAIN][DATA_CLOUD].device_list\n if dev_id in cloud_devs:\n defaults[CONF_LOCAL_KEY] = cloud_devs[dev_id].get(CONF_LOCAL_KEY)\n defaults[CONF_FRIENDLY_NAME] = cloud_devs[dev_id].get(CONF_NAME)\n schema = schema_defaults(CONFIGURE_DEVICE_SCHEMA, **defaults)\n\n placeholders = {\"for_device\": \"\"}\n\n return self.async_show_form(\n step_id=\"configure_device\",\n data_schema=schema,\n errors=errors,\n description_placeholders=placeholders,\n )",
"def get_configuration(self):\n\t\tdevice = DeviceBase(self.name)\n\n\t\tif len(self.master_url) > 0:\n\t\t\tdevice.master_url = self.master_url\n\t\t\tr = requests.get(self.master_url + '/configuration/' + self.name)\n\n\t\t\tif r.status_code == 200:\n\t\t\t\ttry:\n\t\t\t\t\t#Request success\n\t\t\t\t\tconfig = json.loads(r.text)\n\t\t\t\t\tif config['deviceType'] == 1:\n\t\t\t\t\t\t\"\"\" HID Reader \"\"\"\n\t\t\t\t\t\tdevice = HIDReader(self.name)\n\t\t\t\t\tif config['deviceType'] == 2:\n\t\t\t\t\t\t\"\"\" ZK45Reader \"\"\"\n\t\t\t\t\t\tdevice = ZK45Reader(self.name)\n\t\t\t\t\tif config['deviceType'] == 4:\n\t\t\t\t\t\t\"\"\" ZFM20Reader \"\"\"\n\t\t\t\t\t\tdevice = ZFM20Reader(self.name)\n\t\t\t\t\tif config['deviceType'] == 5:\n\t\t\t\t\t\t\"\"\" IOEcho \"\"\"\n\t\t\t\t\t\tdevice = IOEcho(name=self.name, pin_and_label_matrix='')\n\t\t\t\t\telif config['deviceType'] == 0:\n\t\t\t\t\t\t\"\"\" None \"\"\"\n\t\t\t\t\t\tdevice = DeviceBase(name=self.name)\n\t\t\t\t\telse:\n\t\t\t\t\t\t\"\"\" Disable \"\"\"\n\t\t\t\t\t\tdevice = DeviceBase(self.name)\n\n\t\t\t\t\tdevice.zone_id = config['zone']\n\n\t\t\t\t\tdevice.is_zone_enabled = config['enabled']\n\t\t\t\t\tdevice.is_zone_day_time_only = config['dayTimeOnly']\n\t\t\t\t\tdevice.is_configuration_loaded = True\n\n\t\t\t\t\tdevice.master_secret = config['secret']\n\t\t\t\t\tdevice.master_url = self.master_url\n\n\t\t\t\t\tdevice.is_in_error = False\n\t\t\t\t\tdevice.error_status = \"OK\"\n\t\t\t\t\tdevice.type = config['deviceType']\n\n\t\t\t\t\tprint(\"Configuration loaded.\")\n\t\t\t\texcept Exception as e:\n\t\t\t\t\terror_message = \"Device type not supported by current platform. Configuration aborted. (\" + str(e) + \")\"\n\t\t\t\t\tprint(error_message)\n\t\t\t\t\tdevice.zone_id = 1\n\n\t\t\t\t\tdevice.is_zone_enabled = False\n\t\t\t\t\tdevice.is_zone_day_time_only = False\n\t\t\t\t\tdevice.is_in_error = True\n\t\t\t\t\tdevice.error_status = error_message\n\t\t\telse:\n\t\t\t\tprint(\"Configuration loading failed. (Server response : \" + str(r.status_code) + \")\")\n\t\t\t\tdevice.zone_id = 1\n\t\t\t\tdevice.is_zone_enabled = False\n\t\t\t\tdevice.is_zone_day_time_only = False\n\t\t\t\tdevice.is_in_error = True\n\t\t\t\tdevice.error_status = \"Configuration loading failed. (Server response : \" + str(r.status_code) + \")\"\n\t\telse:\n\t\t\tself.zone_id = 1\n\t\t\tself.is_zone_enabled = True\n\t\t\tself.is_zone_day_time_only = True\n\t\t\tdevice.is_in_error = True\n\t\t\tdevice.error_status = \"No master URL defined\"\n\n\t\tdevice.report_state()\n\t\treturn device",
"def boot_configuration(self):\n bootconfs = self.get_logical_configuration(gdef.BOOT_LOG_CONF)\n if not bootconfs:\n return bootconfs\n assert len(bootconfs) == 1 # Only one boot configuration can exist for each device instance.\n return bootconfs[0]",
"def setup_dhcp_env(device):\n raise NotImplementedError",
"def _setBootable(self, bootable):\n if self.partedPartition:\n if arch.isS390():\n return\n if self.flagAvailable(parted.PARTITION_BOOT):\n if bootable:\n self.setFlag(parted.PARTITION_BOOT)\n else:\n self.unsetFlag(parted.PARTITION_BOOT)\n else:\n raise errors.DeviceError(\"boot flag not available for this partition\", self.name)\n\n self._bootable = bootable\n else:\n self.req_bootable = bootable",
"def magma_setdevice(dev):\n\n _libmagma.magma_setdevice(dev)",
"def change_device(self):\n if self.state.ser:\n UsbHost.close_port(self.state.ser)\n device = self.CBDevices.currentText()\n if device:\n comport = self.devices[int(device)]\n self.state.ser = UsbHost.open_port(comport)\n if not self.state.ser:\n self.statusbar.showMessage(\"Выбранный порт более недоступен. Произведите повторный поиск\")\n return\n answer: str = self.UsbHost.send_command(self.state.ser, \"ping\", device)\n if answer in wrong_answers:\n error_message(\"Выбранный девайс не отвечает\")\n self.statusbar.showMessage(\"Выбранный порт более недоступен. Произведите повторный поиск\")\n return\n self.state.device_id = int(device)\n self.state.comport = comport\n self.create_message()\n self.set_controls_state(True)\n self.BtnL1.click()\n self.BtnAttenuate.click()\n self.SpinDACValue.setValue(35000)\n self.BtnSetDACValue.click()\n self.set_sw(\"0 1\")",
"def update_sdcard_boot_commands(device):\n mount_dir = mkdtemp()\n\n boot_partition = device.partitions(full_paths=True)[0]\n\n mount_command = ['sudo', 'mount', boot_partition, mount_dir]\n\n print(f'Mounting SD Card partition {boot_partition} to temp directory {mount_dir}')\n interactive_console(mount_command)\n\n # Note- this sed command is what the target mounts will look like\n # I'm not messing with the blk_ids of our devices as we know them\n # here.\n\n sed_command = [\n 'sudo',\n 'sed',\n '-i',\n '-E',\n 's#root=[^ ]+#root=/dev/sda2#',\n os.path.join(mount_dir, 'cmdline.txt')]\n console(sed_command)\n sed_command = [\n 'sudo',\n 'sed',\n '-i',\n 's# init=/usr/lib/raspi-config/init_resize.sh##',\n os.path.join(mount_dir, 'cmdline.txt')]\n\n print('Modifying init command line')\n console(sed_command)\n\n print('Successfully modified! Unmounting.')\n umount_command = ['sudo', 'umount', mount_dir]\n interactive_console(umount_command)\n\n print('Cleaning up mounted dir')\n os.rmdir(mount_dir)",
"def do_device(self, args):\n self.device_command.cmdloop(\"Enter to device mode\")",
"def create_boot_dev(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_CreateBootDev', self.handle))",
"def _boot_using_bootmon(self, target):\n self.logger.debug('Booting using bootmon.')\n\n try:\n self._wait_for_vemsd_mount(target, timeout=20)\n except DeviceError:\n # OK, something's wrong. Reboot the board and try again.\n self.logger.debug('VEMSD not mounted, attempting to power cycle device.')\n target.sendline(' ')\n state = target.expect(['Cmd> ', self.config.bootmon_prompt, self.android_prompt]) # pylint: disable=E1101\n\n if state == 0 or state == 1:\n # Reboot - Bootmon\n target.sendline('reboot')\n target.expect('Powering up system...')\n elif state == 2:\n target.sendline('reboot -n')\n target.expect('Powering up system...')\n else:\n raise DeviceError('Unexpected board state {}; should be 0, 1 or 2'.format(state))\n\n self._wait_for_vemsd_mount(target)\n\n self._setup_before_reboot()\n\n # Reboot - Bootmon\n self.logger.debug('Rebooting into bootloader...')\n open(os.path.join(self.config.root_mount, 'reboot.txt'), 'a').close()\n target.expect('Powering up system...')\n target.expect(self.config.bootmon_prompt)\n\n # Wait for VEMSD to mount\n self._wait_for_vemsd_mount(target)\n\n #Boot Linux - Bootmon\n target.sendline('fl linux fdt ' + self.config.dtb)\n target.expect(self.config.bootmon_prompt)\n target.sendline('fl linux initrd ' + self.config.initrd)\n target.expect(self.config.bootmon_prompt)\n #Workaround TC2 bootmon serial issue for loading large initrd blob\n target.sendline(' ')\n target.expect(self.config.bootmon_prompt)\n target.sendline('fl linux boot ' + self.config.kernel + self.config.kernel_arguments)",
"def config_led(my_bus):\n try:\n my_bus.write_i2c_block_data(LED_DEVICE_ADDRESS, 0x2F, [0xFF]) # system setup\n my_bus.write_i2c_block_data(LED_DEVICE_ADDRESS, 0x89, [0xFF]) # display on\n except IOError:\n t = 1\n print(\"got IOError. try again in\", t, \"second\")\n time.sleep(t)",
"def do_config(self, request):\n try:\n config_file = request['file']\n except KeyError:\n config_file = None\n self._target.load_device_config(request['device'], config_file)\n return None",
"def device(self, device):\n\n self._device = device",
"def _set_device_path(self):\n pass",
"def doInitializeDevice(self):\n try:\n\n if self.serialNumber == \"*\" or self.serialNumber == \".*\":\n self.device = OISpectrometer.matchUniqueUSBDevice( idProduct=self.idProduct)\n else:\n self.device = OISpectrometer.matchUniqueUSBDevice( idProduct=self.idProduct,\n serialNumber=self.serialNumber)\n\n \"\"\" Below are all the USB protocol details. This requires reading\n the USB documentation, the Spectrometer documentation and many other \n details. What follows may sound like gibberish.\n\n There is a single USB Configuration (default) with a single USB Interface \n without alternate settings, so we can use (0,0).\n \"\"\"\n self.device.set_configuration()\n self.configuration = self.device.get_active_configuration()\n self.interface = self.configuration[(0,0)]\n\n \"\"\"\n We are working on the reasonable assumption from the documentation\n that the first input and output endpoints are the main endpoints and the\n second input is the data endpoint. If that is not the case, the subclass can\n simply reassign the endpoints properly in its __init__ function. \n \"\"\"\n for endpoint in self.interface:\n \"\"\" The endpoint address has the 8th bit set to 1 when it is an input.\n We can check with the bitwise operator & (and) 0x80. It will be zero\n if an output and non-zero if an input. \"\"\"\n if endpoint.bEndpointAddress & 0x80 != 0:\n self.inputEndpoints.append(endpoint)\n else:\n self.outputEndpoints.append(endpoint)\n\n\n if len(self.inputEndpoints) >= 2 or len(self.outputEndpoints) > 0:\n \"\"\" We have at least 2 input endpoints and 1 output. We assign the\n endpoints according to the documentation, otherwise\n the subclass will need to assign them.\"\"\"\n self.epCommandOut = self.outputEndpoints[self.epCommandOutIdx]\n self.epMainIn = self.inputEndpoints[self.epMainInIdx]\n self.epSecondaryIn = self.inputEndpoints[self.epSecondaryInIdx]\n self.epParameters = self.inputEndpoints[self.epParametersIdx]\n self.epStatus = self.inputEndpoints[self.epStatusIdx]\n\n self.flushEndpoints()\n self.sendCommand(b'0x01')\n time.sleep(0.1)\n self.getCalibration()\n except Exception as err:\n raise UnableToInitialize(\"Error when initializing device: {0}\".format(err))",
"def device(self, primary_name=\"\", secondary_name=\"\"):\n logging.debug(\"In device() for FTDDeviceHAPairs class.\")\n primary = Device(fmc=self.fmc)\n primary.get(name=primary_name)\n secondary = Device(fmc=self.fmc)\n secondary.get(name=secondary_name)\n if \"id\" in primary.__dict__:\n self.primary_id = primary.id\n else:\n logging.warning(\n f\"Device {primary_name} not found. Cannot set up device for FTDDeviceHAPairs.\"\n )\n if \"id\" in secondary.__dict__:\n self.secondary_id = secondary.id\n else:\n logging.warning(\n f\"Device {secondary_name} not found. Cannot set up device for FTDDeviceHAPairs.\"\n )",
"def __init__(\n self, name=None, dm_name=None, appname=None, verbose=0,\n version=__version__, base_dir=None, use_stderr=False,\n simulate=False, sudo=False, quiet=False,\n *targs, **kwargs):\n\n # Normalisation of 'name' and 'dm_name'\n if name is not None:\n name = str(name).strip()\n\n if dm_name is not None:\n dm_name = str(dm_name).strip()\n\n # One of those two parameters must be valid:\n if not name and not dm_name:\n msg = _(\n \"In minimum one parameter of 'name' and 'dm_name' \"\n \"must be given on initialisation of a %s.\") % (\n self.__class__.__name__)\n raise DmDeviceInitError(msg)\n\n super(DeviceMapperDevice, self).__init__(\n name=name,\n appname=appname,\n verbose=verbose,\n version=version,\n base_dir=base_dir,\n use_stderr=use_stderr,\n simulate=simulate,\n sudo=sudo,\n quiet=quiet,\n )\n self.initialized = False\n\n if not name:\n name = self.retr_blockdev_name(dm_name)\n self._name = name\n\n self._dm_name = dm_name\n \"\"\"\n @ivar: the devicemapper name of the device\n @type: str\n \"\"\"\n\n failed_commands = []\n\n self._dmsetup_cmd = DMSETUP_CMD\n \"\"\"\n @ivar: the dmsetup command for manipulating the devicemapper device\n @type: str\n \"\"\"\n if not os.path.exists(self.dmsetup_cmd) or not os.access(\n self.dmsetup_cmd, os.X_OK):\n self._dmsetup_cmd = self.get_command('dmsetup')\n if not self.dmsetup_cmd:\n failed_commands.append('dmsetup')\n\n self._suspended = None\n \"\"\"\n @ivar: flag that the current device is in suspended mode\n @type: bool or None\n \"\"\"\n\n self._uuid = None\n \"\"\"\n @ivar: the devicemapper UUID\n @type: str or None\n \"\"\"\n\n self._table = None\n \"\"\"\n @ivar: the device mapper table (whatever it is)\n @type: str\n \"\"\"\n\n # Some commands are missing\n if failed_commands:\n raise CommandNotFoundError(failed_commands)\n\n self.initialized = True\n if self.verbose > 3:\n LOG.debug(_(\"Initialized.\"))",
"def createDevice(config, section):\n\n try:\n module_name, class_name = config.get(section, \"Class\").rsplit(\".\", 1)\n MyDevice = getattr(importlib.import_module(module_name), class_name)\n\n params = lambda key: config.get(section, key)\n devConns = []\n \n try:\n for connStr in params(\"Connection\").split(\",\"):\n devConns.append(connections[connStr])\n except ConfigParser.NoOptionError:\n # No connection is valid e.g. an actuator connection target\n pass\n \n d = MyDevice(devConns, logger, params, sensors, actuators)\n if config.getfloat(section, \"Poll\") == -1:\n Thread(target=d.checkState).start() # don't need to use cleanup-on-exit for non-polling sensors\n logger.info(\"Started thread to to run sensor\")\n\n return d\n except ImportError:\n logger.error(\"%s.%s is not supported on this platform\" % module_name, class_name)",
"def update_config(path: str, dev_id: str, device: Device) -> None:\n with open(path, \"a\", encoding=\"utf8\") as out:\n device_config = {\n device.dev_id: {\n ATTR_NAME: device.name,\n ATTR_MAC: device.mac,\n ATTR_ICON: device.icon,\n \"picture\": device.config_picture,\n \"track\": device.track,\n }\n }\n out.write(\"\\n\")\n out.write(dump(device_config))",
"def __init__(self, hass, entry, device: aioshelly.Device):\n super().__init__(\n hass,\n _LOGGER,\n name=device.settings[\"name\"] or device.settings[\"device\"][\"hostname\"],\n update_interval=timedelta(seconds=5),\n )\n self.hass = hass\n self.entry = entry\n self.device = device",
"def _use_existing_configuration(self):\n HW_Init(self.ftdi, None)",
"def set_device(device: Union[str, torch.device]) -> torch.device:\n err_msg = None\n if isinstance(device, torch.device):\n pass\n elif device == 'auto':\n cuda = torch.cuda.is_available()\n device = torch.device('cuda' if cuda else 'cpu')\n elif device == 'gpu':\n cuda = torch.cuda.is_available()\n if cuda:\n device = torch.device('cuda')\n else:\n err_msg = ('Device set to \"gpu\", but could not access '\n 'any CUDA-enabled GPU. Please make sure that '\n 'a GPU is available and CUDA is installed '\n 'on this machine.')\n elif device == 'cpu':\n device = torch.device('cpu')\n else:\n err_msg = f'Unknown device \"{device}\". Try \"auto\".'\n if err_msg is not None:\n logger = get_logger(__name__, verbose=0)\n logger.error(f'Unknown device \"{device}\". Try \"auto\".')\n import sys\n sys.exit(1)\n return device",
"def configure_wired_radius_attribute_44(device):\n try:\n device.configure([\n \"radius-server attribute 44 extend-with-addr\"\n ])\n except SubCommandFailure:\n raise SubCommandFailure(\n 'Could not configure wired radius attribute 44'\n )",
"def __init__(self,\n device_name,\n create_device_func,\n props,\n hub_name_prop,\n primary_port_prop,\n secondary_port_prop,\n ethernet_switch_prop,\n ethernet_port_prop,\n get_switchboard_if_initialized,\n power_and_data_share_cable=False,\n pre_off_func=None):\n super().__init__(device_name=device_name)\n\n self._create_device_func = create_device_func\n self._hub_name_prop = hub_name_prop\n self._primary_port_prop = primary_port_prop\n self._secondary_port_prop = secondary_port_prop\n self._props = props\n self._ethernet_switch = None\n\n # Set the properties\n self._get_switchboard_if_initialized = get_switchboard_if_initialized\n self._power_and_data_share_cable = power_and_data_share_cable\n self._pre_off_func = pre_off_func\n self._ethernet_switch_prop = ethernet_switch_prop\n self._ethernet_port_prop = ethernet_port_prop",
"def get_boot_device(self):\n operation = 'get_boot_device'\n try:\n boot_device = self.sp_manager.get_boot_device()\n return boot_device\n except UcsException as ex:\n print(_(\"Cisco client exception: %(msg)s.\"), {'msg': ex})\n raise exception.UcsOperationError(operation=operation, error=ex)",
"def setup_device(self, conf: DictConfig) -> device:\n device = torch.device(conf.runner.device) if torch.cuda.is_available() else torch.device('cpu')\n\n return device",
"def set_device(num):\n safe_call(backend.get().af_set_device(num))",
"def config(self, command):\n self._enter_config()\n if isinstance(command, list):\n entered_commands = []\n for command_instance in command:\n entered_commands.append(command_instance)\n try:\n self._send_command(command_instance)\n except CommandError as e:\n raise CommandListError(entered_commands, command_instance, e.cli_error_msg)\n else:\n self._send_command(command)\n self.native.exit_config_mode()\n log.info(\"Host %s: Device configured with command %s.\", self.host, command)",
"def _select_device(call: ServiceCall) -> None:\n if not (addr := call.data[ATTR_DEVICE]):\n _LOGGER.error(\"Device not found: %s\", call.data[ATTR_DEVICE])\n return\n if addr in device_aliases:\n addr = device_aliases[addr]\n else:\n entity = hass.states.get(addr)\n _LOGGER.debug(\"Selecting entity %s\", entity)\n if entity is not None:\n addr = entity.attributes[\"physical_address\"]\n _LOGGER.debug(\"Address acquired: %s\", addr)\n if addr is None:\n _LOGGER.error(\n \"Device %s has not physical address\", call.data[ATTR_DEVICE]\n )\n return\n if not isinstance(addr, (PhysicalAddress,)):\n addr = PhysicalAddress(addr)\n hdmi_network.active_source(addr)\n _LOGGER.info(\"Selected %s (%s)\", call.data[ATTR_DEVICE], addr)",
"def elAddNetworkConfigurationWithDhcp(self, device):\n commandSection = self.sectionByName(\"command\")\n # see http://docs.redhat.com/docs/en-US/Red_Hat_Enterprise_Linux/6/html/Installation_Guide/s1-kickstart2-options.html\n deviceMatch = re.match(r\"([^0-9]+)([0-9])\", device)\n if deviceMatch:\n # e.g. \"eth0\"\n devicePrefix = deviceMatch.group(1)\n deviceNumber = deviceMatch.group(2)\n deviceNumber = int(deviceNumber)\n for i in range(8, deviceNumber - 1, -1):\n deviceI = devicePrefix + str(i)\n deviceIPlus1 = devicePrefix + str(i + 1)\n # move up by one device each network configuration\n commandSection.string = re.sub(r\"(?m)^([ \\t]*network[ \\t]+.*--device[ \\t]*(?:=|[ \\t])[ \\t]*)\" + re.escape(deviceI) + r\"(.*)$\",\n r\"\\g<1>\" + deviceIPlus1 + r\"\\g<2>\",\n commandSection.string)\n # not --noipv6\n networkConfiguration = \"network --device=\" + device + \" --bootproto=dhcp --onboot=yes --activate\"\n if deviceMatch and deviceNumber == 0:\n # having configuration of eth0 first appears to be more conducive to overall success,\n # and also, per http://fedoraproject.org/wiki/Anaconda/Kickstart#network, supposedly\n # \"... in installer environment. Device of the first network command is activated if network is required,\n # e.g. in case of network installation ...\",\n commandSection.string = networkConfiguration + \"\\n\" \\\n + \"#\\n\" \\\n + commandSection.string\n else:\n commandSection.string = commandSection.string \\\n + \"#\\n\" \\\n + networkConfiguration + \"\\n\"",
"def on(config: dict):\n switch_device(config, config[\"inching\"], \"on\")",
"def setup_platform(hass, config, add_devices, discovery_info=None):\n name = config.get(CONF_NAME)\n mac = config.get(CONF_MAC)\n pin = config.get(CONF_PIN)\n\n add_devices([ProgtimeSwitch(mac, pin, name)])",
"def setDeviceConfig(self, device_config_dict):\n ip_address = str(device_config_dict[\"IP Address\"])\n port = int(device_config_dict[\"Port No\"])\n try:\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n # A single string is used for the AF_UNIX address family. A pair (host, port) is used for the\n # AF_INET address family, where host is a string representing either a hostname in Internet domain\n # notation like 'daring.cwi.nl' or an IPv4 address like '100.50.200.5', and port is an integer.\n #E.g., self.sock.connect(('192.168.1.155', 7777)) #raspberry ip = 192.168.1.155 and port = 7777\n self.sock.connect((ip_address, port))\n except socket.error,msg:\n dlg = wx.MessageDialog(None, str(msg), 'Info',wx.OK)\n dlg.ShowModal()\n raise",
"def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n host = config.get(CONF_HOST)\n name = config.get(CONF_NAME)\n token = config.get('token')\n\n add_devices_callback([MiroboSwitch(name, host, token)])",
"def setup_usb(self):\n global DEVICE\n global epBulkWriter\n global epBulkReader\n global VID\n global PID\n\n DEVICE = usb.core.find(idVendor=0x2AB9,idProduct=0xFFFF)\n if DEVICE is None:#If not a LVPM, look for an HVPM.\n DEVICE = usb.core.find(idVendor=0x04d8,idProduct=0x000b)\n VID = '0x4d8'\n PID = '0xb'\n if \"Linux\" == platform.system():\n try:\n DEVICE.detach_kernel_driver(0)\n except:\n pass # already unregistered\n DEVICE.set_configuration()\n\n cfg = DEVICE.get_active_configuration()\n intf = cfg[(0,0)]\n\n epBulkWriter = usb.util.find_descriptor(\n intf,\n custom_match = \\\n lambda e: \\\n usb.util.endpoint_direction(e.bEndpointAddress) == \\\n usb.util.ENDPOINT_OUT)\n epBulkReader = usb.util.find_descriptor(\n intf,\n custom_match = \\\n lambda e: \\\n usb.util.endpoint_direction(e.bEndpointAddress) == \\\n usb.util.ENDPOINT_IN)",
"def device(self, value):\n try:\n if isinstance(value, str):\n self._device_serial = value\n self._check_requirements()\n except ValueError:\n self._device_serial = None",
"def get_one_time_boot(self):\n system = self._get_host_details()\n try:\n if system['Boot']['BootSourceOverrideEnabled'] == 'Once':\n device = system['Boot']['BootSourceOverrideTarget']\n if device in DEVICE_RIS_TO_COMMON:\n return DEVICE_RIS_TO_COMMON[device]\n return device\n else:\n # value returned by RIBCL if one-time boot setting are absent\n return 'Normal'\n\n except KeyError as e:\n msg = \"get_one_time_boot failed with the KeyError:%s\"\n raise exception.IloError((msg) % e)",
"def setup_platform(hass, config, add_devices, discovery_info=None) -> None:\n friendly_name = config.get(CONF_FRIENDLY_NAME)\n mac_addr = config.get(CONF_MAC)\n add_devices([Switchmate(mac_addr, friendly_name)], True)",
"async def async_init_single_device(dev: Device) -> None:\n await dev.async_added_to_hass()\n dev.async_write_ha_state()",
"def boot(self, boot_node_request):\n return self.client.call('POST',\n self.name + 'boot', payload=boot_node_request)",
"def test_construct_3_default_bootsraps(self):\n configerus.new_config()",
"def load_config(device, filename):\n with open(filename, 'r') as f:\n config_data = json.load(f)\n\n device.send_configuration(config_data)",
"def set_device(self, device: torch.Tensor) -> None:\n raise NotImplementedError",
"def __init__(self, device: Dict[str, Union[str, int, float]]) -> None:\n super().__init__()\n log_int.debug(f\"Initializing serial device for {device['name']} :: Port: {device['port']} - Baud rate: \"\n f\"{device['baudrate']} - Timeout: {device['timeout']} - RTS/CTS: {bool(device['flowcontrol'])}\")\n self.device = serial.Serial(port=device['port'], baudrate=device['baudrate'],\n timeout=device['timeout'], rtscts=device['flowcontrol'])",
"def bootstrap_default():\n\treturn default_configuration",
"def do_configure_partition(cls, part, source_params, creator, cr_workdir,\n oe_builddir, bootimg_dir, kernel_dir,\n native_sysroot):\n if not cls.__imgBiosObj:\n cls.__instanciateBIOSClass()\n\n bootloader = creator.ks.bootloader\n\n if not bootloader.configfile:\n splash = os.path.join(cr_workdir, \"/hdd/boot/splash.jpg\")\n if os.path.exists(splash):\n splashline = \"menu background splash.jpg\"\n else:\n splashline = \"\"\n\n syslinux_conf = \"\"\n syslinux_conf += \"PROMPT 0\\n\"\n syslinux_conf += \"TIMEOUT \" + str(bootloader.timeout) + \"\\n\"\n syslinux_conf += \"\\n\"\n syslinux_conf += \"ALLOWOPTIONS 1\\n\"\n syslinux_conf += \"\\n\"\n if splashline:\n syslinux_conf += \"%s\\n\" % splashline\n\n syslinux_conf += \"DEFAULT boot\\n\"\n syslinux_conf += \"LABEL boot\\n\"\n syslinux_conf += \" KERNEL mboot.c32\\n\"\n\n # Split the bootloader args at '---' to separate the Xen args\n # from the Linux kernel args.\n # The Xen args here are defaults; overridden by bootloader append.\n xen_args = \"console=com1,vga com1=115200,8n1\"\n kernel_append = \"\"\n if bootloader.append:\n separator_pos = bootloader.append.find('---')\n if separator_pos != -1:\n xen_args = bootloader.append[:separator_pos]\n kernel_append = bootloader.append[separator_pos+3:]\n else:\n kernel_append = bootloader.append\n\n kernel_args = \"label=boot root=%s %s\" % \\\n (creator.rootdev, kernel_append)\n\n syslinux_conf += \" APPEND /xen.gz %s --- /vmlinuz %s\" % \\\n (xen_args, kernel_args)\n\n initrd = source_params.get('initrd')\n if initrd:\n initrds = initrd.split(';')\n for initrd_file in initrds:\n syslinux_conf += \" --- /%s\" % os.path.basename(initrd_file)\n syslinux_conf += \"\\n\"\n\n logger.debug(\"Writing syslinux config %s/hdd/boot/syslinux.cfg\",\n cr_workdir)\n\n hdddir = \"%s/hdd/boot\" % cr_workdir\n install_cmd = \"install -d %s\" % hdddir\n exec_cmd(install_cmd)\n\n cfg = open(\"%s/hdd/boot/syslinux.cfg\" % cr_workdir, \"w\")\n cfg.write(syslinux_conf)\n cfg.close()\n\n else:\n cls.__imgBiosObj.do_configure_partition(part, source_params,\n creator, cr_workdir,\n oe_builddir, bootimg_dir,\n kernel_dir, native_sysroot)",
"def flask_configure_device(device_id):\n try:\n # retrieve the authorization token\n token = retrieve_auth_token(request)\n\n params = {'data': request.data}\n\n LOGGER.info(f' Actuating in the device with id {device_id}.')\n result = DeviceHandler.configure_device(params, device_id, token)\n return make_response(jsonify(result), 200)\n\n except HTTPRequestError as error:\n LOGGER.error(f' {error.message} - {error.error_code}.')\n if isinstance(error.message, dict):\n return make_response(jsonify(error.message), error.error_code)\n\n return format_response(error.error_code, error.message)",
"def load_devices():",
"def script_set_device(self,udid=None):\n self.desired_caps['udid'] = udid;",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def set_device_type(device: str = \"cuda\"):\n DefaultDeviceType._default_device_type = device",
"def __init__(self, device_dict):\n diff = set(device_dict.keys()) - set(YAMLKeyword.__dict__.keys())\n if len(diff) > 0:\n six.print_('Wrong key detected:')\n six.print_(diff)\n raise KeyError(str(diff))\n self.__dict__.update(device_dict)\n if self.system == SystemType.android:\n pass\n elif self.system == SystemType.arm_linux:\n try:\n sh.ssh('-q', '%s@%s' % (self.username, self.address),\n 'exit')\n except sh.ErrorReturnCode as e:\n six.print_('device connect failed, '\n 'please check your authentication',\n file=sys.stderr)\n raise e",
"def change_device(self, device=None):\n\n if device is None:\n # If the function is called without a device, use the current device\n device = self.device\n\n # Create the appropriate device object\n device = torch.device(f'cuda:{device}'\n if torch.cuda.is_available() else 'cpu')\n\n # Change device field\n self.device = device\n # Load the transcription model onto the device\n self.to(self.device)",
"def device_config(self):\n\t\ttry:\n\t\t\treturn self._dev\n\t\texcept:\n\t\t\treturn 0",
"def configure(args):\n\n emu = Emulator(args,\n cpu='68030',\n frequency=24 * 1000 * 1000)\n # initially only the EEPROM exists; aliased at 0 all the way up to 0xfe000000\n # we only map the low and high aliases, as the intermediates aren't interesting\n emu.add_memory(base=0, size=512 * 1024, writable=False, from_file=args.rom)\n emu.add_memory(base=0xfe000000, size=512 * 1024, writable=False, from_file=args.rom)\n\n emu.add_device(args,\n MC68681,\n address=0xfffff000,\n interrupt=m68k.IRQ_2,\n register_arrangement='16-bit-doubled')\n emu.add_device(args,\n CompactFlash,\n address=0xffffe000,\n register_arrangement='8-bit' if args.cf_width == 8 else '16-bit')\n emu.add_device(args,\n CB030Remap,\n address=0xffff8000)\n emu.add_device(args,\n CB030Ticker,\n address=0xffff9000,\n interrupt=m68k.IRQ_6)\n return emu",
"def settings_OBD(self, label):\n if label == 'bt':\n try:\n self.default['serialLabel'] = label\n self.default['serialDevice'] = config.config().serialDevice[label]\n os.system(\"blueman-manager\")\n except:\n print \"Please install 'blueman' package\"\n elif label == 'usb':\n self.default['serialLabel'] = label\n self.default['serialDevice'] = config.config().serialDevice[label]\n elif label == 'dev':\n self.default['serialLabel'] = label\n self.default['serialDevice'] = config.config().serialDevice[label]\n elif label == 'metric':\n self.default['units'] = 'metric'\n print 'made it'\n elif label == 'US':\n self.default['units'] = 'US' \n else: #ATSP signal return int -> else\n self.default['ATSP'] = self.ui.spinBox_ATSP.value()\n\n return",
"def test_config_device_init_with_defaults(get_config, monkeypatch):\n notbase_config = {'not_presented': 1}\n monkeypatch.setattr(DeviceConfig, 'minimal_essential_conf', notbase_config)\n cfg = get_config(DeviceConfig, base_config)\n\n assert isinstance(cfg, DeviceConfig), 'wrong class'\n assert cfg.data == notbase_config, 'bad config loaded'",
"def _get_bios_boot_resource(self, data):\n try:\n boot_uri = data['links']['Boot']['href']\n except KeyError:\n msg = ('Boot resource not found.')\n raise exception.IloCommandNotSupportedError(msg)\n\n status, headers, boot_settings = self._rest_get(boot_uri)\n\n if status != 200:\n msg = self._get_extended_error(boot_settings)\n raise exception.IloError(msg)\n\n return boot_settings",
"def __init__(self, **device_identifiers):\n\n # Connect to the first available device.\n try:\n self.device = usb.core.find(**device_identifiers)\n except usb.core.USBError as e:\n # On some platforms, providing identifiers that don't match with any\n # real device produces a USBError/Pipe Error. We'll convert it into a\n # DeviceNotFoundError.\n if e.errno == LIBUSB_PIPE_ERROR:\n raise DeviceNotFoundError()\n else:\n raise e\n\n # If we couldn't find a board, bail out early.\n if self.device is None:\n raise DeviceNotFoundError()\n\n # For now, supported boards provide a single configuration, so we\n # can accept the first configuration provided.\n self.device.set_configuration()\n\n # Run the parent initialization.\n super(USBCommsBackend, self).__init__(**device_identifiers)",
"def __init__(self, address=0x68, config=0):\r\n\t\tself.i2c = FT232H.I2CDevice(ft232h, address)\r\n\t\tif config == 0:\r\n\t\t\tself.setWake()\r\n\t\t\tself.setScale(mode='ACC',scale=0)\r\n\t\t\tself.setScale(mode='GYR',scale=0)\r\n\t\telif config == 1:\r\n\t\t\tself.setWake()\r\n\t\t\tself.setScale(mode='ACC',scale=1)\r\n\t\t\tself.setScale(mode='GYR',scale=1)\t\t\t\t\r\n\t\telif config == 2:\r\n\t\t\tself.setWake()\r\n\t\t\tself.setScale(mode='ACC',scale=2)\r\n\t\t\tself.setScale(mode='GYR',scale=2)\t\r\n\t\telif config == 3:\r\n\t\t\tself.setWake()\r\n\t\t\tself.setScale(mode='ACC',scale=3)\r\n\t\t\tself.setScale(mode='GYR',scale=3)\t\t\t\t\r\n\t\telif config == 4:\r\n\t\t\tself.setWake()\r\n\t\t\tself.setScale(mode='ACC',scale=1)\t\r\n\t\t\tself.setTempDisable()\r\n\t\t\tself.setGYRStandby(axis='X')\r\n\t\t\tself.setGYRStandby(axis='Y')\r\n\t\t\tself.setGYRStandby(axis='Z')",
"def configure_wired_radius_attribute(device, attr_num, attr_profile):\n try:\n device.configure([\n f\"radius-server attribute {attr_num} {attr_profile}\"\n ])\n except SubCommandFailure:\n raise SubCommandFailure(\n 'Could not configure wired radius attribute'\n )",
"def set_start_configuration(self):\r\n with open('config.json', 'rt') as jsonfile:\r\n configuration = jsonfile.read()\r\n configuration_data = json.loads(configuration)\r\n print(configuration_data[0][0])\r\n ip = IPRoute()\r\n index = ip.link_lookup(ifname='eth0')[0]\r\n ip.link('set', index=index, state='up')\r\n ip.addr('add', index, address=configuration_data[0][0], mask=24)\r\n ip.close()",
"def set_device(device, backend='autograd'):\n if backend == 'autograd':\n return None\n elif backend == 'pytorch':\n try:\n tc.cuda.set_device(device)\n except:\n pass",
"def setErrorDevice(device='file'):\n edict = {'file':'FILE','screen':'CONS'}\n dislin.errdev(edict[device])",
"def boot():\r\n print \"\"\"\r\n ###### ## ## ### ## ## ## ## ######## ########\r\n ## ## ## ## ## ## ### ## ## ## ## ## ##\r\n ## #### ## ## #### ## ## ## ## ## ##\r\n ## ## ## ## ## ## ## ## ## ######## ######\r\n ## ## ######### ## #### ## ## ## ## ##\r\n ## ## ## ## ## ## ### ## ## ## ## ##\r\n ###### ## ## ## ## ## ####### ## ## ########\r\n\r\n Version %s-%s\r\n\r\n Multi Purpose Artificial Inelegance Program\r\n Copyright (c) Alexandre Gauthier 2010-2011\r\n All Rights Reserved\r\n \"\"\" % ( constants.VERSION, constants.TAGNAME )\r\n\r\n # Initialize log\r\n # TODO: The values should be read from config file.\r\n log.init_log('cyanure.log', 'DEBUG')\r\n\r\n logger.info(\"Cyanure system init: Version %s (%s)\" % (\r\n constants.VERSION, constants.TAGNAME ))",
"def setBootargs(self):\n\t\tif self.testType == 'auto' or self.testType == 'manual':\n\t\t\tself.bootargs = self.settings.getKeyValue('nfs.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<nfsroot>', self.nfsroot)\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\n\t\telse:\n\t\t\tself.bootargs = self.settings.getKeyValue('ramdisk.fs')\n\t\t\tself.bootargs = self.bootargs.replace('<ipaddr>', self.ipaddr)\n\t\t\tself.socket.send('setenv bootargs ' + self.bootargs+'\\r', 1)\t\t\n\t\treturn None",
"def state(config: dict):\n\n async def state_callback(device):\n if device.basic_info is not None:\n if device.available:\n print_device_details(device)\n\n device.shutdown_event_loop()\n\n logger.info(\"Initialising SonoffSwitch with host %s\" % config[\"host\"])\n SonoffSwitch(\n host=config[\"host\"],\n callback_after_update=state_callback,\n logger=logger,\n device_id=config[\"device_id\"],\n api_key=config[\"api_key\"],\n )",
"def CreateDevices(cfg,\n build_target=None,\n build_id=None,\n branch=None,\n kernel_build_id=None,\n kernel_branch=None,\n kernel_build_target=None,\n system_branch=None,\n system_build_id=None,\n system_build_target=None,\n bootloader_branch=None,\n bootloader_build_id=None,\n bootloader_build_target=None,\n gpu=None,\n num=1,\n serial_log_file=None,\n autoconnect=False,\n report_internal_ip=False,\n boot_timeout_secs=None,\n ins_timeout_secs=None):\n client_adb_port = None\n unlock_screen = False\n wait_for_boot = True\n logger.info(\n \"Creating a cuttlefish device in project %s, \"\n \"build_target: %s, \"\n \"build_id: %s, \"\n \"branch: %s, \"\n \"kernel_build_id: %s, \"\n \"kernel_branch: %s, \"\n \"kernel_build_target: %s, \"\n \"system_branch: %s, \"\n \"system_build_id: %s, \"\n \"system_build_target: %s, \"\n \"bootloader_branch: %s, \"\n \"bootloader_build_id: %s, \"\n \"bootloader_build_target: %s, \"\n \"gpu: %s\"\n \"num: %s, \"\n \"serial_log_file: %s, \"\n \"autoconnect: %s, \"\n \"report_internal_ip: %s\", cfg.project, build_target,\n build_id, branch, kernel_build_id, kernel_branch, kernel_build_target,\n system_branch, system_build_id, system_build_target, bootloader_branch,\n bootloader_build_id, bootloader_build_target, gpu, num, serial_log_file,\n autoconnect, report_internal_ip)\n # If multi_stage enable, launch_cvd don't write serial log to instance. So\n # it doesn't go WaitForBoot function.\n if cfg.enable_multi_stage:\n wait_for_boot = False\n device_factory = CuttlefishDeviceFactory(\n cfg, build_target, build_id, branch=branch,\n kernel_build_id=kernel_build_id, kernel_branch=kernel_branch,\n kernel_build_target=kernel_build_target, system_branch=system_branch,\n system_build_id=system_build_id,\n system_build_target=system_build_target,\n bootloader_branch=bootloader_branch,\n bootloader_build_id=bootloader_build_id,\n bootloader_build_target=bootloader_build_target,\n boot_timeout_secs=boot_timeout_secs,\n ins_timeout_secs=ins_timeout_secs,\n report_internal_ip=report_internal_ip,\n gpu=gpu)\n return common_operations.CreateDevices(\"create_cf\", cfg, device_factory,\n num, constants.TYPE_CF,\n report_internal_ip, autoconnect,\n serial_log_file, client_adb_port,\n boot_timeout_secs, unlock_screen,\n wait_for_boot)",
"def set_boot_options(self, image_name, **vendor_specifics):\n current_boot = self.show(\"show running-config | inc ^boot system \")\n file_system = vendor_specifics.get(\"file_system\")\n if file_system is None:\n file_system = self._get_file_system()\n\n file_system_files = self.show(f\"dir {file_system}\")\n if re.search(image_name, file_system_files) is None:\n log.error(\"Host %s: File not found error for image %s.\", self.host, image_name)\n raise NTCFileNotFoundError(\n # TODO: Update to use hostname\n hostname=self.host,\n file=image_name,\n directory=file_system,\n )\n\n current_images = current_boot.splitlines()\n commands_to_exec = [f\"no {image}\" for image in current_images]\n commands_to_exec.append(f\"boot system {file_system}/{image_name}\")\n self.config(commands_to_exec)\n\n self.save()\n if self.boot_options[\"sys\"] != image_name:\n log.error(\"Host %s: Setting boot command did not yield expected results\", self.host)\n raise CommandError(\n command=f\"boot system {file_system}/{image_name}\",\n message=\"Setting boot command did not yield expected results\",\n )\n\n log.info(\"Host %s: boot options have been set to %s\", self.host, image_name)",
"def bootloader() -> NoReturn:",
"async def async_setup(self):\n dev_reg = await device_registry.async_get_registry(self.hass)\n model_type = self.device.settings[\"device\"][\"type\"]\n dev_reg.async_get_or_create(\n config_entry_id=self.entry.entry_id,\n name=self.name,\n connections={(device_registry.CONNECTION_NETWORK_MAC, self.mac)},\n # This is duplicate but otherwise via_device can't work\n identifiers={(DOMAIN, self.mac)},\n manufacturer=\"Shelly\",\n model=aioshelly.MODEL_NAMES.get(model_type, model_type),\n sw_version=self.device.settings[\"fw\"],\n )",
"def process_device(spc, device):\n try:\n d = device.get();\n print(\"Processing device: \", device.name)\n me_href = d['managed-elements']['managed-element'].get('href')\n me = factory.fetch_resource(spc, me_href)\n\n # Fetch Physical Termination Points\n ptps = me.ptps.get()\n for p in ptps:\n p.get()\n\n # Fetch equipment inventory\n ehs = me.equipment_holders.get()\n for eh in ehs:\n eh.get()\n\n # Fetch software inventory\n me.software_identities.get()\n\n # Fetch relevant configuration\n try:\n device.configurations.expanded.post(xpaths=[\n '/configuration/version',\n '/configuration/routing-instances',\n '/configuration/access/radius-server',\n '/configuration/system/domain-name',\n '/configuration/routing-options/router-id',\n '/configuration/interfaces/interface[name=\"lo0\"]'])\n except:\n pass\n\n return device.name\n except:\n raise Exception(\"Failed to process %s due to %s\" % (device.name, sys.exc_info()[1]))",
"def __init__(self, hass, entry, device: aioshelly.Device):\n super().__init__(\n hass,\n _LOGGER,\n name=device.settings[\"name\"] or entry.title,\n update_interval=timedelta(seconds=5),\n )\n self.hass = hass\n self.entry = entry\n self.device = device\n self._unsub_stop = None",
"def test_setup_adds_proper_devices(self, mock_light):\n good_config = {\n \"mochad\": {},\n \"light\": {\n \"platform\": \"mochad\",\n \"devices\": [{\"name\": \"Light1\", \"address\": \"a1\"}],\n },\n }\n assert setup_component(self.hass, light.DOMAIN, good_config)",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def find(ctx, name):\n conf = settings.devices.get(name, dict())\n if conf.get('type') == 'command':\n return conf, name, name\n\n uuids = ctx.obj['uuids']\n context = Context()\n for dev in iter(context.list_devices()):\n if 'ID_FS_TYPE' in dev:\n if name == uuids.get(dev.get('ID_FS_UUID')):\n return (settings.devices[name], dev['DEVNAME'],\n settings.devices[name].get('label',\n dev.get('ID_FS_LABEL')))\n\n print('Device \"%s\" not found.' % name)\n sys.exit(1)",
"def setup_platform(hass, config, add_devices, discovery_info=None):\n switches = []\n for coil in config.get(\"coils\"):\n switches.append(ModbusCoilSwitch(\n coil.get(CONF_NAME),\n coil.get(CONF_SLAVE),\n coil.get(CONF_COIL)))\n add_devices(switches)",
"def _post_init(self):\n self._led_type_code = self.manager.get_typecode('LED')\n self.device_path = os.path.realpath(os.path.join(self.path, 'device'))\n if '::' in self.name:\n chardev, code_name = self.name.split('::')\n if code_name in self.manager.codes['LED_type_codes']:\n self.code = self.manager.codes['LED_type_codes'][code_name]\n try:\n event_number = chardev.split('input')[1]\n except IndexError:\n print(\"Failed with\", self.name)\n raise\n else:\n self._character_device_path = '/dev/input/event' + event_number\n self._match_device()",
"def select_data_config(self):\r\n\t\tDATA_CONFIG = (L3DG20_DEFAULT | L3DG20_FS_2000)\r\n\t\tbus.write_byte_data(L3DG20_DEFAULT_ADDRESS, L3DG20_REG_CTRL4, DATA_CONFIG)",
"def configure(self):\n self.node.get_logger().info('Configuring device...')\n try:\n data = self.con.receive(registers.BNO055_CHIP_ID_ADDR, 1)\n if data[0] != registers.BNO055_ID:\n raise IOError('Device ID=%s is incorrect' % data)\n # print(\"device sent \", binascii.hexlify(data))\n except Exception as e: # noqa: B902\n # This is the first communication - exit if it does not work\n self.node.get_logger().error('Communication error: %s' % e)\n self.node.get_logger().error('Shutting down ROS node...')\n sys.exit(1)\n\n # IMU connected => apply IMU Configuration:\n if not (self.con.transmit(registers.BNO055_OPR_MODE_ADDR, 1, bytes([registers.OPERATION_MODE_CONFIG]))):\n self.node.get_logger().warn('Unable to set IMU into config mode.')\n\n if not (self.con.transmit(registers.BNO055_PWR_MODE_ADDR, 1, bytes([registers.POWER_MODE_NORMAL]))):\n self.node.get_logger().warn('Unable to set IMU normal power mode.')\n\n if not (self.con.transmit(registers.BNO055_PAGE_ID_ADDR, 1, bytes([0x00]))):\n self.node.get_logger().warn('Unable to set IMU register page 0.')\n\n if not (self.con.transmit(registers.BNO055_SYS_TRIGGER_ADDR, 1, bytes([0x00]))):\n self.node.get_logger().warn('Unable to start IMU.')\n\n if not (self.con.transmit(registers.BNO055_UNIT_SEL_ADDR, 1, bytes([0x83]))):\n self.node.get_logger().warn('Unable to set IMU units.')\n\n # The sensor placement configuration (Axis remapping) defines the\n # position and orientation of the sensor mount.\n # See also Bosch BNO055 datasheet section Axis Remap\n mount_positions = {\n 'P0': bytes(b'\\x21\\x04'),\n 'P1': bytes(b'\\x24\\x00'),\n 'P2': bytes(b'\\x24\\x06'),\n 'P3': bytes(b'\\x21\\x02'),\n 'P4': bytes(b'\\x24\\x03'),\n 'P5': bytes(b'\\x21\\x02'),\n 'P6': bytes(b'\\x21\\x07'),\n 'P7': bytes(b'\\x24\\x05')\n }\n if not (self.con.transmit(registers.BNO055_AXIS_MAP_CONFIG_ADDR, 2,\n mount_positions[self.param.placement_axis_remap.value])):\n self.node.get_logger().warn('Unable to set sensor placement configuration.')\n\n # Show the current sensor offsets\n self.node.get_logger().info('Current sensor offsets:')\n self.print_calib_data()\n if self.param.set_offsets.value:\n configured_offsets = \\\n self.set_calib_offsets(\n self.param.offset_acc,\n self.param.offset_mag,\n self.param.offset_gyr,\n self.param.radius_mag,\n self.param.radius_acc)\n if configured_offsets:\n self.node.get_logger().info('Successfully configured sensor offsets to:')\n self.print_calib_data()\n else:\n self.node.get_logger().warn('setting offsets failed')\n\n\n # Set Device mode\n device_mode = self.param.operation_mode.value\n self.node.get_logger().info(f\"Setting device_mode to {device_mode}\")\n\n if not (self.con.transmit(registers.BNO055_OPR_MODE_ADDR, 1, bytes([device_mode]))):\n self.node.get_logger().warn('Unable to set IMU operation mode into operation mode.')\n\n self.node.get_logger().info('Bosch BNO055 IMU configuration complete.')",
"def start(self, device, *args, **kwargs):\n raise NotImplementedError",
"def part_device(part_number):\n return \"/dev/mmcblk0p\" + part_number",
"def build_config(device):\n capabilities = device.capabilities(verbose=True)\n config = {}\n\n for key, value in capabilities.items():\n for element in value:\n if type(element[0]) is tuple:\n config[element[0][1]] = element[0][0]\n elif type(element[0]) is list:\n config[element[1]] = element[0][0]\n elif (\"SYN\" in str(element[0])) or (\"FF\" in str(element[0])):\n pass\n else:\n config[element[1]] = element[0]\n\n print(\"Config Dict: \" + str(config) + \"\\n\")\n return config",
"def test_update_bios_boot_mode(self):\n pass"
] | [
"0.715671",
"0.6926375",
"0.6674017",
"0.65847355",
"0.6503858",
"0.6176356",
"0.6132347",
"0.6019202",
"0.6009357",
"0.5958234",
"0.5893135",
"0.58901983",
"0.5786446",
"0.5759376",
"0.5718868",
"0.569955",
"0.568893",
"0.56540716",
"0.5607827",
"0.5554028",
"0.55511093",
"0.5538043",
"0.55168605",
"0.5511721",
"0.5482761",
"0.54753107",
"0.5468619",
"0.54568845",
"0.54545057",
"0.543368",
"0.542492",
"0.54224366",
"0.54206556",
"0.5418279",
"0.5415675",
"0.54092515",
"0.53985023",
"0.5388817",
"0.5381518",
"0.537412",
"0.5368344",
"0.53667253",
"0.5366059",
"0.5364072",
"0.5361422",
"0.5358452",
"0.53530705",
"0.53374636",
"0.5330731",
"0.5329287",
"0.5316307",
"0.52985823",
"0.5297886",
"0.5294263",
"0.52917504",
"0.5291747",
"0.5285682",
"0.52841055",
"0.5282569",
"0.52811164",
"0.5272466",
"0.5271465",
"0.5266358",
"0.5258771",
"0.52551806",
"0.5247387",
"0.5240701",
"0.5240316",
"0.5239585",
"0.52284527",
"0.52232724",
"0.5219654",
"0.52164406",
"0.52122945",
"0.5211019",
"0.5206693",
"0.51978433",
"0.51909804",
"0.51857245",
"0.5180657",
"0.5177922",
"0.51771986",
"0.5173971",
"0.5148448",
"0.51467294",
"0.51397973",
"0.5128558",
"0.5126616",
"0.5116682",
"0.5113017",
"0.5109817",
"0.51089454",
"0.5106116",
"0.5098278",
"0.50965875",
"0.50955063",
"0.5092747",
"0.508898",
"0.50863206",
"0.50741875"
] | 0.65558326 | 4 |
Retrieves the current setting for the one time boot. | def get_one_time_boot(self):
system = self._get_host_details()
try:
if system['Boot']['BootSourceOverrideEnabled'] == 'Once':
device = system['Boot']['BootSourceOverrideTarget']
if device in DEVICE_RIS_TO_COMMON:
return DEVICE_RIS_TO_COMMON[device]
return device
else:
# value returned by RIBCL if the one-time boot setting is absent
return 'Normal'
except KeyError as e:
msg = "get_one_time_boot failed with the KeyError:%s"
raise exception.IloError((msg) % e) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getCurrentSetting(self):\n return {}",
"def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault",
"def initial(self):\n from setman import settings\n return getattr(settings, self.name, self.default)",
"def settings():\n return _get_settings()[1]",
"def myCurrentSetting(self):\n paramDict = self.getCurrentSetting()\n return paramDict",
"def GetSettingInformation(self):\n if self.cur_uid is None:\n return\n self._get_device_hours()",
"def bootstrap_setting(value):\n return get_bootstrap_setting(value)",
"def get_setting(self, key, default=NOT_SET):\n if key in self.settings:\n return self.settings[key]\n app_key = 'tangled.app.' + key\n if app_key in self.settings:\n return self.settings[app_key]\n if default is NOT_SET:\n raise KeyError(\"'{}' not present in settings\".format(key))\n return default",
"def get_setting(self, id):\n return __settings__.getSetting(id)",
"def get_system_value(name: str):\n return Config.objects.first().__dict__[name]",
"def current_settings(self):\n return {\n 'power_state': self.power_state,\n 'brightness': self.brightness,\n }",
"def get_setting(self, setting):\n return self.do_rpc(\"get_setting\", key=key)",
"def get(self):\n self.value = os.getenv(self.name, self.default)\n return self.value",
"def __returnCurrentSettingLocal__(self):\n return self.dmdParams",
"def getGlobalSetting(self, setting):\n self._cacheConfig()\n settingVal = None\n try:\n settingVal = self._fileCache[setting]\n except KeyError:\n # if no global setting exists, try finding the value as a daily setting\n # (if all days are the same it'll be a global, but otherwise we'll just give today's setting)\n settingVal = self.getDailySetting(getDayFromNum(datetime.datetime.today().weekday()), setting)\n\n return settingVal",
"def getSystemAwake(self):\n print 'start of getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n try:\n self.db = shelve.open(os.path.join(self.xlocal, 'Launch Manager Utils\\\\launch.data'))\n if self.db['system_awake'] == False:\n print 'start of if true - getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n self.system_awake = self.db['system_awake']\n self.db.close()\n else:\n self.system_awake = True\n self.db['system_awake'] = self.system_awake\n self.db.close()\n \n print 'End of getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n \n except Exception, e:\n self.log_file.logEntry('{0}\\nUnable to load previous system_awake value, setting value to True'.format(e))\n self.system_awake = True",
"def get_settings(self):\n return self.settings",
"def config(self):\n return self[CONFIG_KEY]",
"def get_config():\n return CONFIG",
"def get_config():\n return _CONFIG",
"def __returnCurrentSettingLocal__(self):\n return {}",
"def default_value(self):\n return self.__class__.get_setting_default(self.key, **self.get_kwargs())",
"def get_settings(self):\n return self.request({\n \"path\": \"/\" + UUID + \"/setting\"\n })",
"def get_setting(key):\n try:\n from main import flask_app\n return flask_app.config[key]\n except:\n environment = get_environment()\n #Load settings from the corresponding class\n if environment == Config.ENV_PRODUCTION:\n obj = ProductionConfig()\n else:\n obj = TestingConfig()\n return getattr(obj, key)",
"def get_setting_value(self, key, default = None):\n \n if not \"settings\" in self.configuration or not key in self.configuration['settings']:\n return default\n \n return self.configuration['settings'][key]",
"def current_option(self) -> str | None:\n # If the translation key is \"zone_sleep\", we need to translate\n # the value to make it compatible with Home Assistant\n if (\n value := self.capability.current\n ) is not None and self.translation_key == \"zone_sleep\":\n return ZONE_SLEEP_STATE_MAPPING[value]\n\n return value",
"def _get_local_preference(self):\n return self.__local_preference",
"def _get_conf(self):\n self.press_conf = self.sysconf['PressureRegulators']\n return self.press_conf['PressureRegulator%d' % self.id_]",
"def GetCurrent():\n global ENV\n return ENV[threading.current_thread().ident]",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def _get_config(self):\n return self.__config",
"def get_config() -> Optional[Config]:\n return CurrentConfig.get()",
"def __get_current_auto_os_updates_setting_on_machine(self):\n try:\n download_updates_value = \"\"\n apply_updates_value = \"\"\n is_service_installed = False\n enable_on_reboot_value = False\n\n # get install state\n if not self.is_auto_update_service_installed(self.install_check_cmd):\n return is_service_installed, enable_on_reboot_value, download_updates_value, apply_updates_value\n\n is_service_installed = True\n enable_on_reboot_value = self.is_service_set_to_enable_on_reboot(self.enable_on_reboot_check_cmd)\n\n self.composite_logger.log_debug(\"Checking if auto updates are currently enabled...\")\n image_default_patch_configuration = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path, raise_if_not_found=False)\n if image_default_patch_configuration is not None:\n settings = image_default_patch_configuration.strip().split('\\n')\n for setting in settings:\n match = re.search(self.download_updates_identifier_text + self.auto_update_config_pattern_match_text, str(setting))\n if match is not None:\n download_updates_value = match.group(1)\n\n match = re.search(self.apply_updates_identifier_text + self.auto_update_config_pattern_match_text, str(setting))\n if match is not None:\n apply_updates_value = match.group(1)\n\n if download_updates_value == \"\":\n self.composite_logger.log_debug(\"Machine did not have any value set for [Setting={0}]\".format(str(self.download_updates_identifier_text)))\n else:\n self.composite_logger.log_verbose(\"Current value set for [{0}={1}]\".format(str(self.download_updates_identifier_text), str(download_updates_value)))\n\n if apply_updates_value == \"\":\n self.composite_logger.log_debug(\"Machine did not have any value set for [Setting={0}]\".format(str(self.apply_updates_identifier_text)))\n else:\n self.composite_logger.log_verbose(\"Current value set for [{0}={1}]\".format(str(self.apply_updates_identifier_text), str(apply_updates_value)))\n\n return is_service_installed, enable_on_reboot_value, download_updates_value, apply_updates_value\n\n except Exception as error:\n raise Exception(\"Error occurred in fetching current auto OS update settings from the machine. [Exception={0}]\".format(repr(error)))"
] | [
"0.69801486",
"0.68082726",
"0.6772974",
"0.67710704",
"0.6631858",
"0.6596899",
"0.65748394",
"0.63340545",
"0.6281833",
"0.6263905",
"0.6256233",
"0.6233154",
"0.6194374",
"0.614808",
"0.61427236",
"0.6128543",
"0.61068916",
"0.6082622",
"0.60769004",
"0.6052895",
"0.6045621",
"0.60309285",
"0.6005268",
"0.6004204",
"0.59984726",
"0.5994063",
"0.5988337",
"0.59701717",
"0.59500754",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.59436923",
"0.5930191",
"0.5928344"
] | 0.6882444 | 1 |
Gets the firmware update service uri. | def _get_firmware_update_service_resource(self):
manager, uri = self._get_ilo_details()
try:
fw_uri = manager['Oem']['Hp']['links']['UpdateService']['href']
except KeyError:
msg = ("Firmware Update Service resource not found.")
raise exception.IloCommandNotSupportedError(msg)
return fw_uri | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_service_url():\n return get_config_handler().get_service_url()",
"def _get_uri(plex_server):\n return plex_server.url(\n \"/:/websockets/notifications\", includeToken=True\n ).replace(\"http\", \"ws\")",
"def get_http_boot_uri(self):\n try:\n sushy_system = self._get_sushy_system()\n http_boot_uri = sushy_system.http_boot_uri.httpbooturi\n except sushy.exceptions.SushyError as e:\n msg = (self._('Not able to find HTTP Boot URI. Error: '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n return http_boot_uri",
"def _uri(helper):\n return '/'.join((\n helper.context_meta['server_uri'],\n 'servicesNS',\n 'nobody',\n 'Splunk_TA_paloalto',\n 'storage',\n 'collections',\n 'data',\n 'minemeldfeeds'))",
"def EndpointURI(self):\n return '/'.join(str(x) for x in [self.base_endpoint,self.match,self.resource] if x)",
"def __get_url_addr(self):\n request = urlopen(self.url)\n version = request.readline()\n request.close()\n request = urlparse.urlparse(self.url)\n unparsed_url = urlparse.urlunparse((request.scheme, request.netloc,\n request.path, '', '', ''))\n updated_url = urlparse.urljoin(unparsed_url, version + '/' +\n self.file_name)\n return updated_url",
"def service_endpoint(self) -> str:\n return pulumi.get(self, \"service_endpoint\")",
"def get_uri(self):\n if self._uri is None:\n self._uri = \"{0}{1}/{2}\".format(\n self.session.resource_prefix,\n self.base_uri,\n self.ip_or_ifname_or_group_name,\n )\n\n return self._uri",
"def _get_api_endpoint():\n try:\n return get_service_endpoint(\"apiext\").strip(\"/\")\n except:\n log.warn(\n \"Could not find valid apiext endpoint for links so will use policy engine endpoint instead\"\n )\n try:\n return get_service_endpoint(\"policy_engine\").strip(\"/\")\n except:\n log.warn(\n \"No policy engine endpoint found either, using default but invalid url\"\n )\n return \"http://<valid endpoint not found>\"",
"def service_url(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"service_url\")",
"def get_overpass_uri() -> str:\n Config.__get()\n assert Config.__config is not None\n return Config.__config.get(\"wsgi\", \"overpass_uri\", fallback=\"https://overpass-api.de\").strip()",
"def endpoint_uri(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"endpoint_uri\")",
"def uri_for_service(self, region, service_id, base_uri):\n return str(URLPath.fromString(base_uri)\n .child(\"service\").child(region).child(service_id).child(\"\"))",
"def uri(cls):\n return f'{cls.app_label}.{cls.name}'",
"def uri(self) -> Optional[str]:\n return pulumi.get(self, \"uri\")",
"def get_uri(self):\n return self.url",
"def _get_webservice_url(self, ws_key):\n if self._webservices.get(ws_key) is None:\n raise PyiCloudServiceNotActivatedException(\n \"Webservice not available\", ws_key\n )\n return self._webservices[ws_key][\"url\"]",
"def uri(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"uri\")",
"def get_latest_version_link(self):\n return self.get_latest_version().dbgap_link",
"def get_wsdl_url(self):\n return self.mycam.devicemgmt.GetWsdlUrl()",
"def service_url(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"service_url\")",
"def service_url(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"service_url\")",
"def application_service_path(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"application_service_path\")",
"def api_endpoint(self, url):\n if urlparse(url).scheme in [\"http\", \"https\"]:\n return url # url is already complete\n return urljoin(f\"{RESOURCE}/{API_VERSION}/\", url.lstrip(\"/\"))",
"def uri(self) -> str:\n return self._uri",
"def get_update_url(self, resource_obj=None, **kwargs):\n\n full_url = getattr(resource_obj, 'full_url', None)\n if full_url:\n return full_url\n\n try:\n update_url = self._generate_url(\n url_type='update', resource_obj=resource_obj, **kwargs\n )\n except ValueError:\n update_url = None\n\n return update_url",
"def getURI(self):\n return _libsbml.SBasePlugin_getURI(self)",
"def getEndpoint(self):\n port = \"\"\n endpoint = \"\"\n keyConfig = self.getKeyConfig()\n\n if \"port\" in keyConfig:\n port = \":\" + keyConfig[\"port\"]\n elif self._data[\"port\"] != self.PORT:\n port = \":\" + self._data[\"port\"]\n\n if \"endpoint\" in keyConfig:\n endpoint = keyConfig[\"endpoint\"]\n else:\n endpoint = self._data[\"endpoint\"]\n\n return \"https://%s%s/%s/\" % (endpoint, port, self._data[\"api_version\"])",
"def _get_base_url(self):\n return 'https://'+self.get_address_and_port_string()",
"def endpoint_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"endpoint_uri\")",
"def endpoint_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"endpoint_uri\")",
"def getURI(self):\n return _libsbml.SBase_getURI(self)",
"def get_service_endpoint(service_name):\n service = Config.get_service_info(service_name)\n return 'http://%s:%s' % (service['url'], service['port'])",
"def uri(self):\n return self._uri",
"def uri(self):\n return self._uri",
"def uri(self):\n return self._uri",
"def uri(self):\n return self._uri",
"def uri(self):\n return self._uri",
"def uri(self):\n return self._uri",
"def get_url(self):\r\n if self.mod.filename:\r\n return self.mod.service.get_mirror() + self.mod.filename",
"def get_uri(self):\n return self.__uri",
"def uri(self) -> str:\n return self._host",
"def _base_url(self):\n # URL Protocol\n proto = 'https' if self._ssl else 'http'\n\n # Device port number\n if self._port is None:\n port = 8080 if self._ssl else 8008\n else:\n port = self._port\n \n return f'{proto}://{self._address}:{port}/api/v1'",
"def dmapi_uri(self):\n return self.get_uri(prefix=\"dmapi\")",
"def uri(self) -> Optional[str]: # noqa: D401\n return self._uri",
"def getURI(self):\n return _libsbml.FbcPkgNamespaces_getURI(self)",
"def get_update_user_endpoint_url() -> str:\n return \"/users/update\"",
"def get_api_path(self):\n return self._get_api_base() + '/' + self._get_resource_suffix()",
"def get_uri(self):\r\n return self.uri",
"def request_uri(self, identifier):\n path = self.PATH_TEMPLATE % (identifier, identifier)\n return self.api_baseurl + path",
"def api_url(self):\n return self.get_api_url()",
"def url(self):\n url = os.environ.get('PATH_INFO')\\\n or os.environ.get('REQUEST_URI')\n return url if url else ''",
"def get_sia_rrdp_notify(self):\n\n sia = self.get_POW().getSIA()\n return None if sia is None else first_https_uri(sia[3]) or first_http_uri(sia[3])",
"def uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"uri\")",
"def uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"uri\")",
"def uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"uri\")",
"def uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"uri\")",
"def uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"uri\")",
"def get_manager_file_server_blueprints_root_url():\n return os.environ[MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL_KEY]",
"def get_base_url(self):\n try:\n return self.get_metadata()['api_endpoint']\n except requests.exceptions.RequestException:\n raise",
"def uri(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"uri\")",
"def uri(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"uri\")",
"def uri(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"uri\")",
"def url(self) -> str:\n if \"main\" not in self._resources:\n self._initialize()\n return self._resources[\"main\"].url",
"def _getURL(serviceName, options):\n system = options['System']\n port = options['Port']\n host = socket.gethostname()\n url = 'dips://%s:%s/%s/%s' % (host, port, system, serviceName)\n return url",
"def full(self):\n url = (self.scheme + ':') if self.scheme else ''\n url += '//' + self.netloc + self.relative()\n return url",
"def firmware_update_image(self) -> Optional[str]:\n return pulumi.get(self, \"firmware_update_image\")",
"def _get_api_url(self):\n return \"%s/%s/\" % (settings.API_URL, settings.API_VERSION)",
"def endpoint(self) -> str:\n return pulumi.get(self, \"endpoint\")",
"def endpoint(self) -> str:\n return pulumi.get(self, \"endpoint\")",
"def existing_url(module):\n # Build the format dictionary\n url_base = \"/axapi/v3/event-notification/kafka/server\"\n\n f_dict = {}\n\n return url_base.format(**f_dict)",
"def url(self):\n if not self._is_served:\n raise RuntimeError('Cannot determine app url if app is not yet \"served\".')\n elif not (_current_server and _current_server.serving):\n raise RuntimeError('Cannot determine app url if the server is not '\n 'yet running.')\n else:\n host, port = _current_server.serving\n return 'http://%s:%i/%s/' % (host, port, self._path)",
"def api_url(self, endpoint):\n\n return '{}/{}'.format(self.api_root, endpoint)",
"def get_sia_manifest_uri(self):\n\n sia = self.get_POW().getSIA()\n return None if sia is None else first_rsync_uri(sia[1])",
"def build_service_address(self, binding_id):\n return \"%s://%s%s\" % (get_protocol(binding_id), Site.objects.get_current().domain, self.path)",
"def resource_uri(self) -> Optional[str]:\n return pulumi.get(self, \"resource_uri\")",
"def resource_uri(self) -> Optional[str]:\n return pulumi.get(self, \"resource_uri\")",
"def get_resolver_xml_url(dataset_url):\n root = get_element_root_from_url(dataset_url)\n latest = ''\n latest_base = ''\n latest_url = None\n for s in root.findall(xmlns_prefix + 'service'):\n if (s.get(\"serviceType\") == \"Resolver\"):\n latest = s.get(\"name\")\n latest_base = s.get(\"base\")\n ds = find_dataset(root,latest)\n if (ds is not None):\n # TODO: generalize this to handle relatives paths starting with a '/' \n latest_url = get_url_path(dataset_url) + latest_base + '/' + ds.get('urlPath')\n return latest_url",
"def get_url(self):\n if not self.__initialized:\n raise NSNitroError(\"Not initialized.\")\n return self.__baseurl",
"def resource_uri(self):\n primary_key_value = getattr(self, self.primary_key(), None)\n return '/{}/{}'.format(self.endpoint(), primary_key_value)",
"def api_url(self):\n return self._api_url",
"def _get_api_url_for (self, component):\n if self.api_data['API_ROOT'].find(self.api_data['API_BASE_URL']) > -1:\n return self.api_data['API_ROOT'] + '/' + self.api_data['BUILD_IDENTIFIER'] + self.urls[component]\n else:\n return self.api_data['API_ROOT'] + self.api_data['API_BASE_URL'] + '/' + self.api_data['BUILD_IDENTIFIER'] + self.urls[component]",
"def get_download_url(self, ha):\n return create_ipa_url(ha)",
"def getPackageURI(self):\n return _libsbml.CompBase_getPackageURI(self)",
"def build_websocket_url(self):\n\n r = requests.get(constants.WS_HOST, headers=constants.HTTP_HEADERS)\n ws_info = r.json()\n ws_info[\"securePort\"] = str(ws_info[\"securePort\"])\n ws_uri = constants.WS_URI + ws_info[\"token\"]\n ws_url = f\"wss://{ws_info['ip']}:{ws_info['securePort']}/{ws_uri}\"\n return ws_url",
"def _get_webservice_versionstring(self, service):\n version = self.get_webservice_version(service)\n return \".\".join(map(str, version))",
"def getFullURL(self):\n return self.FullURL",
"def get_services_dir():\n return bytestostr(libruss.russ_get_services_dir())",
"def get_service_url(service_name):\n\n # Use the Service Discovery service if Prod and toggle is on\n if Config.SD_STATUS == 'ON' and env.get('VCAP_SERVICES') is not None:\n try:\n creds = loads(env['VCAP_SERVICES'])['service_discovery'][0]['credentials']\n locator = ServiceLocator(creds['url'], creds['auth_token'])\n service_instances = loads(locator.get_services(service_name=service_name, status='UP',\n tags=env['LOGISTICS_WIZARD_ENV']))['instances']\n if len(service_instances) == 0:\n raise APIException('Dependent service not available')\n return 'https://%s' % service_instances[0]['endpoint']['value']\n except Exception as e:\n if isinstance(e, Exception):\n e = e.message\n raise APIException('Cannot get dependent service', user_details=str(e), internal_details=str(e))\n # Otherwise, get the service endpoint from an env var\n else:\n if service_name == 'lw-erp':\n return env['ERP_SERVICE']\n elif service_name == 'lw-recommendation':\n return env['RECOMMENDATION_SERVICE']\n else:\n raise APIException('Unrecognized service invocation')",
"def _get_url(self):\n return 'http://{}:{}'.format(self.host, self.port)",
"def get_manager_file_server_url():\n return os.environ[MANAGER_FILE_SERVER_URL_KEY]",
"def get_mozilla_dmg_url(self, base_url, product_name, release, locale):",
"def getURI(self):\n return _libsbml.MultiPkgNamespaces_getURI(self)",
"def getURI(self):\n return _libsbml.CompPkgNamespaces_getURI(self)",
"def getURI(self, *args):\n return _libsbml.FbcExtension_getURI(self, *args)",
"def get_url(self):\n return self.db_url",
"def get_outcome_service_url(self, service_name=\"grade_handler\"):\r\n return self.runtime.handler_url(self, service_name, thirdparty=True).rstrip('/?')",
"def get_feedback_url(self):\r\n\r\n return self.get_setting('service_feedback_url')",
"def xmlrpc_url(self):\n return self.key_name_parts()[0]",
"def health_http_uri(self) -> Optional[str]:\n return pulumi.get(self, \"health_http_uri\")"
] | [
"0.65430695",
"0.64294636",
"0.64230037",
"0.62257314",
"0.6150541",
"0.60008526",
"0.598782",
"0.59735656",
"0.593328",
"0.59007615",
"0.5866994",
"0.58536565",
"0.5798969",
"0.57935137",
"0.57414603",
"0.5725773",
"0.57218593",
"0.5717393",
"0.569106",
"0.5676379",
"0.5646494",
"0.5646494",
"0.5617997",
"0.5610391",
"0.5605624",
"0.55928123",
"0.55890864",
"0.5585353",
"0.55811614",
"0.55675423",
"0.55675423",
"0.55642885",
"0.5561149",
"0.55596876",
"0.55596876",
"0.55596876",
"0.55596876",
"0.55596876",
"0.55596876",
"0.5559421",
"0.5548387",
"0.55455375",
"0.55452883",
"0.5525479",
"0.55197734",
"0.5504261",
"0.54795784",
"0.5451125",
"0.544949",
"0.5445454",
"0.54397535",
"0.5426231",
"0.54260623",
"0.54114145",
"0.54114145",
"0.54114145",
"0.54114145",
"0.54114145",
"0.54026073",
"0.54019326",
"0.5400075",
"0.5400075",
"0.5400075",
"0.5394169",
"0.53918475",
"0.53899837",
"0.53895754",
"0.5374952",
"0.53653306",
"0.53653306",
"0.53630596",
"0.535756",
"0.5356544",
"0.53429174",
"0.53000504",
"0.52969384",
"0.52969384",
"0.5292025",
"0.5281142",
"0.5277324",
"0.52683395",
"0.5265074",
"0.526309",
"0.52606475",
"0.5249833",
"0.52463627",
"0.5239144",
"0.5237545",
"0.5217395",
"0.52128434",
"0.5211003",
"0.52038366",
"0.520251",
"0.5185805",
"0.5182738",
"0.5175706",
"0.51711094",
"0.51695436",
"0.516845",
"0.5166124"
] | 0.80128765 | 0 |
Updates the given firmware on the server for the given component. | def update_firmware(self, file_url, component_type):
fw_update_uri = self._get_firmware_update_service_resource()
action_data = {
'Action': 'InstallFromURI',
'FirmwareURI': file_url,
}
# perform the POST
LOG.debug(self._('Flashing firmware file: %s ...'), file_url)
status, headers, response = self._rest_post(
fw_update_uri, None, action_data)
if status != 200:
msg = self._get_extended_error(response)
raise exception.IloError(msg)
# wait till the firmware update completes.
common.wait_for_ris_firmware_update_to_complete(self)
try:
state, percent = self.get_firmware_update_progress()
except exception.IloError:
msg = 'Status of firmware update not known'
LOG.debug(self._(msg)) # noqa
return
if state == "ERROR":
msg = 'Unable to update firmware'
LOG.debug(self._(msg)) # noqa
raise exception.IloError(msg)
elif state == "UNKNOWN":
msg = 'Status of firmware update not known'
LOG.debug(self._(msg)) # noqa
else: # "COMPLETED" | "IDLE"
LOG.info(self._('Flashing firmware file: %s ... done'), file_url) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_firmware(self):\n self.execute_command(CMD_UPDATE_FIRMWARE)",
"def update_firmware(self) -> str:",
"def fusion_api_li_upgrade_firmware(self, body=None, uri=None, api=None, param='', headers=None):\n param = '/firmware'\n return self.li.update(body=body, uri=uri, api=api, headers=headers, param=param)",
"def update_firmware(self) -> None:\n\n BROADCAST_ID = 0xFFF\n firmware_update_message = self.__set_module_state(\n BROADCAST_ID, Module.State.UPDATE_FIRMWARE, Module.State.PNP_OFF\n )\n self._send_q.put(firmware_update_message)\n self.__delay()",
"def update_firmware(self, node, port):\n return hpsum_controller.update_firmware(node)",
"def fusion_api_le_firmware_update(self, body=None, uri=None, api=None, headers=None, etag=None):\n return self.logical_enclosure.patch(body, uri, api, headers, etag)",
"def update_firmware(firmware_path, script_path):\n\n args = ['uflash', '-r', firmware_path, script_path]\n subprocess.call(args)",
"def fusion_api_edit_server_hardware_mp_firmware_version(self, body, uri, api=None, headers=None):\n return self.sh.update(body, uri, api, headers, param='/mpFirmwareVersion')",
"def update_firmware(self, file_url, reinstall=False,\n exclude_npar_fw=False):\n try:\n update_service_inst = self._sushy.get_update_service()\n update_service_inst.flash_firmware(\n self, file_url, reinstall, exclude_npar_fw)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to update firmware '\n 'with firmware %(file)s Error %(error)s') %\n {'file': file_url, 'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)",
"def fusion_api_update_sas_li_firmware(self, body=None, uri=None, api=None, headers=None):\n param = \"/firmware\" # put method expecting a param\n return self.sasli.put(body=body, uri=uri, param=param, api=api, headers=headers)",
"def fusion_api_upgrade_appliance_firmware(self, localfile, api=None, headers=None):\n param = '?file=%s' % localfile\n return self.appfirmware.update(api, headers, param)",
"def install_component_firmware(self, component_name, image_path):\n raise NotImplementedError",
"def update_firmware(self):\n return self._dll.JLINKARM_UpdateFirmwareIfNewer()",
"def fusion_api_edit_server_hardware(self, body, uri, api=None, headers=None):\n return self.sh.update(body, uri, api, headers)",
"def test_update_hyperflex_server_firmware_version(self):\n pass",
"def firmware_pack_modify(handle, org_name, name, rack_bundle_version=None,\n blade_bundle_version=None, descr=None, mode=None,\n org_parent=\"org-root\"):\n\n org_dn = org_parent + \"/org-\" + org_name\n fw_dn= org_dn + \"/fw-host-pack-\" + name\n mo = handle.query_dn(fw_dn)\n if mo is not None:\n if rack_bundle_version is not None:\n mo.rack_bundle_version = rack_bundle_version\n if blade_bundle_version is not None:\n mo.blade_bundle_version = blade_bundle_version\n if mode is not None:\n mo.mode=mode\n if descr is not None:\n mo.descr = descr\n\n handle.set_mo(mo)\n handle.commit()\n else:\n log.info(\"Firmware host pack <%s> not found.\" % name)",
"def fusion_api_patch_server_hardware(self, body, uri, api=None, headers=None):\n return self.sh.patch(body, uri, api, headers)",
"def upgrade(\n self, firmware: Union[bytes, str], status_tracker=_default_progress_tracker\n ) -> None:\n\n def _update_tracker(\n status_tracker,\n preamble_count,\n indexf,\n indexl,\n firmware_len,\n unknown_count,\n done,\n upload,\n ):\n if status_tracker is not None:\n status_tracker(\n preamble_count,\n indexf,\n indexl,\n firmware_len,\n unknown_count,\n done,\n upload,\n )\n\n if isinstance(firmware, str):\n # load binary from file\n with open(firmware, \"rb\") as f:\n firmware = f.read()\n\n firmware_len = int(len(firmware))\n half_len = int(firmware_len / 2)\n\n # split firmware in 2 halves (low and high part) and compute the half file length (2 bytes).\n two_bytes_half_len = half_len.to_bytes(2, \"big\")\n lenf = two_bytes_half_len[0].to_bytes(1, \"big\")\n lenl = two_bytes_half_len[1].to_bytes(1, \"big\")\n\n buff = firmware[:half_len]\n bufl = firmware[half_len:]\n\n # save RS-232 parameters\n baudrate = self.serial.baudrate\n timeout = self.serial.timeout\n\n indexf = -1\n indexl = -1\n preamble_count = 0\n unknown_count = 0\n\n # switch serial to upgrade mode\n self.serial.baudrate = 57600\n self.timeout = 5\n\n # Process upgrade preamble. Wait to have enough \"c\" chars to consider the preamble valid.\n # This helps getting rid of potential garbage in the buffer which could mess up with the protocol\n while preamble_count < 10:\n msg = self.serial.read(1)\n if msg == b\"\":\n # Timeout. Abort upgrade\n raise CloudWatcherException(\"Upgrade failed - timeout before transfer\")\n elif msg == b\"c\" or msg == b\"\\xff\":\n # 0xFF may occur after a B!O!O!T! -triggered reboot. Not on power-on. Funny.\n preamble_count += 1\n else:\n # Unknown message from CW. Should we abort ? Count just in case.\n unknown_count += 1\n _update_tracker(\n status_tracker,\n preamble_count,\n indexf,\n indexl,\n firmware_len,\n unknown_count,\n False,\n False,\n )\n\n # Signal CW that we are ready to transfer\n self.serial.write(b\"d\")\n\n # remain at 57600 bps but lower the timeout.\n self.timeout = 1\n\n # Actual firmware upload\n while indexf < half_len or indexl < half_len:\n msg = self.serial.read(1)\n if msg == b\"\":\n # Timeout. End transfer\n raise CloudWatcherException(\"Upgrade failed - timeout during transfer\")\n elif msg == b\"c\" or msg == b\"\\xff\":\n # Absorb excess \"c\" that may occur after sending \"d\". 0xFF occur after sending \"d\" in B!O!O!T! triggered sequences but not on power-on.\n preamble_count += 1\n elif msg == b\"0\":\n if indexf < 0:\n self.serial.write(lenf)\n else:\n self.serial.write(buff[indexf].to_bytes(1, \"big\"))\n indexf += 1\n elif msg == b\"1\":\n if indexl < 0:\n self.serial.write(lenl)\n else:\n self.serial.write(bufl[indexl].to_bytes(1, \"big\"))\n indexl += 1\n else:\n # Unknown message from CW. Should we abort ? Count just in case.\n unknown_count += 1\n\n _update_tracker(\n status_tracker,\n preamble_count,\n indexf,\n indexl,\n firmware_len,\n unknown_count,\n False,\n True,\n )\n\n # Tell the progress tracker we're done\n _update_tracker(\n status_tracker,\n preamble_count,\n indexf,\n indexl,\n firmware_len,\n unknown_count,\n True,\n True,\n )\n\n # CW should now be rebooting. It will be in upgrade mode for a few more seconds and send a bunch of \"c\"\n # Let's wait until it is over.\n char_count = 0\n err_count = 0\n while char_count < 1000:\n msg = self.serial.read()\n char_count += 1\n if msg == b\"\":\n # Timeout. 
Done with the upgrade-ready pattern.\n # restore RS-232 parameters and return to end the upload process\n self.serial.baudrate = baudrate\n self.serial.timeout = timeout\n return\n elif msg != b\"c\":\n err_count += 1\n\n # If the loop ended, CW is still in upgrade mode. This means the upgrade failed. Troubleshoot.\n raise CloudWatcherException(\"Upgrade failed - stuck in upgrade mode\")",
"def test_patch_hyperflex_server_firmware_version(self):\n pass",
"def update_device(self, dev_dict):\n # Note(jprabh1x): added bus,slot,function into fields dict as \n # seperate fields.\n no_changes = ('status', 'instance_uuid', 'id', 'extra_info', 'workload')\n map(lambda x: dev_dict.pop(x, None),\n [key for key in no_changes])\n\n # Note(jprabh1x): populating values for bus,slot,function from address in dev_dict.\n if dev_dict.has_key(\"address\"):\n \t\taddress = pci_utils.parse_address(dev_dict[\"address\"])\n \t\tdev_dict.update({'bus':str(address[1]), 'slot':str(address[2]), 'function':str(address[3])})\n for k, v in dev_dict.items():\n if k in self.fields.keys():\n self[k] = v\n else:\n extra_info = self.extra_info\n extra_info.update({k: str(v)})\n self.extra_info = extra_info",
"def _update_device_attributes_on_backend(self):\n if self.is_paired:\n LOG.info('Sending updated device attributes to the backend...')\n try:\n api = DeviceApi()\n api.update_version()\n except Exception:\n self._notify_backend_down()",
"def command_update_hw(self, cmd):\n # TODO\n pass",
"async def update_zigbee_firmware(hass: HomeAssistant, host: str, custom: bool):\n await async_process_requirements(hass, DOMAIN, ['xmodem==0.4.6'])\n\n try:\n async with shell.Session(host) as session:\n sh = await session.login()\n assert await sh.run_zigbee_flash()\n except Exception as e:\n _LOGGER.error(\"Can't update zigbee firmware\", exc_info=e)\n return False\n\n await asyncio.sleep(.5)\n\n args = [\n host, [8115, 8038], NCP_URL % 'mgl03_ncp_6_7_10_b38400_sw.gbl',\n 'v6.7.10', 8038\n ] if custom else [\n host, [8115, 8038], NCP_URL % 'ncp-uart-sw_mgl03_6_6_2_stock.gbl',\n 'v6.6.2', 8115\n ]\n\n for _ in range(3):\n if await hass.async_add_executor_job(flash_zigbee_firmware, *args):\n return True\n return False",
"def doFirmwareUpgrade(self, serial, unitId, fwFile):\n \n b = self.getBridge(serial)\n \n if unitId != 0:\n # We are going to upgrade the motes\n b.upgradeThread = SkymoteFirmwareUpgraderThread(b, fwFile, upgradeMotes = True, recovery = False)\n else:\n # We are going to upgrade the bridge\n b.upgradeThread = SkymoteFirmwareUpgraderThread(b, fwFile, upgradeMotes = False, recovery = False)\n \n b.upgradeThread.start()\n \n return True",
"def with_firmware_update(self, firmware_handler: FirmwareHandler): # type: ignore\n self.logger.debug(f\"Firmware handler: {firmware_handler}\")\n if self.file_management is None:\n raise RuntimeError(\n \"File management must be enabled before firmware update\"\n )\n self.firmware_update = OSFirmwareUpdate(\n firmware_handler, self._on_firmware_update_status\n )\n\n return self",
"def bdev_nvme_apply_firmware(client, bdev_name, filename):\n params = {\n 'filename': filename,\n 'bdev_name': bdev_name,\n }\n return client.call('bdev_nvme_apply_firmware', params)",
"def fusion_api_get_server_hardware_firmware_compliance(self, body, api=None, headers=None):\n return self.sh.post(body=body, param='/firmware-compliance', api=api, headers=headers)",
"def write_firmware(self, data):\n data = list(map(int, data))\n\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_WRITE_FIRMWARE, (data,), '64B', 'B')",
"def write_firmware(self, data):\n self.check_validity()\n\n data = list(map(int, data))\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_WRITE_FIRMWARE, (data,), '64B', 9, 'B')",
"def install_firmware(self, firmware_file_path: str) -> None:\n raise NotImplementedError()",
"def sync_firmware(self):\n serial_no = self.serial_number\n\n if self.firmware_newer():\n # The J-Link's firmware is newer than the one compatible with the\n # DLL (though there are promises of backwards compatibility), so\n # perform a downgrade.\n try:\n # This may throw an exception on older versions of the J-Link\n # software due to the software timing out after a firmware\n # upgrade.\n self.invalidate_firmware()\n self.update_firmware()\n except errors.JLinkException as e:\n pass\n\n res = self.open(serial_no=serial_no)\n\n if self.firmware_newer():\n raise errors.JLinkException('Failed to sync firmware version.')\n\n return res\n\n elif self.firmware_outdated():\n # The J-Link's firmware is older than the one compatible with the\n # DLL, so perform a firmware upgrade.\n try:\n # This may throw an exception on older versions of the J-Link\n # software due to the software timing out after a firmware\n # upgrade.\n self.update_firmware()\n except errors.JLinkException as e:\n pass\n\n if self.firmware_outdated():\n raise errors.JLinkException('Failed to sync firmware version.')\n\n return self.open(serial_no=serial_no)\n\n return None",
"def update(self):\n try:\n self._device.update()\n except requests.exceptions.HTTPError as ex:\n _LOGGER.warning(\"Fritzhome connection error: %s\", ex)\n self._fritz.login()",
"def pack_firmware(self, work_dir, jobclient, version_string=\"\"):\n raise NotImplementedError(\"Abstract method not implemented\")",
"def install_firmware(self, pbz_path, recovery=False):\n\n\t\tresources = None\n\t\twith zipfile.ZipFile(pbz_path) as pbz:\n\t\t\tbinary = pbz.read(\"tintin_fw.bin\")\n\t\t\tif not recovery:\n\t\t\t\tresources = pbz.read(\"system_resources.pbpack\")\n\n\t\tself.system_message(\"FIRMWARE_START\")\n\t\ttime.sleep(2)\n\n\t\tif resources:\n\t\t\tclient = PutBytesClient(self, 0, \"SYS_RESOURCES\", resources)\n\t\t\tself.register_endpoint(\"PUTBYTES\", client.handle_message)\n\t\t\tclient.init()\n\t\t\twhile not client._done and not client._error:\n\t\t\t\tpass\n\t\t\tif client._error:\n\t\t\t\traise PebbleError(self.id, \"Failed to send firmware resources %s/system_resources.pbpack\" % pbz_path)\n\n\n\t\tclient = PutBytesClient(self, 0, \"RECOVERY\" if recovery else \"FIRMWARE\", binary)\n\t\tself.register_endpoint(\"PUTBYTES\", client.handle_message)\n\t\tclient.init()\n\t\twhile not client._done and not client._error:\n\t\t\tpass\n\t\tif client._error:\n\t\t\traise PebbleError(self.id, \"Failed to send firmware binary %s/tintin_fw.bin\" % pbz_path)\n\n\t\tself.system_message(\"FIRMWARE_COMPLETE\")",
"def update_device(device_id):\n netAdminToolDB = app.config['DATABASE']\n device = netAdminToolDB.get_device(device_id)\n #print(f'update_device request = {request.get_data()}')\n if device == None:\n return jsonify({'error': 'Device_id not found'}), 404\n\n input = request.get_json()\n\n if input == None:\n return jsonify({'error': 'Invalid PUT request'}), 400\n\n # Get update values from device for supported keys with value None\n if 'sw_version' in input and input['sw_version'] == None:\n # If device credentials were provided\n if 'device_username' and 'device_password' in input:\n input['sw_version'] = get_version_from_device(device,\n input['device_username'], input['device_password'])\n if input['sw_version'] == None:\n return jsonify({'error': 'Unable to retrieve sw_version from device.'}), 404\n # Device credentials not provided, return error\n else:\n return jsonify({'error': 'Updates from device require credentials.'}), 400\n\n if 'serial_number' in input and input['serial_number'] == None:\n # If device credentials were provided\n if 'device_username' and 'device_password' in input:\n input['serial_number'] = get_serial_from_device(device,\n input['device_username'], input['device_password'])\n if input['serial_number'] == None:\n return jsonify({'error': 'Unable to retrieve serial_number from device.'}), 404\n # Device credentials not provided, return error\n else:\n return jsonify({'error': 'Updates from device require credentials.'}), 400\n\n # Send input directly to update_device function, which checks each key.\n netAdminToolDB.update_device(device_id, **input)\n device = netAdminToolDB.get_device(device_id)\n deviceDict = dict(device)\n uri = url_for('get_device',device_id=device.id,_external=True)\n deviceDict['uri'] = uri\n\n return jsonify({'device': deviceDict}), 200",
"def update_server_profile_firmware(*profile_obj):\n logger._log_to_console_and_log_file(\"Update firmware for Server Profiles\")\n\n if isinstance(profile_obj, test_data.DataObj):\n profile_obj = [profile_obj]\n elif isinstance(profile_obj, tuple):\n profile_obj = list(profile_obj[0])\n\n for profile in profile_obj:\n selenium2lib = ui_lib.get_s2l()\n if not selenium2lib._is_element_present(FusionServerProfilesPage.ID_PAGE_LABEL):\n navigate()\n profile_list = [el.text for el in selenium2lib._element_find(FusionServerProfilesPage.ID_PROFILE_LIST_NAMES, False, False)]\n if profile.name not in profile_list:\n logger._warn(\"Profile '%s' does not exist\" % profile.name)\n continue\n # Select & Edit Server Profile\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_ELEMENT_PROFILE_NAME_BASE % profile.name)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_MENU_MAIN_ACTION)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_MENU_MAIN_ACTION)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_MENU_ACTION_EDIT)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_MENU_ACTION_EDIT)\n\n # Adding firmware baseline\n if profile.has_property(\"manageFirmware\") and profile.manageFirmware == \"true\":\n logger._log_to_console_and_log_file(\"Selecting firmware baseline..\")\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_DROPDOWN_BTN_FIRMWARE_BASELINE)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_COMBO_FIRMWARE_BASELINE_LIST % profile.spp)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_COMBO_FIRMWARE_BASELINE_LIST % profile.spp)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_DROPDOWN_FIRMWARE_BASELINE)\n selectedFW = selenium2lib.get_text(FusionServerProfilesPage.ID_DROPDOWN_FIRMWARE_BASELINE)\n logger._log_to_console_and_log_file(\"Selected firmware is %s \" % selectedFW)\n if not selectedFW == profile.spp:\n logger._warn(\"Failed to select preferred firmware bundle..'\" + profile.spp + \"' at the edit page\")\n continue\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_BTN_CONFIRM_UPDATE_FIRMWARE, PerfConstants.PROFILE_ACTIVITY)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_BTN_CONFIRM_UPDATE_FIRMWARE)\n if not ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_MAIN_PAGE, PerfConstants.PROFILE_ACTIVITY):\n if ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_ERROR_POPUP, PerfConstants.DEFAULT_SYNC_TIME):\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_ERROR_MSG, PerfConstants.DEFAULT_SYNC_TIME)\n error_msg = selenium2lib.get_text(FusionServerProfilesPage.ID_ERROR_MSG)\n logger._warn(\"Selected Bay: '\" + profile.name + \"' has encountered an error with the message : '\" + error_msg + \"' , may be the hardware is being managed by another system\")\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_BTN_CANCEL_UPDATE_FIRMWARE)\n logger._log_to_console_and_log_file(\"Firmware Update canceled\")\n continue\n if ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_STATUS_CHANGING, PerfConstants.PROFILE_ACTIVITY):\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_MAIN_PAGE)\n ui_lib.wait_for_element_visible(FusionDashboardPage.ID_LINK_ACTIVITY, PerfConstants.ACTIVITY)\n ui_lib.wait_for_element_and_click(FusionDashboardPage.ID_LINK_ACTIVITY)\n if ui_lib.wait_for_element(FusionServerProfilesPage.ID_NEW_ACTIVITY_PROGRESS % profile.name, PerfConstants.FIRMWARE_VALIDATION):\n start_time = 
selenium2lib.get_text(FusionServerProfilesPage.ID_NEW_ACTIVITY_TIMESTAMP % profile.name)\n logger._log_to_console_and_log_file(start_time)\n logger._log_to_console_and_log_file(\"Update Server Profile Firmware %s started......... \" % profile.name)\n if ui_lib.wait_for_element(FusionServerProfilesPage.ID_NEW_ACTIVITY_SUCCESS % (profile.name, start_time), PerfConstants.FIRMWARE_FAIL_PASS_VALIDATION):\n logger._log_to_console_and_log_file(\"Updating Server Profile Firmware %s done successfully\" % profile.name)\n elif ui_lib.wait_for_element(FusionServerProfilesPage.ID_NEW_ACTIVITY_ERROR % (profile.name, start_time), PerfConstants.FIRMWARE_ERROR_VALIDATION):\n logger._log_to_console_and_log_file(\"Update Server Profile Firmware %s done with errors\" % profile.name)\n else:\n logger._log_to_console_and_log_file(\"Update Server Profile Firmware %s done with warnings\" % profile.name)\n else:\n logger._log_to_console_and_log_file(\"Selected Bay: '\" + profile.name + \"' has already been updated with the firmware baseline : '\" + profile.spp + \"'\")\n continue\n else:\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_BTN_CANCEL_UPDATE_FIRMWARE)\n logger._log_to_console_and_log_file(\"Firmware Update canceled\")",
"def update(self):\n for component in self.components.values():\n try:\n component.update()\n except Exception as e:\n if self.ds.isFMSAttached():\n log.error(\"In subsystem %s: %s\" % (component, e))\n else:\n raise e",
"def updateDevice(self, serial: str, **kwargs):\n\n kwargs.update(locals())\n\n metadata = {\n 'tags': ['devices', 'configure'],\n 'operation': 'updateDevice'\n }\n resource = f'/devices/{serial}'\n\n body_params = ['name', 'tags', 'lat', 'lng', 'address', 'notes', 'moveMapMarker', 'switchProfileId', 'floorPlanId', ]\n payload = {k.strip(): v for k, v in kwargs.items() if k.strip() in body_params}\n action = {\n \"resource\": resource,\n \"operation\": \"update\",\n \"body\": payload\n }\n return action",
"async def update(self) -> None:\n # pause logic\n if not self.running.is_set():\n self.add_to_output(\"Paused...\")\n await self.running.wait()\n\n # tell the user we are updating\n self.add_to_output(f\"Updating...\")\n # create ssh connection to miner\n try:\n conn = await self.get_connection(\"root\", \"admin\")\n # tell the user we are sending the update file\n self.add_to_output(\"Sending upgrade file...\")\n # send the update file\n await self.send_file(UPDATE_FILE_S9, \"/tmp/firmware.tar\")\n # install the update and collect the result\n result = await conn.run(f'sysupgrade /tmp/firmware.tar')\n self.add_to_output(result.stdout.strip())\n # tell the user the update completed\n self.add_to_output(f\"Update completed...\")\n except OSError:\n self.add_to_output(f\"Unknown error...\")",
"def invalidate_firmware(self):\n self.exec_command('InvalidateFW')\n return None",
"def DoWriteFirmware(output, tools, fdt, flasher, file_list, image_fname,\n bundle, update=True, verify=False, dest=None,\n flash_dest=None, kernel=None, bootstub=None, servo='any',\n method='tegra'):\n write = WriteFirmware(tools, fdt, output, bundle)\n write.SelectServo(servo)\n write.update = update\n write.verify = verify\n if dest == 'usb':\n method = fdt.GetString('/chromeos-config', 'flash-method', method)\n if method == 'tegra':\n tools.CheckTool('tegrarcm')\n if flash_dest:\n write.text_base = bundle.CalcTextBase('flasher ', fdt, flasher)\n elif bootstub:\n write.text_base = bundle.CalcTextBase('bootstub ', fdt, bootstub)\n ok = write.NvidiaFlashImage(flash_dest, flasher, file_list['bct'],\n image_fname, bootstub)\n elif method == 'exynos':\n tools.CheckTool('lsusb', 'usbutils')\n tools.CheckTool('smdk-usbdl', 'smdk-dltool')\n ok = write.ExynosFlashImage(flash_dest, flasher,\n file_list['exynos-bl1'], file_list['exynos-bl2'], image_fname,\n kernel)\n else:\n raise CmdError(\"Unknown flash method '%s'\" % method)\n if ok:\n output.Progress('Image uploaded - please wait for flashing to '\n 'complete')\n else:\n raise CmdError('Image upload failed - please check board connection')\n elif dest == 'em100':\n # crosbug.com/31625\n tools.CheckTool('em100')\n write.Em100FlashImage(image_fname)\n elif dest.startswith('sd'):\n write.SendToSdCard(dest[2:], flash_dest, flasher, image_fname)\n else:\n raise CmdError(\"Unknown destination device '%s'\" % dest)",
"def fusion_api_get_server_hardware_firmware(self, uri, api=None, headers=None):\n return self.sh.get(uri=uri, api=api, headers=headers, param='/firmware')",
"def update_host(hostname, cpu_mhz, cpu_cores, ram):\n return update_host(hostname, cpu_mhz, cpu_cores, ram)",
"def element_cellular_modules_firmware(self, element_id, cellular_module_id, data, tenant_id=None, api_version=\"v2.0\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/elements/{}/cellular_modules/{}/firmware\".format(api_version,\n tenant_id,\n element_id,\n cellular_module_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"put\", data=data)",
"def test_create_drives_drive_firmware_update_item(self):\n pass",
"def setup_sensors(self):\n super(EddRoach2ProductController, self).setup_sensors()\n self._firmware_server_sensor = Sensor.string(\n \"firmware-server\",\n description=\"The address of the firmware server started by this product\",\n default=\"\",\n initial_status=Sensor.UNKNOWN)\n self.add_sensor(self._firmware_server_sensor)\n self._parent.mass_inform(Message.inform('interface-changed'))",
"def test_list_drives_drive_firmware_update(self):\n pass",
"def fusion_api_edit_server_hardware_power_state(self, body, uri, api=None, headers=None):\n return self.sh.update(body, uri, api, headers, param='/powerState')",
"def get_firmware_version(self, component_name):\n raise NotImplementedError",
"def update(self):\n self.device = self._api.device_query(self._hardware_address, {})",
"def test_update_software_component_for_system_module(self):\n pass",
"def machine_cellular_modules_firmware(self, machine_id, cellular_module_id, data, tenant_id=None, api_version=\"v2.0\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/machines/{}/cellular_modules/{}/firmware\".format(api_version,\n tenant_id,\n machine_id,\n cellular_module_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"put\", data=data)",
"def fusion_api_add_server_hardware(self, body, api=None, headers=None, param=''):\n return self.sh.post(body, api, headers, param)",
"def update(s_socket):\r\n dll = get_dll()\r\n bytes_value = to_bytes(len(dll) + 5, 4, 'little')\r\n s_socket.send('u' + bytes_value + dll)",
"def _get_firmware_update_service_resource(self):\n manager, uri = self._get_ilo_details()\n try:\n fw_uri = manager['Oem']['Hp']['links']['UpdateService']['href']\n except KeyError:\n msg = (\"Firmware Update Service resource not found.\")\n raise exception.IloCommandNotSupportedError(msg)\n return fw_uri",
"async def test_discovery_with_firmware_update(hass: HomeAssistant) -> None:\n with _patch_wizlight(\n device=None,\n extended_white_range=FAKE_EXTENDED_WHITE_RANGE,\n bulb_type=FAKE_RGBW_BULB,\n ):\n result = await hass.config_entries.flow.async_init(\n DOMAIN,\n context={\"source\": config_entries.SOURCE_INTEGRATION_DISCOVERY},\n data=INTEGRATION_DISCOVERY,\n )\n await hass.async_block_till_done()\n\n assert result[\"type\"] == FlowResultType.FORM\n assert result[\"step_id\"] == \"discovery_confirm\"\n\n # In between discovery and when the user clicks to set it up the firmware\n # updates and we now can see its really RGBWW not RGBW since the older\n # firmwares did not tell us how many white channels exist\n\n with patch(\n \"homeassistant.components.wiz.async_setup_entry\",\n return_value=True,\n ) as mock_setup_entry, patch(\n \"homeassistant.components.wiz.async_setup\", return_value=True\n ) as mock_setup, _patch_wizlight(\n device=None,\n extended_white_range=FAKE_EXTENDED_WHITE_RANGE,\n bulb_type=FAKE_RGBWW_BULB,\n ):\n result2 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {},\n )\n await hass.async_block_till_done()\n\n assert result2[\"type\"] == \"create_entry\"\n assert result2[\"title\"] == \"WiZ RGBWW Tunable ABCABC\"\n assert result2[\"data\"] == {\n CONF_HOST: \"1.1.1.1\",\n }\n assert len(mock_setup.mock_calls) == 1\n assert len(mock_setup_entry.mock_calls) == 1",
"def update_worker(fastboot_device, product, error_dict, debug=0):\n\n err = Error()\n if product == 'MSM8909_CARBON_E500':\n product = 'Carbon_CM5'\n\n if (product != 'Carbon_8') and (product != 'Carbon_10') and (product != 'Carbon_CM5'):\n err.set_fail('Unknown product, exiting...')\n logging.debug('Unknown product {}, exiting...'.format(product))\n raise IOError\n\n else:\n err.set_pass()\n\n file_path = os.getcwd()\n err, efi_bootloader, bootloader, flash_image, sequencer_xml, recovery_image, cache_image, boot_image, system_image\\\n = read_flash_image_filenames(file_path, product, debug=debug)\n\n if err.error_flag:\n error_dict[fastboot_device] = err\n # raise IOError\n exit() # Exit if required flash image is missing\n\n # Retrieve sequence of functions to execute\n err, sequencer_list = read_sequencer_xml_file(sequencer_xml)\n\n try:\n i = 0\n while i < len(sequencer_list) and err.error_flag is not True:\n if sequencer_list[i] == 'fastboot_reboot_bootloader':\n if debug == 1:\n logging.debug('Envoking fastboot_reboot_bootloader')\n write_to_datalog('Envoking fastboot_reboot_bootloader')\n err = fastboot_reboot_bootloader(fastboot_device, debug=debug)\n elif sequencer_list[i] == 'fastboot_reboot_to_idle':\n if debug == 1:\n logging.debug('Envoking fastboot_reboot_to_idle')\n write_to_datalog('Envoking fastboot_reboot_to_idle')\n err = fastboot_reboot_to_idle(fastboot_device, debug=debug)\n elif sequencer_list[i] == 'fastboot_flash_bootloader':\n if debug == 1:\n logging.debug('Envoking fastboot_flash_bootloader')\n write_to_datalog('Envoking fastboot_flash_bootloader')\n err = fastboot_flash_bootloader(fastboot_device, bootloader, debug=debug)\n elif sequencer_list[i] == 'get_fastboot_devices':\n if debug == 1:\n logging.debug('Envoking get_fastboot_devices')\n write_to_datalog('Envoking get_fastboot_devices')\n err = fastboot_reboot_bootloader(fastboot_device, debug=debug)\n elif sequencer_list[i] == 'fastboot_erase_userdata':\n if debug == 1:\n logging.debug('Envoking fastboot_erase_userdata')\n write_to_datalog('Envoking fastboot_erase_userdata')\n err = fastboot_erase_userdata(fastboot_device, debug=debug)\n elif sequencer_list[i] == 'fastboot_flash_recovery':\n if debug == 1:\n logging.debug('Envoking fastboot_flash_recovery')\n write_to_datalog('Envoking fastboot_flash_recovery')\n err = fastboot_flash_partition(fastboot_device, recovery_image, partition='recovery', debug=debug)\n elif sequencer_list[i] == 'fastboot_flash_cache':\n if debug == 1:\n logging.debug('Envoking fastboot_flash_cache')\n write_to_datalog('Envoking fastboot_flash_cache')\n err = fastboot_flash_partition(fastboot_device, cache_image, partition='cache', debug=debug)\n elif sequencer_list[i] == 'fastboot_flash_boot':\n if debug == 1:\n logging.debug('Envoking fastboot_flash_boot')\n write_to_datalog('Envoking fastboot_flash_boot')\n err = fastboot_flash_partition(fastboot_device, boot_image, partition='boot', debug=debug)\n elif sequencer_list[i] == 'fastboot_flash_system':\n if debug == 1:\n logging.debug('Envoking fastboot_flash_system')\n write_to_datalog('Envoking fastboot_flash_system')\n err = fastboot_flash_partition(fastboot_device, system_image, partition='system', debug=debug)\n elif sequencer_list[i] == 'fastboot_flash_update_aos_image':\n if debug == 1:\n logging.debug('Envoking fastboot_flash_update_aos_image')\n write_to_datalog('Envoking fastboot_flash_update_aos_image')\n err = fastboot_flash_update_aos_image(fastboot_device, flash_image, debug=debug)\n\n else:\n logging.debug('Unknown 
function call {}'.format(sequencer_list[i]))\n write_to_datalog('Unknown function call {}'.format(sequencer_list[i]))\n exit()\n\n if err.error_flag:\n raise IOError\n\n i += 1\n\n except IOError as e:\n logging.debug('Error message: {}'.format(err.error_string))\n logging.debug('Unexpected exception in the thread {} \\nExiting...\\n'.format(err.error_string))\n write_to_datalog('Unexpected exception in the thread {} \\nExiting...\\n'.format(err.error_string))\n error_dict[fastboot_device] = err\n write_to_datalog(error_dict)",
"def create_test_firmware_component(**kw):\n fw_cmp_values = get_test_firmware_component(**kw)\n if 'id' not in kw:\n del fw_cmp_values['id']\n dbapi = db_api.get_instance()\n return dbapi.create_firmware_component(fw_cmp_values)",
"def update_mac_processor(interface, mac_profile):\n pass",
"def update(self):\n self.device.update()",
"def update(self):\n self.device.update()",
"def fusion_api_edit_power_device(self, body, uri, api=None, headers=None):\n return self.pd.update(body=body, uri=uri, api=api, headers=headers)",
"def software_api(self, install_params):\n try:\n self.sw = jnpr.junos.utils.sw.SW(self.dev)\n ok, msg_ret = self.sw.install(**install_params)\n if ok is not True:\n raise AnsibleError('Unable to install the software %s' % msg_ret)\n msg = 'Package %s successfully installed. Response from device is: %s' % (\n install_params.get('package') or\n install_params.get('pkg_set'),\n msg_ret)\n self.queue_message(\"log\", \"%s\" % msg)\n return msg\n except (self.pyez_exception.ConnectError,\n self.pyez_exception.RpcError) as ex:\n raise AnsibleError('Installation failed. Error: %s' % str(ex))",
"def update_battery(self, battery):\n headers = {'Content-type': 'application/json'}\n resp = requests.put(f\"http://localhost:5000/api/hydrometers/{self.hydrometer['id']}/battery\", data=json.dumps(battery), headers=headers)\n if resp.status_code == 200:\n print(f\"Battery reading {battery} - sucessfully updated\")\n else:\n print(f\"Battery reading {battery} - error updating\")",
"def update(self):\n try:\n if not self._sysinfo:\n self._sysinfo = self.smartplug.sys_info\n self._mac = self.smartplug.mac\n self._model = self.smartplug.model\n if self.smartplug.context is None:\n self._alias = self.smartplug.alias\n self._device_id = self._mac\n else:\n self._alias = self._plug_from_context[\"alias\"]\n self._device_id = self.smartplug.context\n\n if self.smartplug.context is None:\n self._state = self.smartplug.state == self.smartplug.SWITCH_STATE_ON\n else:\n self._state = self._plug_from_context[\"state\"] == 1\n\n if self.smartplug.has_emeter:\n emeter_readings = self.smartplug.get_emeter_realtime()\n\n self._emeter_params[ATTR_CURRENT_POWER_W] = \"{:.2f}\".format(\n emeter_readings[\"power\"]\n )\n self._emeter_params[ATTR_TOTAL_ENERGY_KWH] = \"{:.3f}\".format(\n emeter_readings[\"total\"]\n )\n self._emeter_params[ATTR_VOLTAGE] = \"{:.1f}\".format(\n emeter_readings[\"voltage\"]\n )\n self._emeter_params[ATTR_CURRENT_A] = \"{:.2f}\".format(\n emeter_readings[\"current\"]\n )\n\n emeter_statics = self.smartplug.get_emeter_daily()\n try:\n self._emeter_params[ATTR_TODAY_ENERGY_KWH] = \"{:.3f}\".format(\n emeter_statics[int(time.strftime(\"%e\"))]\n )\n except KeyError:\n # Device returned no daily history\n pass\n\n self._available = True\n\n except (SmartDeviceException, OSError) as ex:\n if self._available:\n _LOGGER.warning(\n \"Could not read state for %s: %s\", self.smartplug.host, ex\n )\n self._available = False",
"def update_device(device):\n payload = request.get_json()\n if ('name' in payload) and (payload['name'] != device):\n raise BadRequest(\n 'Device name does not match between URL and JSON payload')\n try:\n properties = devices.show(device)\n for k in payload:\n properties[k] = payload[k]\n except KeyDoesNotExist:\n properties = payload\n return _register_device(properties)",
"def update(self, dev_id, update):\n headers = self.get_headers()\n url = self.url + '/device/' + str(dev_id)\n rv = self.api_request('POST', url, update, headers)\n if rv is None:\n raise Exception('Failed to update device schema')\n return rv",
"def update(self, now=None):\n if self.protocol == \"udp\":\n # UDP only have 1 way attribute\n result = self._run_test(ATTR_DOWNLOAD)\n self.data[ATTR_DOWNLOAD] = self.data[ATTR_UPLOAD] = getattr(\n result, \"Mbps\", None\n )\n self.data[ATTR_VERSION] = getattr(result, \"version\", None)\n else:\n result = self._run_test(ATTR_DOWNLOAD)\n self.data[ATTR_DOWNLOAD] = getattr(result, \"received_Mbps\", None)\n self.data[ATTR_VERSION] = getattr(result, \"version\", None)\n self.data[ATTR_UPLOAD] = getattr(\n self._run_test(ATTR_UPLOAD), \"sent_Mbps\", None\n )\n\n dispatcher_send(self._hass, DATA_UPDATED, self.host)",
"async def _report_firmware(self, sysex_data):\n # first byte after command is major number\n major = sysex_data[1]\n version_string = str(major)\n\n # next byte is minor number\n minor = sysex_data[2]\n\n # append a dot to major number\n version_string += '.'\n\n # append minor number\n version_string += str(minor)\n # add a space after the major and minor numbers\n version_string += ' '\n\n # slice the identifier - from the first byte after the minor\n # number up until, but not including the END_SYSEX byte\n\n name = sysex_data[3:-1]\n\n # convert the identifier to printable text and add each character\n # to the version string\n for e in name:\n version_string += chr(e)\n\n # store the value\n self.query_reply_data[PrivateConstants.REPORT_FIRMWARE] = version_string",
"def test_update_software_components_for_system_module(self):\n pass",
"def test_update_device(self):\n pass",
"def test_update_device(self):\n pass",
"def flash_me(port, firmware_file, firmware_url, flash, flash_mode,\n common_files, code_dir):\n pprint.pprint(vars())\n firmware_path = Path(firmware_file)\n run(['ampy', '--version'])\n if flash:\n if not firmware_path.exists():\n # Download firmware\n response = requests.get(firmware_url)\n response.raise_for_status()\n firmware_path.write_bytes(response.content)\n run(['esptool.py',\n '--port', str(port),\n 'erase_flash'])\n run(['esptool.py',\n '--port', str(port),\n '--baud', str(SPEED),\n 'write_flash',\n '--flash_size=detect',\n '--flash_mode=' + flash_mode,\n '0',\n firmware_file])\n click.secho('== Reset board now; press enter ==', fg='yellow')\n click.pause()\n\n def upload(filepath):\n click.secho('Uploading ' + str(filepath))\n run(['ampy',\n '--port', port,\n 'put', str(filepath), str(filepath.name)])\n\n directories = list(code_dir)\n if common_files:\n directories.insert(0, BASE_PATH / 'common')\n\n config_path = BASE_PATH / 'config.py'\n if config_path:\n upload(config_path)\n\n for directory in directories:\n path = BASE_PATH / directory\n for filepath in path.glob('*.py'):\n upload(filepath)",
"def update_device(cls, params, device_id, token):\n try:\n content_type = params.get('content_type')\n data_request = params.get('data')\n\n device_data, json_payload = parse_payload(content_type, data_request, device_schema)\n validate_repeated_attrs(json_payload)\n\n tenant = init_tenant_context(token, db)\n old_orm_device = assert_device_exists(device_id)\n db.session.delete(old_orm_device)\n db.session.flush()\n\n # handled separately by parse_template_list\n device_data.pop('templates')\n updated_orm_device = Device(**device_data)\n parse_template_list(json_payload.get('templates', []), updated_orm_device)\n auto_create_template(json_payload, updated_orm_device)\n updated_orm_device.id = device_id\n updated_orm_device.updated = datetime.now()\n updated_orm_device.created = old_orm_device.created\n\n db.session.add(updated_orm_device)\n\n db.session.commit()\n except IntegrityError as error:\n handle_consistency_exception(error)\n except ValidationError as error:\n raise HTTPRequestError(400, error.messages)\n\n full_device = serialize_full_device(updated_orm_device, tenant)\n\n kafka_handler_instance = cls.kafka.getInstance(cls.kafka.kafkaNotifier)\n kafka_handler_instance.update(full_device, meta={\"service\": tenant})\n\n result = {\n 'message': 'device updated',\n 'device': serialize_full_device(updated_orm_device, tenant)\n }\n return result",
"def async_update_device(self) -> None:",
"async def _report_firmware(self, sysex_data):\n # first byte after command is major number\n firmware_report_iterator = iter(sysex_data)\n\n major = sysex_data[1]\n version_string = str(major)\n\n # next byte is minor number\n minor = sysex_data[2]\n\n # append a dot to major number\n version_string += '.'\n\n # append minor number\n version_string += str(minor)\n # add a space after the major and minor numbers\n version_string += ' '\n\n # slice the identifier - from the first byte after the minor\n # number up until, but not including the END_SYSEX byte\n\n name = sysex_data[3:-1]\n firmware_name_iterator = iter(name)\n # convert the identifier to printable text and add each character\n # to the version string\n for e in firmware_name_iterator:\n version_string += chr(e + (next(firmware_name_iterator) << 7))\n\n # store the value\n self.query_reply_data[PrivateConstants.REPORT_FIRMWARE] = version_string",
"def update(device_id, **params):\n params = _clean_salt_variables(params)\n\n api_response = requests.put(\n \"https://api.serverdensity.io/inventory/devices/\" + device_id,\n params={\"token\": get_sd_auth(\"api_token\")},\n data=params,\n )\n log.debug(\"Server Density API Response: %s\", api_response)\n log.debug(\"Server Density API Response content: %s\", api_response.content)\n if api_response.status_code == 200:\n try:\n return salt.utils.json.loads(api_response.content)\n except ValueError:\n log.error(\n \"Could not parse Server Density API Response content: %s\",\n api_response.content,\n )\n raise CommandExecutionError(\n \"Failed to create, API Response: {}\".format(api_response)\n )\n else:\n return None",
"def handle_device_parameter_update(self, device: SimplePeriphDev, parameter, value):\r\n update_data = {\r\n 'devices': [{\r\n 'name': device.description['name'],\r\n 'parameters': {\r\n parameter: value\r\n }\r\n }],\r\n 'controller_config': {}\r\n }\r\n self.handle_updates(update_data)\r\n self.update_callback(update_data)",
"def firmware_version(self):\n return self._get_system_status()[\"firmware\"]",
"def report_firmware(self, data):\n self.firmata_firmware.append(data[0]) # add major\n self.firmata_firmware.append(data[1]) # add minor\n\n # extract the file name string from the message\n # file name is in bytes 2 to the end\n name_data = data[2:]\n\n # constructed file name\n file_name = []\n\n # the file name is passed in with each character as 2 bytes, the high order byte is equal to 0\n # so skip over these zero bytes\n for i in name_data[::2]:\n file_name.append(chr(i))\n\n # add filename to tuple\n self.firmata_firmware.append(\"\".join(file_name))",
"def _on_firmware_update_status(self, status: FirmwareUpdateStatus) -> None:\n message = self.message_factory.make_from_firmware_update_status(status)\n if self.connectivity_service.is_connected():\n if not self.connectivity_service.publish(message):\n self.message_queue.put(message)\n else:\n self.message_queue.put(message)\n\n if (\n status.status == FirmwareUpdateStatusType.SUCCESS\n and self.firmware_update\n ):\n version = self.firmware_update.get_current_version()\n self.parameters.update({\"FIRMWARE_VERSION\": version})\n if self.connectivity_service.is_connected():\n message = self.message_factory.make_from_parameters(\n self.parameters\n )\n if not self.connectivity_service.publish(message):\n self.message_queue.put(message)",
"def fusion_api_create_firmware_bundle(self, body, api=None, headers=None):\n return self.driver.post(body, api, headers)",
"def update(self):\n\t\tfor x in range(self.leds):\n\t\t\tself.spi.write(self.buffer[x])\n\t\t\t#self.spi.flush()\n\t\t\t\n\t\tself.spi.write(bytearray(b'\\x00'))\n\t\tself.spi.flush()",
"async def async_device_control_fn(api: aiounifi.Controller, obj_id: str) -> None:\n await api.request(DeviceUpgradeRequest.create(obj_id))",
"def get(self, request):\n verify_secure(request)\n if not request.user.profile.is_device:\n raise rest_exceptions.NotAcceptable('Only for devices')\n assignment = None\n try:\n assignment = request.user.firmware\n except local_models.FirmwareAssignmentModel.DoesNotExist:\n raise rest_exceptions.NotFound({'firmware': 'Not available'})\n if assignment.value.hardware == local_models.FirmwareModel.HW_ESP8266_4MB:\n return esp8266.update(request, assignment)\n else:\n raise rest_exceptions.NotAcceptable('The [{:s}] is not supported'.format(assignment.value.hardware))",
"def fusion_api_upload_appliance_firmware(self, localfile, api=None, headers=None):\n return self.appfirmware.upload(localfile, api, headers)",
"def update_device(cls, device_uuid, values):\n return cls.dbdriver.update_device(device_uuid, values)",
"def _update(self, device=None):\n self._attr_available = True\n self.schedule_update_ha_state(True)",
"def _GetWhitelistVersion(self, component):\n firmware_info = os.path.join(pyauto.PyUITest.DataDir(),\n 'pyauto_private/chromeos/',\n 'chromeos_firmware_info.txt')\n assert os.path.exists(firmware_info), 'Data file does not exist.'\n return self.EvalDataFrom(firmware_info)[self.ChromeOSBoard()][component]",
"def install_firmware(self, image_path):\n \"\"\"Not Implement\"\"\"\n return False",
"def _edit_server_hardware(*profile_obj):\n selenium2lib = ui_lib.get_s2l()\n\n if isinstance(profile_obj, test_data.DataObj):\n profile_obj = [profile_obj]\n elif isinstance(profile_obj, tuple):\n profile_obj = list(profile_obj[0])\n\n for profile in profile_obj:\n if not selenium2lib._is_element_present(FusionServerHardwarePage.ID_PAGE_LABEL):\n base_page.navigate_base(FusionServerHardwarePage.ID_PAGE_LABEL,\n FusionUIBaseElements.ID_MENU_LINK_SERVER_HARDWARE, \"css=span.hp-page-item-count\")\n if not serverhardware.power_off_server_by_name(profile.server):\n logger._warn(\"Failed to powerOff the server %s\" % profile.server)\n logger._warn(\"Can't proceed with server profile creation on server %s\" % profile.server)\n continue\n # Navigating to Server profile page\n if not selenium2lib._is_element_present(FusionServerProfilesPage.ID_PAGE_LABEL):\n ui_lib.wait_for_element(FusionUIBaseElements.ID_MAIN_MENU_CONTROL, PerfConstants.DEFAULT_SYNC_TIME)\n navigate()\n\n profile_list = [el.text for el in selenium2lib._element_find(FusionServerProfilesPage.ID_PROFILE_LIST_NAMES, False, False)]\n if profile.profilename not in profile_list:\n logger._warn(\"Profile '%s' does not exist\" % profile.profilename)\n continue\n if profile.server == \"\":\n logger._warn(\"Mandatory fields to edit server hardware can't be empty\")\n continue\n\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_ELEMENT_PROFILE_NAME_BASE % profile.profilename)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_MENU_MAIN_ACTION)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_MENU_ACTION_EDIT)\n\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_EDIT_DROPDOWN_SEARCH_SERVER_HARDWARE)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_EDIT_SEARCH_HARDWARE)\n if profile.unassign == \"unassigned\":\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_ELEMENT_NETWORK_NAME_BASE % profile.unassign)\n logger._log_to_console_and_log_file(\"Unassigning the server profile\")\n else:\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_ELEMENT_NETWORK_NAME_BASE % profile.server)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_UPDATE_SERVER_PROFILE)\n\n if ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_PROFILE_OFF_ERROR):\n logger._log_to_console_and_log_file(\"Server is not powered off, and switching off now\")\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_PROFILE_OFF_ERROR)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_BTN_POWER_PRESS_AND_HOLD)\n ui_lib.wait_for_element(FusionServerProfilesPage.ID_SERVER_POWER_OFF_VALIDATE, PerfConstants.SERVER_POWER_OFF)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_EDIT_DROPDOWN_SEARCH_SERVER_HARDWARE)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_EDIT_SEARCH_HARDWARE)\n if profile.unassign == \"unassigned\":\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_ELEMENT_NETWORK_NAME_BASE % profile.unassign)\n logger._log_to_console_and_log_file(\"Unassigning the server profile\")\n else:\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_ELEMENT_NETWORK_NAME_BASE % profile.server)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_UPDATE_SERVER_PROFILE)\n\n if ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_PROFILE_OFF_ERROR):\n logger._warn(\"Failed to power off the server %s\" % profile.server)\n else:\n logger._log_to_console_and_log_file(\"Successfully server %s 
is powered off\" % profile.server)\n\n ui_lib.wait_for_element(FusionServerProfilesPage.ID_ADD_PROFILE_NOTIFICATION)\n # New Code\n if ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_ADD_PROFILE_NOTIFICATION):\n errMsg = selenium2lib._get_text(FusionServerProfilesPage.ID_ADD_PROFILE_NOTIFICATION_CONTENT)\n logger._warn(errMsg)\n logger._warn(\"Unable to edit profile server hardware %s\" % profile.profilename)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_BTN_CANCEL_SERVER_PROFILE)\n continue\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_UPDATE_PROFILE_TIMESTAMP)\n strTimeStamp = selenium2lib._get_text(FusionServerProfilesPage.ID_UPDATE_PROFILE_TIMESTAMP)\n logger._log_to_console_and_log_file(strTimeStamp)\n\n # Verify profile server hardware updation status in server profile page (Under Activity tab)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_LINK_OVERVIEW)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_LINK_OVERVIEW)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_LINK_ACTIVITY)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_LINK_ACTIVITY)\n\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_PROFILE_CREATION_STATUS % (\"Update\", strTimeStamp), PerfConstants.CREATE_SERVER_PROFILE_TIME)\n\n if selenium2lib._is_element_present(FusionServerProfilesPage.ID_PROFILE_CREATION_STATUS % (\"Update\", strTimeStamp)):\n logger._log_to_console_and_log_file(\"Server profile '%s' is edited successfully\" % profile.profilename)\n else:\n logger._warn(\"Failed to edit server profile '%s' hardware\" % profile.profilename)",
"def update_connected_device(service):\n\n update_obj = service.data.get('value')\n\n connected_devices = hass.states.get('connected_devices.connected_devices').as_dict()\n \n attributes = connected_devices[\"attributes\"]\n\n for obj in update_obj:\n # _LOGGER.info(\"update value: %s\", obj[\"value\"])\n # _LOGGER.info(\"target: %s\", obj[\"target\"])\n # _LOGGER.info(\"previous value: %s\", attributes[\"devices\"][obj[\"device\"]][obj[\"target\"]])\n\n attributes[\"devices\"][obj[\"device\"]][obj[\"target\"]] = obj[\"value\"]\n # _LOGGER.info(\"after update: %s\", attributes[\"devices\"][obj[\"device\"]][obj[\"target\"]])\n \n connected_devices[\"attributes\"] = attributes\n\n hass.states.set('connected_devices.connected_devices', 'On', attributes, True)",
"def update(self):\n start = time.time()\n device_data = self._client.get_device_attributes(self._id,\n UPDATE_ATTRIBUTES)\n end = time.time()\n elapsed = round(end - start, 3)\n _LOGGER.debug(\"Updating %s (%s sec): %s\",\n self._name, elapsed, device_data)\n if \"error\" not in device_data:\n if \"errorCode\" not in device_data:\n self._brightness_pct = device_data[ATTR_INTENSITY] if \\\n device_data[ATTR_INTENSITY] is not None else 0.0\n self._operation_mode = device_data[ATTR_POWER_MODE] if \\\n device_data[ATTR_POWER_MODE] is not None else MODE_MANUAL\n self._rssi = device_data[ATTR_RSSI]\n self._wattage_override = device_data[ATTR_WATTAGE_OVERRIDE]\n self._occupancy = device_data[ATTR_OCCUPANCY]\n return\n else:\n if device_data[\"errorCode\"] == \"ReadTimeout\":\n _LOGGER.warning(\"Error in reading device %s: (%s), too slow to respond or busy.\", self._name, device_data)\n else:\n _LOGGER.warning(\"Unknown errorCode, device: %s, error: %s\", self._name, device_data)\n return\n else:\n if device_data[\"error\"][\"code\"] == \"DVCCOMMTO\": \n _LOGGER.warning(\"Cannot update %s: %s. Device is busy or does not respond quickly enough.\", self._name, device_data)\n elif device_data[\"error\"][\"code\"] == \"SVCINVREQ\":\n _LOGGER.warning(\"Invalid or malformed request to Neviweb, %s:\", device_data)\n elif device_data[\"error\"][\"code\"] == \"DVCACTNSPTD\":\n _LOGGER.warning(\"Device action not supported, %s:\", device_data)\n elif device_data[\"error\"][\"code\"] == \"DVCUNVLB\":\n _LOGGER.warning(\"Device %s unavailable, Neviweb maintnance update, %s:\", self._name, device_data)\n elif device_data[\"error\"][\"code\"] == \"SVCERR\":\n _LOGGER.warning(\"Device %s statistics unavailables, %s:\", self._name, device_data)\n else:\n _LOGGER.warning(\"Unknown error, device: %s, error: %s\", self._name, device_data)",
"def update(self):\n if self._skip_update:\n self._skip_update = False\n return\n\n try:\n for prop in AIRER_PROPS:\n self.status[prop] = self.send('get_prop', [prop])[0]\n _LOGGER.debug(\"MiioDevice update: %s\", self.status)\n self.available = True\n self._retry = 0\n except Exception as exc:\n _LOGGER.error(\"Error on update: %s\", exc)\n self._retry += 1\n if self._retry > 3:\n self.available = False\n\n for entity in self.update_entities:\n entity.async_schedule_update_ha_state()",
"def update(self, data: bytes):\n self.send(data)",
"def firmware(self) -> str:\n return self._device_info[\"Firmware\"]",
"def Update(self, controller):\n pass",
"def update(self):\n self._device.update()",
"def fusion_api_get_appliance_firmware_upgrade_status(self, api=None, headers=None):\n param = '/notification'\n return self.appfirmware.get(api=api, headers=headers, param=param)",
"def on_lz_hardware_update(self, func):\n self._set_event_handler(\"lz\")\n self._events.on_lz_hardware_update(func)"
] | [
"0.74276894",
"0.741315",
"0.714987",
"0.69797605",
"0.6916566",
"0.6524832",
"0.651543",
"0.64290446",
"0.6362683",
"0.63187355",
"0.6297764",
"0.6142919",
"0.6138458",
"0.6109476",
"0.60932076",
"0.59088314",
"0.58943313",
"0.58743894",
"0.58460486",
"0.5842201",
"0.57846546",
"0.57419276",
"0.5687361",
"0.56826895",
"0.56416935",
"0.563446",
"0.5623241",
"0.5613606",
"0.5596295",
"0.5578245",
"0.5553061",
"0.55494237",
"0.55413395",
"0.5490244",
"0.5439425",
"0.5431478",
"0.5421727",
"0.540933",
"0.5401416",
"0.53879225",
"0.5384352",
"0.53843224",
"0.53129864",
"0.5312128",
"0.52842975",
"0.52652967",
"0.5258465",
"0.52446467",
"0.52359676",
"0.5224113",
"0.52195597",
"0.5213739",
"0.5202536",
"0.5197716",
"0.51449317",
"0.51094717",
"0.50943524",
"0.50917584",
"0.5088718",
"0.5084709",
"0.5084709",
"0.50786066",
"0.5075856",
"0.50708205",
"0.50662625",
"0.50565714",
"0.50495857",
"0.50479156",
"0.50393033",
"0.5034956",
"0.5027503",
"0.5027503",
"0.5026333",
"0.5021295",
"0.5021003",
"0.5014688",
"0.50096625",
"0.50053406",
"0.4997821",
"0.49974966",
"0.4986075",
"0.4978264",
"0.49638015",
"0.4960285",
"0.495923",
"0.4952477",
"0.4949534",
"0.49328476",
"0.49317122",
"0.49247777",
"0.49211895",
"0.49005604",
"0.48954338",
"0.48951235",
"0.48905936",
"0.48723593",
"0.48568457",
"0.4856229",
"0.48468387",
"0.48453987"
] | 0.7226055 | 2 |
Get the progress of the firmware update. | def get_firmware_update_progress(self):
try:
fw_update_uri = self._get_firmware_update_service_resource()
except exception.IloError as e:
LOG.debug(self._('Progress of firmware update not known: %s'),
str(e))
return "UNKNOWN", "UNKNOWN"
# perform the GET
status, headers, response = self._rest_get(fw_update_uri)
if status != 200:
msg = self._get_extended_error(response)
raise exception.IloError(msg)
fw_update_state = response.get('State')
fw_update_progress_percent = response.get('ProgressPercent')
LOG.debug(self._('Flashing firmware file ... in progress %d%%'),
fw_update_progress_percent)
return fw_update_state, fw_update_progress_percent | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_firmware_update_status(self):\n\n response = self.execute_command(CMD_GET_FIRMWARE_UPDATE_STATUS)[0]\n inprogress = (response & 0x80) == 0x80\n return {\n \"inprogress\": inprogress,\n \"error\": response & 0x7f,\n }",
"def GetProgress(self):\n return self.new_progress",
"def UpgradeProgress(self):\n if self.force_auto_sync:\n self.get('UpgradeProgress')\n return self._UpgradeProgress",
"def getProgress(self):",
"def progress(self):\n return self.runProgress",
"def progress(self):\n return self.runProgress",
"def progress(self):\n return self.progressValue",
"def get_progress(self):\n return self.cloudserver.progress",
"def progress(self):\n if self.dynamic:\n self._update_db_obj()\n return self._db_obj.progress",
"def progress(self) -> float:\n return self._progress",
"def progress(self) -> float:\n return self._progress",
"def progress(self) -> float:\n return self._progress",
"def progress(self) -> float:\n return self._progress",
"def getProgress(self):\n return self._progress",
"def get_progress(self):\n\t\treturn call_sdk_function('PrlJob_GetProgress', self.handle)",
"def progress(self):\n try:\n return 100.0 * (self.fields['sizeWhenDone'] - self.fields['leftUntilDone']) / float(self.fields['sizeWhenDone'])\n except ZeroDivisionError:\n return 0.0",
"def sound_install_progress(self):\n return SoundInstallStatus(self.send(\"get_sound_progress\")[0])",
"def percent_updated(self):\n return self.percent_complete - self.previous_percent_complete",
"def build_progress(self) -> Union[int, float]:\n return self.proto.build_progress",
"def get_progress(self):\n ret = self.state + \"\\n\"\n self.reset_progress()\n return ret",
"def status(self):\n\t\tstatus = self.thread.status()\n#\t\tprint_array(status)\n\t\tmessage = [\"------ RSYNC PROGRESS ------ \"]\n\t\tif self.log_message:\n\t\t\tmessage.append(self.log_message)\n\t\tmessage.append(\"Current file: %s\" % status['current_file'])\n\t\tmessage.append(\"\\tBytes Copied: %s\" % status['bytes_copied'])\n\t\tmessage.append(\"\\tPercent Done: %s\" % status['percent_done'])\n\t\tmessage.append(\"\\tTransfer Rate: %s\" % status['transfer_rate'])\n\t\tmessage.append(\"\\tTime Remaining: %s\" % status['est_remain'])\n\t\tmessage.append(\"\\tTransfer Number: %s\" % status['xfer_num'])\n\t\tmessage.append(\"\\tTransfers Remaining: %s\" % status['xfer_remain'])\n\t\tmessage.append(\"\\tTransfers Total: %s\" % status['xfer_total'])\n\t\tmessage.append(\"\\t----------------------------------\")\n\t\ttry:\n\t\t\toverall_percent = int(round((int(status['xfer_num'])*1.0)/int(status['xfer_total']),2)*100)\n\t\texcept: overall_percent = 0\n\t\tmessage.append(\"\\tTotal Rsync done: %s%%\\n\" % overall_percent)\n\t\tp = open(self.progress_file,'w+',0)\n\t\tfor line in message:\n\t\t\t#print line\n\t\t\tp.write(\"%s\\n\" % line)\n\t\tp.flush()\n\t\tp.close()",
"def get_progress(self):\r\n return None",
"def get_progress(self, pr, id):\n\t\treturn round((self.handler.file_progress()[id] / pr.length) * 100, )",
"def percentage_update(self):\n\n self.event_update()\n return self.percentage",
"def progress_bar_update() -> str:\n # As we get updates only when the progress bar is updated we need to fix the 'duration' and 'time remaining' parts\n # (time never stops)\n now = datetime.now()\n result = []\n for pb_id in sorted(_DASHBOARD_TQDM_DICT.keys()):\n progress = _DASHBOARD_TQDM_DICT.get(pb_id)\n if progress['success'] and progress['n'] != progress['total']:\n progress['duration'] = str(now - progress['started_raw']).rsplit('.', 1)[0]\n progress['remaining'] = (str(progress['finished_raw'] - now).rsplit('.', 1)[0]\n if progress['finished_raw'] is not None and progress['finished_raw'] > now\n else '-')\n result.append(progress)\n\n return jsonify(result=result)",
"def update_progress(self, value=None):\n if self.main_app is not None:\n if value is not None:\n self.main_app.update_progress(value)\n else:\n if self.total_files != 0:\n self.main_app.update_progress((self.current_file / self.total_files) * 100)",
"def get_status(self):\n return str(self.percentage) + \"%\", self.downloaded, self.speed",
"def progress(self):\n percent = self._infos.get(BulkInsertState.IMPORT_PROGRESS, \"0\")\n return int(percent)",
"def update_progress(self):\n report = self.build_progress_report()\n self.conduit.set_progress(report)",
"def update_progress(self):\n report = self.build_progress_report()\n self.conduit.set_progress(report)",
"def getFinalStatus():\n p = progressbar[:]\n p.insert(0, '[')\n p.insert(len(p), ']')\n return string.join(p, '')",
"def update_task_progress():\r\n current_time = datetime.now(UTC)\r\n progress = {\r\n 'action_name': action_name,\r\n 'attempted': num_attempted,\r\n 'succeeded': num_succeeded,\r\n 'failed': num_failed,\r\n 'total': num_total,\r\n 'duration_ms': int((current_time - start_time).total_seconds() * 1000),\r\n 'step': curr_step,\r\n }\r\n _get_current_task().update_state(state=PROGRESS, meta=progress)\r\n\r\n return progress",
"def get_progress_indicator(self):\n return self.__aceQLHttpApi.get_progress_indicator()",
"def percentage_progress(self):\n\n if self.total_progress != 0:\n return float(self.progress) / self.total_progress\n return self.progress",
"def percentage_progress(self):\n\n if self.total_progress != 0:\n return float(self.progress) / self.total_progress\n return self.progress",
"def value(self):\n\n return self._progress.value()",
"def acquisition_progress(self):\n acc = ct.c_long()\n series = ct.c_long()\n self.lib.GetAcquisitionProgress(ct.pointer(acc), ct.pointer(series))\n return acc.value, series.value",
"def progress_status(self):\n from tqdm import tqdm\n pbar_a = tqdm(total=len(self.jobs), position=0)\n pbar_a.set_description('Submitted jobs ...')\n pbar_b = tqdm(total=self.n_submit_script, position=1)\n pbar_b.set_description('Running jobs ...')\n pbar_c = tqdm(total=self.n_submit_script, position=2)\n pbar_c.set_description('Completed jobs ...')\n pbar_d = tqdm(total=self.n_submit_script, position=3)\n pbar_d.set_description('Failed? jobs ...')\n while self.n_completed < self.n_submit_script:\n pbar_a.n = self.n_submitted\n pbar_b.n = self.n_running\n pbar_c.n = self.n_completed\n pbar_d.n = self.n_failed + self.n_unknown\n pbar_a.refresh()\n pbar_b.refresh()\n pbar_c.refresh()\n pbar_d.refresh()\n sleep(5)\n self.update_status()",
"def status(self) -> NoReturn:\n\n curr_status= self.percent_done()\n while(curr_status < 100):\n\n update_status(name=self.name, status=curr_status)\n time.sleep(0.5)\n\n curr_status = self.percent_done()\n\n update_status(name=self.name, status=curr_status)",
"def calc_progress(self):\n if self.is_prepared():\n self._sync_info_from_disk()\n self._num_sown_batches = len(\n glob.glob(\n os.path.join(self.location, \"batches\", BTCH_NM.format(\"*\"))\n )\n )\n self._num_results = len(\n glob.glob(\n os.path.join(self.location, \"results\", RSLT_NM.format(\"*\"))\n )\n )\n else:\n self._num_sown_batches = -1\n self._num_results = -1",
"def get_current_download_progress(transfer):\n\n global progress\n\n for inst in progress:\n if transfer in inst:\n break\n\n data = json.dumps(inst)\n\n return data",
"def yt_dlp_progress_hook(self, data: Dict[str, Any]) -> None:\n\n if data[\"status\"] == \"downloading\":\n file_bytes = data.get(\"total_bytes\")\n if file_bytes is None:\n file_bytes = data.get(\"total_bytes_estimate\")\n\n downloaded_bytes = data.get(\"downloaded_bytes\")\n if self.parent.simple_tui and not self.parent.web_ui:\n self.progress = 50\n elif file_bytes and downloaded_bytes:\n self.progress = downloaded_bytes / file_bytes * 50\n\n self.update(\"Downloading\")",
"def getInstallProgress(self, from_line):\n calcEngine = CalcEngine.factory(self.client_session)\n result = calcEngine.getInstallProgress(from_line)\n return result",
"def fetch_progress(self):\n threads = len(opts.thread)\n files = len(self.files)\n t_width = len(str(threads))\n f_width = len(str(files))\n\n t_progress = f\"[{self.pos: >{t_width}}/{threads}]\"\n f_progress = f\"[{self.count: >{f_width}}/{files}]\"\n\n if self.count:\n progress = f\"{t_progress} {f_progress}\"\n else:\n progress = t_progress\n\n return progress",
"def update_progress(self, done):\r\n if done % 100 == 0:\r\n print >>sys.stderr, \" %d processed, run time %d secs\" % (done, (datetime.now() - self.started_at).seconds)",
"def get_percentComplete(self):\n val = self.resource.get_cdmi_sys_meta().get(\"cdmi_percentComplete\",\n \"100\")\n return val",
"def getNextUpdate(self):\n\n return self.get_POW().getNextUpdate()",
"def getNextUpdate(self):\n\n return self.get_POW().getNextUpdate()",
"def progress(self) -> int:\n return int(round(100 * self.somme() / self.finances))",
"def progress(self, job_id: str) -> Tuple[int, str]:\n session = self._session()\n response = session.get(self._status_url(job_id))\n if response.ok:\n return int(response.json()['progress']), response.json()['status']\n else:\n response.raise_for_status()",
"def property_updates_in_progress(self) -> 'outputs.PropertyUpdatesInProgressResponse':\n return pulumi.get(self, \"property_updates_in_progress\")",
"def transfer_progress(self, stats):",
"def get_build_progress_info(self, build_id):\n pass",
"def wait_progress(self):\n pass",
"def wait_progress(self):\n pass",
"def progress(request):\n file_id = request.GET['X-Progress-ID']\n session = DBSession()\n u = session.query(Upload).filter_by(id=file_id).one()\n data = {'state': u.state}\n if u.state == 'uploading':\n if not os.path.exists(u.tmp_path):\n # The temporary file has not been created yet or it has\n # already been renamed. We return 0 in both case, the\n # front-end code will know what to do.\n received = 0\n else:\n received = os.stat(u.tmp_path).st_size\n data.update({'size': u.size, 'received': received})\n return data",
"def update_progress(self, progress, message):\n assert 0 <= progress < 100\n self._progress = int(progress)\n self.logger.info(\n \"status: STARTED %d%% %s\", self._progress, message or \"\"\n )\n self._callback('on_progress_update', self._progress, message)\n return self.update_response(\n self.encoder.encode_started(self._progress, message)\n )",
"def func(progress_remaining: float) -> float:\n return progress_remaining * initial_value",
"def func(progress_remaining: float) -> float:\n return progress_remaining * initial_value",
"def func(progress_remaining: float) -> float:\n return progress_remaining * initial_value",
"def func(progress_remaining: float) -> float:\n return progress_remaining * initial_value",
"def update(self, steps):\n self.launch_progress += (steps)/self.total",
"def fusion_api_get_startup_progress(self, host, api=None, headers=None):\n return self.progress.get(host, api, headers)",
"def progress_update(self):\n self._window.scan_progress.setValue(self.scan_progress)",
"def firmware_version(self):\n return self._get_system_status()[\"firmware\"]",
"def current_progress_data(self):\n return self._current_progress_data",
"def update_progressbar(self, count, value):\n self.status(\"Progress %s/%s\" % (value, count))",
"def get_update_function(updateFrequency):\r\n totalTranf = 0\r\n intervalTansf = 0 \r\n intervalStart = time.monotonic()\r\n \r\n def update(transfered):\r\n nonlocal totalTranf\r\n nonlocal intervalTansf\r\n nonlocal intervalStart\r\n \r\n totalTranf += transfered\r\n intervalTansf += transfered\r\n \r\n if intervalStart + updateFrequency > time.monotonic():\r\n return\r\n \r\n print(\"transfered {}\\t@ {:.1f} MiB/s\".format(\r\n format_file_size(totalTranf), \r\n (intervalTansf /(time.monotonic() - intervalStart)) / 1024 / 1024))\r\n \r\n intervalTansf = 0\r\n intervalStart = time.monotonic()\r\n \r\n return update",
"def task_progress(project):\n complete = Task.objects.filter(project=project, status='C').count()\n total = Task.objects.filter(project=project).count()\n if total == 0:\n return 0\n\n return round(complete/total * 100, 2)",
"def onTransferUpdate(self, api, transfer):\n logging.info('Transfer update ({} {});'\n ' Progress: {} KB of {} KB, {} KB/s'\n .format(transfer,\n transfer.getFileName(),\n transfer.getTransferredBytes() / 1024,\n transfer.getTotalBytes() / 1024,\n transfer.getSpeed() / 1024))",
"def get_percentComplete(self):\n val = self.collection.get_cdmi_sys_meta().get(\"cdmi_percentComplete\",\n \"100\")\n return val",
"def update_percent(self):",
"async def get_firmware_version(self):\n current_time = time.time()\n #logstring(\"setting current time {}\".format(current_time))\n #logstring(\"1\")\n if self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE) == '':\n #logstring(\"2\")\n #logstring(\"checking time now 1 {}\".format(time.time()))\n await self._send_sysex(PrivateConstants.REPORT_FIRMWARE, None)\n #logstring(\"checking time now 2 {}\".format(time.time()))\n #logstring(\"3\")\n if self.serial_port.IsPortOpen == False:\n #logstring(\"Looks like that port wasn't working!!!!!!!!!!!!!????\")\n return None\n while self.query_reply_data.get(\n PrivateConstants.REPORT_FIRMWARE) == '':\n #logstring(\"4\")\n elapsed_time = time.time()\n #logstring(\"setting elapsed time {}\".format(elapsed_time))\n #logstring(\"5\")\n if elapsed_time - current_time > 3:\n #logstring(\"really took too long: {} {} {}\".format(elapsed_time, current_time, elapsed_time - current_time))\n return None\n #logstring(\"7\")\n if self.serial_port.IsPortOpen == False:\n #logstring(\"Looks like that port wasn't working!!!!!!!!!!!!!\")\n return None\n await asyncio.sleep(self.sleep_tune)\n #logstring(\"8\")\n #logstring(\"Geez, that took: {} {} {} ??????????????????\".format(elapsed_time, current_time, elapsed_time - current_time))\n\n reply = ''\n #logstring(\"9\")\n for x in self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE):\n reply_data = ord(x)\n if reply_data:\n reply += chr(reply_data)\n self.query_reply_data[PrivateConstants.REPORT_FIRMWARE] = reply\n #logstring(\"10\")\n return self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE)",
"def Update(self, newProgress):\n self.progress = newProgress\n required = self.length * (newProgress)\n delta = int(required - self.nbits)\n if delta > 0:\n sys.stdout.write(self.char * delta)\n self.nbits += delta",
"def update(self,pTransfer=None,pStorage=None,pNetwork=None,pPower=None):\n image = self.progressImage.copy()\n draw = ImageDraw.Draw(image)\n # Upload/Downlaod\n if pTransfer == None:\n pTransfer = self.pTransfer\n else:\n self.pTransfer = pTransfer\n # Storage\n if pStorage == None:\n pStorage = self.pStorage\n else:\n self.pStorage = pStorage\n # Network\n if pNetwork == None:\n pNetwork = self.pNetwork\n else:\n self.pNetwork = pNetwork\n # Power\n if pPower == None:\n pPower = self.pPower\n else:\n self.pPower = pPower\n\n ps = [pTransfer, pStorage, pNetwork, pPower]\n for i in range(0,4):\n x0 = self.bx0\n y0 = i*self.barh + self.by0\n x1 = self.bx0 + ps[i]*(self.bx1 - self.bx0)\n y1 = i*self.barh + self.by1\n draw.rectangle((x0,y0,x1,y1),outline=255,fill=255)\n # Need to offset text by -2 in order to display properly\n draw.text((12,y0-2), '{:3d}%'.format(int(100*ps[i])), font=self.font, fill=255)\n self.disp.image(image.rotate(180))\n self.disp.display()\n return",
"def reportProgress(self):\n \n pass",
"def _update(self):\n\n # Read the power supply status\n # TODO: Display power icon while charging\n plugged_in = open('/sys/class/power_supply/AC0/online').readline().strip() # pylint: disable=unused-variable\n power_percent = atoi(open('/sys/class/power_supply/BAT0/capacity').readline().strip())\n\n self.window.pcBatteryDisplay.setValue(power_percent)\n\n # Set color based on power_level\n if power_percent <= 25:\n self.window.pcBatteryDisplay.setStyleSheet('QProgressBar::chunk {{background-color: #{:06x}}}'.format(\n gui_utils.Color.RED))\n elif power_percent <= 60:\n self.window.pcBatteryDisplay.setStyleSheet('QProgressBar::chunk {{background-color: #{:06x}}}'.format(\n gui_utils.Color.ORANGE))\n else:\n self.window.pcBatteryDisplay.setStyleSheet('QProgressBar::chunk {{background-color: #{:06x}}}'.format(\n gui_utils.Color.BAR_GREEN))\n\n # Compute the CPU usage\n with open('/proc/stat') as f:\n\n # Parse the data from the file\n fields = [float(column) for column in f.readline().strip().split()[1:]]\n idle, total = fields[3], sum(fields)\n idle_delta = idle - self.cpu_last_idle\n total_delta = total - self.cpu_last_total\n self.cpu_last_idle = idle\n self.cpu_last_total = total\n\n # Calulate the utilisation\n utilisation = 100.0 * (1.0 - idle_delta / total_delta)\n self.cpu_buffer.append(utilisation)\n\n self.window.pcCpuDisplay.setValue(sum(self.cpu_buffer) / len(self.cpu_buffer))",
"def fusion_api_get_appliance_firmware_upgrade_status(self, api=None, headers=None):\n param = '/notification'\n return self.appfirmware.get(api=api, headers=headers, param=param)",
"def get_task_progress():\r\n current_time = time()\r\n progress = {'action_name': action_name,\r\n 'attempted': num_attempted,\r\n 'succeeded': num_succeeded,\r\n 'skipped': num_skipped,\r\n 'failed': num_failed,\r\n 'total': num_total,\r\n 'duration_ms': int((current_time - start_time) * 1000),\r\n }\r\n return progress",
"def copy_progress(self) -> Sequence['outputs.DataBoxDiskCopyProgressResponse']:\n return pulumi.get(self, \"copy_progress\")",
"def percent_busy(self):\n return self._percent_busy",
"async def update(self) -> None:\n # pause logic\n if not self.running.is_set():\n self.add_to_output(\"Paused...\")\n await self.running.wait()\n\n # tell the user we are updating\n self.add_to_output(f\"Updating...\")\n # create ssh connection to miner\n try:\n conn = await self.get_connection(\"root\", \"admin\")\n # tell the user we are sending the update file\n self.add_to_output(\"Sending upgrade file...\")\n # send the update file\n await self.send_file(UPDATE_FILE_S9, \"/tmp/firmware.tar\")\n # install the update and collect the result\n result = await conn.run(f'sysupgrade /tmp/firmware.tar')\n self.add_to_output(result.stdout.strip())\n # tell the user the update completed\n self.add_to_output(f\"Update completed...\")\n except OSError:\n self.add_to_output(f\"Unknown error...\")",
"def get_progress(self):\r\n score_dict = self.get_score()\r\n score = score_dict['score']\r\n total = score_dict['total']\r\n\r\n if total > 0:\r\n if self.weight is not None:\r\n # Progress objects expect total > 0\r\n if self.weight == 0:\r\n return None\r\n\r\n # scale score and total by weight/total:\r\n score = score * self.weight / total\r\n total = self.weight\r\n\r\n try:\r\n return Progress(score, total)\r\n except (TypeError, ValueError):\r\n log.exception(\"Got bad progress\")\r\n return None\r\n return None",
"def status():\n used = get_space_used()\n avail = get_space_available()\n allowed = config.download.space_to_use\n print \"Space used by downloaded files: %.2f GB of %.2f GB (%.2f%%)\" % \\\n (used/1024.0**3, allowed/1024.0**3, 100.0*used/allowed)\n print \"Space available on file system: %.2f GB\" % (avail/1024.0**3)\n\n numwait = jobtracker.query(\"SELECT COUNT(*) FROM requests \" \\\n \"WHERE status='waiting'\", \\\n fetchone=True)\n numfail = jobtracker.query(\"SELECT COUNT(*) FROM requests \" \\\n \"WHERE status='failed'\", \\\n fetchone=True)\n print \"Number of requests waiting: %d\" % numwait\n print \"Number of failed requests: %d\" % numfail\n\n numdlactive = jobtracker.query(\"SELECT COUNT(*) FROM files \" \\\n \"WHERE status='downloading'\", \\\n fetchone=True)\n numdlfail = jobtracker.query(\"SELECT COUNT(*) FROM files \" \\\n \"WHERE status='failed'\", \\\n fetchone=True)\n print \"Number of active downloads: %d\" % numdlactive\n print \"Number of failed downloads: %d\" % numdlfail",
"def get_update_number( self ):",
"def updt(total, progress):\n barLength, status = 20, \"\"\n progress = float(progress) / float(total)\n if progress >= 1.:\n progress, status = 1, \"\\r\\n\"\n block = int(round(barLength * progress))\n text = \"\\r[{}] {:.0f}% {}\".format(\n \"#\" * block + \"-\" * (barLength - block), round(progress * 100, 0),\n status)\n sys.stdout.write(text)\n sys.stdout.flush()",
"def updt(total, progress):\n barLength, status = 20, \"\"\n progress = float(progress) / float(total)\n if progress >= 1.:\n progress, status = 1, \"\\r\\n\"\n block = int(round(barLength * progress))\n text = \"\\r[{}] {:.0f}% {}\".format(\n \"#\" * block + \"-\" * (barLength - block), round(progress * 100, 0),\n status)\n sys.stdout.write(text)\n sys.stdout.flush()",
"def updateProgress (self, iteration, total, prefix='Progress', suffix='complete', decimals=1, length=100, fill='█', printEnd=\"\\r\"):\n if iteration == 0:\n self.start_time = timer()\n ETC = '' #Estimated Time to Completion\n if (iteration/total)*100 >= self.updates[self.update_counter]:\n elapsed = timer() - self.start_time\n if iteration != 0:\n minutes = int((elapsed * total/iteration - elapsed)//60)\n seconds = int((elapsed * total/iteration - elapsed)%60)\n ETC = \"(~{:d} mins {:d}s left)\".format(minutes, seconds)\n percent = (\"{0:.\" + str(decimals) + \"f}\").format(100 * (iteration / float(total)))\n filledLength = int(length * iteration // total)\n bar = fill * filledLength + '-' * (length - filledLength)\n # Unfortunately \\r doesn't work in the pycharm console, so we have to reprint the whole bar everytime,\n # clogging the console.\n #print(f'\\r{prefix} |{bar}| {percent}% {suffix} {ETC}', end = printEnd)\n print(f'{prefix} |{bar}| {percent}% {suffix} {ETC}')\n # Print New Line on Complete\n if iteration == total:\n print()\n self.update_counter += 1",
"def percent_done(self) -> int:\n percent = (self.downloaded_images/self.total_images) * 100\n return int(percent)",
"def updates(_progress, _tag, _summary):\n log.msg(\"%d%%: %s\" % (_progress, _summary))",
"def percent_complete(self) -> int:\n return pulumi.get(self, \"percent_complete\")",
"def get_progress(count, block_size, total_size) -> None:\r\n percent = int(count * block_size * 100 / total_size)\r\n print(f\"Downloading clip... {percent}%\", end=\"\\r\", flush=True)",
"def returnProgress(foamCase):\n status = checkIfExist(foamCase)\n if status==1:\n endTime = readInput('controlDict', 'endTime', foamCase=foamCase)\n logFile = foamCase + '/log'\n f = open(logFile)\n lines = tailFile(f, 100)\n f.close()\n # proc = subprocess.Popen(['tail', '-n100', logFile], stdout=subprocess.PIPE)\n # lines = proc.stdout.readlines()\n expr = '(?<=Time = ).*'\n for line in lines:\n # print('\\n'+line)\n # 'Time = ' in line\n if line.startswith('Time'):\n Time = float(re.search(expr, line).group(0))\n progress = Time/float(endTime)*100\n elif status==0:\n progress = 100.\n elif status==2:\n progress = 0.\n return progress",
"def _update(self, data):\n self.status = data['status']\n self.progress = data['progress']",
"def GetProgress(self):\n return self.objects_finished",
"def update(self):\n self.cpus_current = psutil.cpu_percent(percpu=True)\n assert len(self.cpus_current) == len(self.cpus)\n return self.cpus_current",
"async def get_firmware_version(self):\n current_time = time.time()\n if self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE) == '':\n await self._send_sysex(PrivateConstants.REPORT_FIRMWARE, None)\n while self.query_reply_data.get(\n PrivateConstants.REPORT_FIRMWARE) == '':\n elapsed_time = time.time()\n if elapsed_time - current_time > 2:\n return None\n await asyncio.sleep(self.sleep_tune)\n reply = ''\n for x in self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE):\n reply_data = ord(x)\n if reply_data:\n reply += chr(reply_data)\n self.query_reply_data[PrivateConstants.REPORT_FIRMWARE] = reply\n return self.query_reply_data.get(PrivateConstants.REPORT_FIRMWARE)",
"def _dl_progress_bar(self):\n if not self.show_progress:\n return\n\n if self.file_size:\n ratio = float(self.bytes_read) / self.file_size\n else:\n ratio = 1\n percent = int(ratio * 100)\n\n bar_len = 60\n done = int(bar_len * ratio)\n bar = ('=' * done) + (' ' * (bar_len - done))\n\n progress = '{percent: >3}%: [{bar}]'.format(percent=percent, bar=bar)\n backspace = '\\b' * len(progress)\n print(backspace + '\\r', end='')\n print(progress, end='')",
"def update_progress(self, argv):\n data = {}\n arg_types = [{\n \"type\": \"cognitive\",\n \"rating\": argv.c,\n \"description\": argv.cd\n },\n {\n \"type\": \"emotional\",\n \"rating\": argv.e,\n \"description\": argv.ed\n }\n , {\n \"type\": \"physical\",\n \"rating\": argv.p,\n \"description\": argv.pd\n }\n\n ]\n\n for data_type in arg_types:\n if data_type[\"rating\"]:\n if 100 >= int(data_type[\"rating\"]) > 0:\n data[data_type[\"type\"]] = {\n \"rating\": data_type[\"rating\"],\n \"description\": data_type[\"description\"],\n \"sentiment\": self.get_sentiment(data_type[\"description\"])\n }\n else:\n logging.error(\"Invalid Rating, must be 1-10\")\n exit(1)\n today = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M\")\n insert_data = {today: {\"data\": data, \"weather\": Weather.get_weather()}}\n logging.info(\"Updating Progress\")\n self.prog_logs.insert(insert_data)",
"def report_rest_percentage(self):\n self._logger.info(\"Running report for device {}\".format(self.name))\n tot_time_since_bday = (datetime.utcnow() - self.bday).total_seconds() # duration\n time_at_rest = self.total_seconds_rested\n return time_at_rest / float(tot_time_since_bday) * 100"
] | [
"0.71777356",
"0.7156413",
"0.69528806",
"0.69204676",
"0.6803651",
"0.6803651",
"0.6798143",
"0.67429936",
"0.67102855",
"0.67044973",
"0.67044973",
"0.67044973",
"0.67044973",
"0.66923326",
"0.6637259",
"0.6631922",
"0.662532",
"0.6559995",
"0.65389353",
"0.6411595",
"0.6337092",
"0.6326037",
"0.630838",
"0.62910026",
"0.62078714",
"0.61872536",
"0.6162816",
"0.61385465",
"0.608866",
"0.608866",
"0.59922194",
"0.59735763",
"0.5961052",
"0.59380347",
"0.59380347",
"0.59332895",
"0.5922927",
"0.58769584",
"0.58595914",
"0.5832136",
"0.58240235",
"0.58113205",
"0.5802312",
"0.5791267",
"0.5790684",
"0.5757613",
"0.5742852",
"0.5742852",
"0.5742353",
"0.57369757",
"0.5702445",
"0.56992906",
"0.5687867",
"0.56695807",
"0.56695807",
"0.56611264",
"0.5658618",
"0.56501526",
"0.56501526",
"0.56501526",
"0.56501526",
"0.56494105",
"0.56395245",
"0.5638747",
"0.563015",
"0.5627521",
"0.56203544",
"0.5616278",
"0.5609052",
"0.56044537",
"0.5583163",
"0.5582211",
"0.55801886",
"0.55796134",
"0.5576992",
"0.55752",
"0.55684584",
"0.5562325",
"0.55558634",
"0.55508345",
"0.55452603",
"0.55429375",
"0.5537319",
"0.55322754",
"0.5515819",
"0.55020237",
"0.55020237",
"0.55003375",
"0.5495643",
"0.54865825",
"0.5486077",
"0.54761547",
"0.5475619",
"0.5474985",
"0.5469858",
"0.5466992",
"0.5464519",
"0.54607385",
"0.54535073",
"0.5448232"
] | 0.8577004 | 0 |
get the number of GPU devices connected. | def _get_number_of_gpu_devices_connected(self):
gpu_devices = self._get_gpu_pci_devices()
gpu_devices_count = len(gpu_devices)
return {'pci_gpu_devices': gpu_devices_count} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def device_count() -> int:\n return flow._oneflow_internal.CudaGetDeviceCount()",
"def num_devices(self):\n\t\t\treturn cuda.Device.count()",
"def countGPUs(self):\n return libnao_gpu.CountDevices()",
"def get_gpu_count():\n\n gpu_count = 0\n\n env_cuda_devices = os.environ.get('CUDA_VISIBLE_DEVICES', None)\n if env_cuda_devices is not None:\n assert isinstance(env_cuda_devices, str)\n try:\n if not env_cuda_devices:\n return 0\n gpu_count = len(\n [x for x in env_cuda_devices.split(',') if int(x) >= 0])\n logger.info(\n 'CUDA_VISIBLE_DEVICES found gpu count: {}'.format(gpu_count))\n except:\n logger.info('Cannot find available GPU devices, using CPU now.')\n gpu_count = 0\n else:\n try:\n gpu_count = str(subprocess.check_output([\"nvidia-smi\",\n \"-L\"])).count('UUID')\n logger.info('nvidia-smi -L found gpu count: {}'.format(gpu_count))\n except:\n logger.info('Cannot find available GPU devices, using CPU now.')\n gpu_count = 0\n return gpu_count",
"def get_gpu_count():\n\n gpu_count = 0\n\n env_cuda_devices = os.environ.get('CUDA_VISIBLE_DEVICES', None)\n if env_cuda_devices is not None:\n assert isinstance(env_cuda_devices, str)\n try:\n if not env_cuda_devices:\n return 0\n gpu_count = len(\n [x for x in env_cuda_devices.split(',') if int(x) >= 0])\n logger.info(\n 'CUDA_VISIBLE_DEVICES found gpu count: {}'.format(gpu_count))\n except:\n logger.info(\n 'Cannot find available GPU devices, using CPU or other devices now.'\n )\n gpu_count = 0\n else:\n try:\n gpu_count = str(subprocess.check_output([\"nvidia-smi\",\n \"-L\"])).count('UUID')\n logger.info('nvidia-smi -L found gpu count: {}'.format(gpu_count))\n except:\n logger.info(\n 'Cannot find available GPU devices, using CPU or other devices now. (Please check whether you can execute `nvidia-smi` command.)'\n )\n gpu_count = 0\n return gpu_count",
"def get_device_count():\n c_num = ct.c_int(0)\n safe_call(backend.get().af_get_device_count(ct.pointer(c_num)))\n return c_num.value",
"def get_count():\n _check_init()\n return _pypm.CountDevices()",
"def num_gpus():\n count = ctypes.c_int()\n check_call(_LIB.MXGetGPUCount(ctypes.byref(count)))\n return count.value",
"def get_number_of_devices(self):\n return self.drt_manager.get_number_of_devices()",
"def get_device_count():\n debug(\"MpOrLibUsb.get_device_count()\")\n num = MpOrLibUsb.__get_device_count_Mpusb()\n num = num + MpOrLibUsb.__get_device_count_Libusb()\n return num\n #end get_device_count()",
"def get_number_devices(self):\n no_devices = c_int()\n self._dll.ShamrockGetNumberDevices(byref(no_devices))\n return no_devices.value",
"def num_gpus() -> int:\n gpus = list(range(len(os.environ[\"NVIDIA_VISIBLE_DEVICES\"].split(\",\"))))\n return len(gpus)",
"def num_devices(self):\n # put every device into bypass mode (IR = all 1's)\n tdi = bits.bits()\n tdi.ones(_flush_size)\n self.driver.scan_ir(tdi)\n # now each DR is a single bit\n # the DR chain length is the number of devices\n return self.dr_length()",
"def get_available_device():\n if torch.cuda.is_available():\n free_mem, device_idx = 0.0, 0\n for d in range(torch.cuda.device_count()):\n mem = torch.cuda.get_device_properties(d).total_memory - torch.cuda.memory_allocated(d)\n if mem > free_mem:\n device_idx = d\n free_mem = mem\n return torch.device(f'cuda:{device_idx}')\n else:\n return torch.device('cpu')",
"def get_gpus():\n try:\n re = subprocess.check_output([\"nvidia-smi\", \"-L\"], universal_newlines=True)\n except OSError:\n return []\n return range(len([i for i in re.split('\\n') if 'GPU' in i]))",
"def gpu_devices(self):\n return self._gpu_devices",
"def get_free_gpu():\n\tos.system('nvidia-smi -q -d Memory |grep -A4 GPU|grep Free >tmp')\n\tif os.path.exists('tmp'):\n\t\tmemory_available = [int(x.split()[2]) for x in open('tmp', 'r').readlines()]\n\t\tos.remove('tmp')\n\t\treturn np.argmax(memory_available)\n\treturn 0",
"def CUDA_VISIBLE_DEVICES(self):\n return self._CUDA_VISIBLE_DEVICES",
"def _get_max_gpu_processes(self):\n mem_usage = self._get_gpu_mem_usage()\n print('Mem Usage:', mem_usage)\n\n num_processes = int(1 / mem_usage)\n return num_processes",
"def _count_devices(self):\n number_of_devices = ctypes.c_uint()\n\n if ctypes.windll.user32.GetRawInputDeviceList(\n ctypes.POINTER(ctypes.c_int)(),\n ctypes.byref(number_of_devices),\n ctypes.sizeof(RawInputDeviceList)) == -1:\n warn(\"Call to GetRawInputDeviceList was unsuccessful.\"\n \"We have no idea if a mouse or keyboard is attached.\",\n RuntimeWarning)\n return\n\n devices_found = (RawInputDeviceList * number_of_devices.value)()\n\n if ctypes.windll.user32.GetRawInputDeviceList(\n devices_found,\n ctypes.byref(number_of_devices),\n ctypes.sizeof(RawInputDeviceList)) == -1:\n warn(\"Call to GetRawInputDeviceList was unsuccessful.\"\n \"We have no idea if a mouse or keyboard is attached.\",\n RuntimeWarning)\n return\n\n for device in devices_found:\n if device.dwType == 0:\n self._raw_device_counts['mice'] += 1\n elif device.dwType == 1:\n self._raw_device_counts['keyboards'] += 1\n elif device.dwType == 2:\n self._raw_device_counts['otherhid'] += 1\n else:\n self._raw_device_counts['unknown'] += 1",
"def get_usb_devices_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetUsbDevicesCount', self.handle)",
"def num_supported_devices(self):\n return int(self._dll.JLINKARM_DEVICE_GetInfo(-1, 0))",
"def num_connected_emulators(self):\n return self._dll.JLINKARM_EMU_GetNumDevices()",
"def count_free_gpus():\n return len(get_free_gpus())",
"def get_free_gpu(self):\r\n output = subprocess.Popen('nvidia-smi -q -d Memory |grep -A4 GPU|grep Free', stdout=subprocess.PIPE,\r\n shell=True).communicate()[0]\r\n output = output.decode(\"ascii\")\r\n\r\n # assumes that it is on the popiah server and the last gpu is not used\r\n memory_available = [int(x.split()[2]) for x in output.split(\"\\n\")[:-2]]\r\n\r\n if memory_available:\r\n print(\"Setting GPU to use to PID {}\".format(np.argmax(memory_available)))\r\n return np.argmax(memory_available)\r\n\r\n if not memory_available:\r\n print('No GPU memory available')",
"def get_available_devices():\n executable_path = os.path.join(os.path.dirname(__file__), 'build')\n try:\n num_devices = int(subprocess.check_output(\n [\"{}/query_devices\".format(executable_path)]))\n except subprocess.CalledProcessError as e:\n return [0]\n\n FNULL = open(os.devnull, 'w')\n\n available_devices = []\n for i in range(num_devices):\n try:\n if b\"NVIDIA\" in subprocess.check_output(\n [\"{}/test_device\".format(executable_path),\n str(i)], stderr=FNULL):\n available_devices.append(i)\n logging.info('Device {} is available for rendering'.format(i))\n except subprocess.CalledProcessError as e:\n logging.info(e)\n logging.info('Device {} is not available for rendering'.format(i))\n FNULL.close()\n\n return available_devices",
"def get_devs_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetDevsCount', self.handle)",
"def totalDevices(self):\n if not self.exists:\n return self._totalDevices\n else:\n return len(self.parents)",
"def detect_gpus():\n def worker(q):\n # `device_lib` will not release the memory it took,\n # so we run it in a sub-process.\n try:\n from tensorflow.python.client import device_lib\n\n if is_tensorflow_version_higher_or_equal('1.8.0'):\n config = tf.ConfigProto()\n config.gpu_options.allow_growth = True\n devices = list(device_lib.list_local_devices(config))\n else:\n devices = list(device_lib.list_local_devices())\n gpus = [\n (device.name, device)\n for device in devices\n if device.device_type == 'GPU'\n ]\n union_set = {i: i for i in range(len(gpus))}\n\n for i, (name, device) in enumerate(gpus):\n assert (device.name == '/device:GPU:{}'.format(i))\n for link in device.locality.links.link:\n if link.device_id != i:\n union_set[i] = union_set[link.device_id]\n\n for i in six.iterkeys(union_set):\n while union_set[i] != union_set[union_set[i]]:\n union_set[i] = union_set[union_set[i]]\n\n root_devices = sorted(set(union_set.values()))\n gpu_groups = [[] for _ in range(len(root_devices))]\n dev_to_group = {j: i for i, j in enumerate(root_devices)}\n for i, (name, device) in enumerate(gpus):\n gpu_groups[dev_to_group[union_set[i]]].append(name)\n\n q.put((1, gpu_groups))\n except Exception:\n q.put((0, traceback.format_exc()))\n\n q = mp.Queue()\n p = mp.Process(target=worker, args=(q,))\n\n try:\n p.start()\n result = q.get()\n if result[0] == 1:\n return result[1]\n else:\n raise RuntimeError(\n 'Failed to retrieve GPU information, the traceback of '\n 'sub-process is:\\n {}'.\n format('\\n '.join(result[1].split('\\n')))\n )\n finally:\n p.terminate()\n p.join()",
"def return_free_GPU():\r\n if torch.cuda.is_available():\r\n gpu_num = torch.cuda.device_count()\r\n device = torch.device('cuda:{}'.format(gpu_num-1))\r\n print('Using GPU:[{}]/[{}] for training...'.format(gpu_num-1,gpu_num-1))\r\n return device\r\n \r\n raise ValueError('GPU not available for training. Check CUDA env with function \"check_cuda_env\"')",
"def cudaMemGetInfo(mb=False):\n print 'gpu: '\n free = ctypes.c_size_t()\n total = ctypes.c_size_t()\n ret = cuda.cudaMemGetInfo(ctypes.byref(free), ctypes.byref(total))\n\n if ret != 0:\n err = cuda.cudaGetErrorString(status)\n raise RuntimeError(\"CUDA Error (%d): %s\" % (status, err))\n\n if mb:\n scale = 1024.0**2\n return free.value / scale, total.value / scale\n else:\n return free.value, total.value",
"def get_display_devs_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetDisplayDevsCount', self.handle)",
"def magma_getdevice():\n\n dev = c_int_type()\n _libmagma.magma_getdevice(ctypes.byref(dev))\n return dev.value",
"def _get_device(self, n_gpu_use):\n n_gpu = torch.cuda.device_count()\n if n_gpu_use > 0 and n_gpu == 0:\n self.logger.warning(\"Warning: There\\'s no GPU available on this machine,\"\n \"training will be performed on CPU.\")\n n_gpu_use = 0\n if n_gpu_use > n_gpu:\n self.logger.warning(f\"Warning: The number of GPU\\'s configured to use is {n_gpu_use}, \"\n f\"but only {n_gpu} are available on this machine.\")\n n_gpu_use = n_gpu\n device = torch.device('cuda:0' if n_gpu_use > 0 else 'cpu')\n list_ids = list(range(n_gpu_use))\n self.logger.info(f'Using device: {device}, {list_ids}')\n return device, list_ids",
"def devices(self):\n return self.enumerate_devices()",
"def get_usb_devs_count(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetUsbDevsCount', self.handle)",
"def gpu_mem_usage():\n mem_usage_bytes = torch.cuda.max_memory_allocated()\n return mem_usage_bytes / _B_IN_MB",
"def gpu_mem_usage():\n mem_usage_bytes = torch.cuda.max_memory_allocated()\n return mem_usage_bytes / _B_IN_MB",
"def get_num_channels():\r\n check_mixer()\r\n return sdl.Mix_GroupCount(-1)",
"def getGpus():\n nvmlInit()\n gpu_list = []\n for i in range(0, nvmlDeviceGetCount()):\n handle = nvmlDeviceGetHandleByIndex(i)\n gpu_list.append(NvidiaGPU(handle))\n return gpu_list",
"def get_available_gpus():\n local_device_protos = device_lib.list_local_devices()\n return [x.name for x in local_device_protos if x.device_type == \"GPU\"]",
"def _get_available_gpus():\n #global _LOCAL_DEVICES\n if tfback._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tfback._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def _get_available_gpus():\n #global _LOCAL_DEVICES\n if tfback._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tfback._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def gpu_per_unit(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"gpu_per_unit\")",
"def gpu_per_unit(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"gpu_per_unit\")",
"def gpu_per_unit(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"gpu_per_unit\")",
"def get_ncpu():\n from multiprocessing import cpu_count\n return cpu_count()",
"def GetGPU():\n return option['device_id']",
"def _get_available_gpus():\r\n #global _LOCAL_DEVICES\r\n if tfback._LOCAL_DEVICES is None:\r\n devices = tf.config.list_logical_devices()\r\n tfback._LOCAL_DEVICES = [x.name for x in devices]\r\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def _get_available_gpus():\n # global _LOCAL_DEVICES\n if tf_back._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tf_back._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tf_back._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def get_generic_pci_devices_count(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetGenericPciDevicesCount', self.handle)",
"def devices(self):\n\t\t\tdevices = []\n\t\t\tnum = cuda.Device.count()\n\t\t\tfor id in range(num):\n\t\t\t\tname = cuda.Device(id).name()\n\t\t\t\tmemory = cuda.Device(id).total_memory()\n\t\t\t\tdevices.append((memory, name, id))\n\t\t\treturn devices",
"def _get_gpu_mem_usage(self):\n assert self.network_generator is not None, \\\n \"Unable to measure network memory utilization without generator function\"\n\n dispatcher = MulticoreDispatcher(1)\n dispatcher.run(get_model_gpu_allocation, self.network_generator)\n mem_usage = dispatcher.join()[0]\n mem_usage = math.ceil(mem_usage / .1) * .1 #Round up to nearest 10%\n dispatcher.shutdown()\n return mem_usage",
"def _get_available_gpus():\n # global _LOCAL_DEVICES\n if tfback._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tfback._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def getConnectedUsersCount(self):\n\n\t\treturn len(self.connectedUsers)",
"def gpus_used(self):\n\n return list(self._gpu_data.keys())",
"def numcpu () :\n import multiprocessing\n return multiprocessing.cpu_count()",
"def _get_available_gpus():\n global _LOCAL_DEVICES\n if _LOCAL_DEVICES is None:\n if _is_tf_1():\n devices = get_session().list_devices()\n _LOCAL_DEVICES = [x.name for x in devices]\n else:\n _LOCAL_DEVICES = tf.config.experimental_list_devices()\n return [x for x in _LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def memUsedGpu(self):\n return None # amount not known",
"def prepare_device(n_gpu_use):\n n_gpu = torch.cuda.device_count()\n if n_gpu_use > 0 and n_gpu == 0:\n print(\"Warning: There\\'s no GPU available on this machine,\"\n \"training will be performed on CPU.\")\n n_gpu_use = 0\n if n_gpu_use > n_gpu:\n print(f\"Warning: The number of GPU\\'s configured to use is {n_gpu_use}, but only {n_gpu} are \"\n \"available on this machine.\")\n n_gpu_use = n_gpu\n device = torch.device('cuda:0' if n_gpu_use > 0 else 'cpu')\n list_ids = list(range(n_gpu_use))\n return device, list_ids",
"def prepare_device(n_gpu_use):\n n_gpu = torch.cuda.device_count()\n if n_gpu_use > 0 and n_gpu == 0:\n print(\"Warning: There\\'s no GPU available on this machine,\"\n \"training will be performed on CPU.\")\n n_gpu_use = 0\n if n_gpu_use > n_gpu:\n print(f\"Warning: The number of GPU\\'s configured to use is {n_gpu_use}, but only {n_gpu} are \"\n \"available on this machine.\")\n n_gpu_use = n_gpu\n device = torch.device('cuda:0' if n_gpu_use > 0 else 'cpu')\n list_ids = list(range(n_gpu_use))\n return device, list_ids",
"def _get_available_gpus():\n #global _LOCAL_DEVICES\n if tfback._LOCAL_DEVICES is None:\n devices = tf.config.list_logical_devices()\n tfback._LOCAL_DEVICES = [x.name for x in devices]\n return [x for x in tfback._LOCAL_DEVICES if 'device:gpu' in x.lower()]",
"def device_count():\n apipath = \"/targets/devices\"\n url = SERVER + apipath\n params = {\n 'q': '(deviceType:ASA)',\n 'agg': 'count'}\n headers = {\n 'Accept': \"application/json\",\n 'Content-Type': \"application/json\",\n 'Authorization': \"bearer {}\".format(token)}\n response = requests.get(url, verify=False, stream=True, headers=headers, params=params)\n getstatuscode = response.status_code\n getresponse = response.json()\n if getstatuscode == 200:\n return getresponse\n else:\n response.raise_for_status()",
"def enumerate_devices():\n devices = list(\n map(XInputJoystick, list(range(XInputJoystick.max_devices))))\n return [device for device in devices if device.is_connected()]",
"def prepare_device(n_gpu_use):\n n_gpu = torch.cuda.device_count()\n if n_gpu_use > 0 and n_gpu == 0:\n print(\"Warning: There\\'s no GPU available on this machine,\"\n \"training will be performed on CPU.\")\n n_gpu_use = 0\n if n_gpu_use > n_gpu:\n print(\"Warning: The number of GPU\\'s configured to use is {}, but only {} are available \"\n \"on this machine.\".format(n_gpu_use, n_gpu))\n n_gpu_use = n_gpu\n device = torch.device('cuda:0' if n_gpu_use > 0 else 'cpu')\n list_ids = list(range(n_gpu_use))\n return device, list_ids",
"def cpu_count():\n num_available_cores = multiprocessing.cpu_count()\n return num_available_cores",
"def try_all_gpus(): #@save\n num_gpus = len(tf.config.experimental.list_physical_devices('GPU'))\n devices = [tf.device(f'/GPU:{i}') for i in range(num_gpus)]\n return devices if devices else [tf.device('/CPU:0')]",
"def get_free_gpu_memory(cuda_device_index):\n if sys.platform == \"darwin\":\n # No GPUs on darwin...\n return 0\n result = sp.check_output('nvidia-smi --query-gpu=memory.free '\n '--format=csv,nounits,noheader',\n shell=True)\n result = result.decode('utf-8').split('\\n')[:-1]\n log.verbose(f'The system has {len(result)} gpu(s).')\n free_mem = int(result[cuda_device_index])\n log.info(f'The {cuda_device_index}-th GPU has {free_mem} MB free.')\n if cuda_device_index >= len(result):\n raise ValueError(f\"Couldn't parse result for GPU #{cuda_device_index}\")\n return int(result[cuda_device_index])",
"def get_available_gpus() -> List[int]:\n orig_visible_devices = os.environ[f\"{CUDA_ENVVAR}\"]\n available_gpus = [int(g.strip()) for g in orig_visible_devices.split(\",\") if g and not g.isspace()]\n return available_gpus",
"def get_cuda_device(minor_idx):\n\n executable_path = os.path.join(os.path.dirname(__file__), 'build')\n try:\n num_devices = int(subprocess.check_output(\n [\"{}/query_devices\".format(executable_path)]))\n except subprocess.CalledProcessError as e:\n return 0\n\n for i in range(num_devices):\n output = subprocess.check_output([\"nvidia-smi\", '-q', '-i', str(i)])\n output_list = output.decode(\"utf-8\").split('\\n')\n output_list = [item for item in output_list if 'Minor' in item]\n num = int(output_list[0].split(':')[-1])\n if num == minor_idx:\n return i\n return 0",
"def get_generic_pci_devs_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetGenericPciDevsCount', self.handle)",
"def num_drivers(self):\n num = c_int()\n ckresult(_dll.FMOD_System_GetNumDrivers(self._ptr, byref(num)))\n return num.value",
"def enumerateDevices():\r\n \r\n return tuple((dev,dev) for dev in pygame.camera.list_cameras())",
"def is_gpu_available() -> bool:\n return torch.cuda.is_available()",
"def _display_cuda_devices():\n\n cuda_query_output = subprocess.run(\"nvidia-smi --query-gpu=gpu_uuid,gpu_name,compute_mode --format=csv\", shell=True, capture_output=True, text=True)\n # Check if command worked\n if cuda_query_output.returncode == 0:\n # Split by line jump and comma\n cuda_devices_list = [entry for entry in cuda_query_output.stdout.splitlines()]\n logger.debug(f\"CUDA devices available: {*cuda_devices_list,}\")\n # We only support \"Default\" and not \"Exclusive_Process\" for the compute mode\n if \"Default\" not in cuda_query_output.stdout:\n logger.warning(f\"GPU in 'Exclusive_Process' mode (or Prohibited), one context is allowed per device. This may prevent some openmmtools features from working. GPU must be in 'Default' compute mode\")\n # Handel the case where the command had some error\n else:\n logger.debug(f\"nvidia-smi command failed: {cuda_query_output.stderr}, this is expected if there is no GPU available\")",
"def deviceMemory(self):\n return 1",
"def detect_available():\n global _CUDA_AVAILABLE\n if _CUDA_AVAILABLE is not None: return _CUDA_AVAILABLE\n _CUDA_AVAILABLE = shell.run('{} -c \"import torch;print(torch.cuda.is_available())\"'.format(sys.executable)).strip('\\n') == 'True'\n return _CUDA_AVAILABLE",
"def get_device_of(tensor: torch.Tensor) -> int:\n if not tensor.is_cuda:\n return -1\n else:\n return tensor.get_device()",
"def _get_num_processors():\n cores = 0\n try:\n cores = len(os.sched_getaffinity(0))\n except AttributeError:\n cores = cpu_count()\n return cores",
"def cuda_set_n_free_gpus(num_gpu = None, verbose=False):\n free = get_free_gpus()\n if num_gpu != None:\n check_num(num_gpu)\n if len(free) < num_gpu:\n raise ValueError(\"Not enough free GPUs available - only %d available - use count_free_gpus() or pass 'None' as num_gpu for maximum number\"%len(free))\n os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = \",\".join((map(str,free)))\n if verbose:\n print(get_tf_visible_gpus())",
"def get_num_joysticks(self) -> int:\n return self._pygame.joystick.get_count()",
"def ncameras(self):\n n = ct.c_long()\n self.lib.GetAvailableCameras(ct.pointer(n))\n return n.value",
"def GetInstanceCount():\n return _gmat_py.GmatBase_GetInstanceCount()",
"def device(self):\n return torch.cuda.current_device()",
"def count_cpus():\r\n try:\r\n return multiprocessing.cpu_count()\r\n except Exception:\r\n logging.exception('can not get cpu count from'\r\n ' multiprocessing.cpu_count()')\r\n cpuinfo = get_cpuinfo()\r\n # Returns at least one cpu. Check comment #1 in crosbug.com/p/9582.\r\n return len(cpuinfo) or 1",
"def max_gpu(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"max_gpu\")",
"def _prepare_device(self, n_gpu_use):\n n_gpu = torch.cuda.device_count()\n if n_gpu_use > 0 and n_gpu == 0:\n self.logger.warning(\n \"Warning: There\\'s no GPU available on this machine, training will be performed on CPU.\")\n n_gpu_use = 0\n if n_gpu_use > n_gpu:\n self.logger.warning(\n \"Warning: The number of GPU\\'s configured to use is {}, but only {} are available on this machine.\".format(\n n_gpu_use, n_gpu))\n n_gpu_use = n_gpu\n device = torch.device('cuda:0' if n_gpu_use > 0 else 'cpu')\n list_ids = list(range(n_gpu_use))\n return device, list_ids",
"def get_cameras_number():\n lib.initlib()\n return lib.is_GetNumberOfCameras()",
"def list_devices():\r\n DeviceManagerCLI.BuildDeviceList()\r\n return DeviceManagerCLI.GetDeviceList()",
"def getGpuPowerUsage(self):\n gpuPower = int(str(nvmlDeviceGetPowerUsage(self.handle)))\n gpuPower = int(round(gpuPower / 1000))\n return gpuPower",
"def _prepare_device(self, n_gpu_use):\n n_gpu = torch.cuda.device_count()\n if n_gpu_use > 0 and n_gpu == 0:\n self.logger.warning(\"Warning: There\\'s no GPU available on this machine,\"\n \"training will be performed on CPU.\")\n n_gpu_use = 0\n if n_gpu_use > n_gpu:\n self.logger.warning(\"Warning: The number of GPU\\'s configured to use is {}, but only {} are available \"\n \"on this machine.\".format(n_gpu_use, n_gpu))\n n_gpu_use = n_gpu\n device = torch.device('cuda:0' if n_gpu_use > 0 else 'cpu')\n list_ids = list(range(n_gpu_use))\n return device, list_ids",
"def get_devices_status(self):\n return {\n\n dev_name:{\n 'running_processes': self.get_running_processes(dev_handler),\n 'gpu_memory_free': utils.psutil_parse_readable_bytes(\n NvmlHandler.exec_nvml_function(nvmlDeviceGetMemoryInfo, dev_handler, 'free')\n ),\n 'gpu_memory_used': utils.psutil_parse_readable_bytes(\n NvmlHandler.exec_nvml_function(nvmlDeviceGetMemoryInfo, dev_handler, 'used')\n )\n } for dev_name, dev_handler in self.devices.items()\n }",
"def has_cuda_context():\n init_once()\n if not nvmlInitialized:\n return False\n for index in range(device_get_count()):\n handle = pynvml.nvmlDeviceGetHandleByIndex(index)\n if hasattr(pynvml, \"nvmlDeviceGetComputeRunningProcesses_v2\"):\n running_processes = pynvml.nvmlDeviceGetComputeRunningProcesses_v2(handle)\n else:\n running_processes = pynvml.nvmlDeviceGetComputeRunningProcesses(handle)\n for proc in running_processes:\n if os.getpid() == proc.pid:\n return index\n return False",
"def error_device_count(self):\n if \"errorDeviceCount\" in self._prop_dict:\n return self._prop_dict[\"errorDeviceCount\"]\n else:\n return None",
"def get_connected_users_count(room: PublicChatRoom) -> int:\n return room.users.count()",
"def compliant_device_count(self):\n if \"compliantDeviceCount\" in self._prop_dict:\n return self._prop_dict[\"compliantDeviceCount\"]\n else:\n return None",
"def channels(self) -> int:\n return len(self._channel_arrays)",
"def numberOfCamera():\n return numCams",
"def sockets(self):\n return int(self.num_cpu_sockets) # type: ignore",
"def getThreads():\r\n return multiprocessing.cpu_count()"
] | [
"0.8710016",
"0.8510717",
"0.8488628",
"0.83665913",
"0.83246",
"0.79626137",
"0.79313564",
"0.7867789",
"0.7816904",
"0.76557755",
"0.746821",
"0.73369837",
"0.71500075",
"0.6916099",
"0.6856592",
"0.6846394",
"0.68300164",
"0.68286896",
"0.6778741",
"0.6757387",
"0.66655433",
"0.66613066",
"0.66248727",
"0.65919375",
"0.6507337",
"0.6488157",
"0.64831185",
"0.64489144",
"0.64026296",
"0.6390918",
"0.6390116",
"0.6350713",
"0.63335323",
"0.6328702",
"0.63215625",
"0.63176316",
"0.6313786",
"0.6313786",
"0.6290233",
"0.62474275",
"0.62411",
"0.62241423",
"0.62241423",
"0.6221206",
"0.6221206",
"0.6221206",
"0.62171566",
"0.62017703",
"0.6193072",
"0.6181298",
"0.6177179",
"0.6168067",
"0.6166398",
"0.61557746",
"0.6147121",
"0.6143029",
"0.6140786",
"0.6125265",
"0.6113432",
"0.61093396",
"0.61093396",
"0.610898",
"0.60913694",
"0.6077812",
"0.6072871",
"0.60713077",
"0.6067843",
"0.60531604",
"0.6051328",
"0.604578",
"0.60273224",
"0.6026068",
"0.60226727",
"0.6019988",
"0.6017558",
"0.60073805",
"0.6006239",
"0.6000118",
"0.5992003",
"0.59680957",
"0.59635776",
"0.59634644",
"0.5954505",
"0.5948042",
"0.59435797",
"0.594345",
"0.59255683",
"0.59129596",
"0.59112865",
"0.5906275",
"0.59054214",
"0.58886284",
"0.58799857",
"0.5876209",
"0.58748543",
"0.5874699",
"0.5874495",
"0.5864589",
"0.58548856",
"0.5854546"
] | 0.8072107 | 5 |
Retrieves if server is TPM capable or not. | def _get_tpm_capability(self):
tpm_values = {"NotPresent": False,
"PresentDisabled": True,
"PresentEnabled": True}
try:
tpm_state = self._get_bios_setting('TpmState')
except exception.IloCommandNotSupportedError:
tpm_state = "NotPresent"
tpm_result = tpm_values[tpm_state]
return tpm_result | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_available():",
"def is_vtd_supported(self):\n\t\treturn bool(call_sdk_function('PrlSrvCfg_IsVtdSupported', self.handle))",
"def evaluate_hardware_support(self):\n return hardware.HardwareSupport.SERVICE_PROVIDER",
"def is_available(self) -> bool:\n return (\n len(self._gpu_ids) > 1\n and \"TORCHELASTIC_RUN_ID\"\n not in os.environ # If otx is executed by torchrun, then otx multi gpu interface is disabled.\n )",
"def is_ctu_capable():\n\n context = package_context.get_context()\n ctu_func_map_cmd = context.ctu_func_map_cmd\n try:\n version = subprocess.check_output([ctu_func_map_cmd, '-version'])\n except (subprocess.CalledProcessError, OSError):\n version = 'ERROR'\n return version != 'ERROR'",
"def available_on_system(cls):\n return (cls.reason_to_be_disabled() is None)",
"def is_vserver_kernel():\n\n kinfo = commands.getoutput('/bin/uname -a').split()[2]\n return '-vs' in kinfo",
"def available_t5():\n return _t5_availability",
"def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]",
"def check_hyperv() -> bool:\n try:\n out = subprocess.check_output(\n ['DISM', '/Online', '/Get-FeatureInfo', '/FeatureName:Microsoft-Hyper-V']\n )\n except subprocess.CalledProcessError:\n return False\n\n if 'State : Disabled' in out.decode():\n return False\n\n return True",
"def available(self) -> bool:\n return self._tm_client.api.available",
"def sstcp_enabled():\n return common.POWER_CAP in SYSTEM_CAPS",
"def is_system(self) -> bool:",
"def detect_available():\n global _CUDA_AVAILABLE\n if _CUDA_AVAILABLE is not None: return _CUDA_AVAILABLE\n _CUDA_AVAILABLE = shell.run('{} -c \"import torch;print(torch.cuda.is_available())\"'.format(sys.executable)).strip('\\n') == 'True'\n return _CUDA_AVAILABLE",
"def hasaccelerator():\n\n return torch.cuda.is_available() or torch.backends.mps.is_available() or bool(Models.finddevice())",
"def is_available(cls):\n\n try:\n proc = subprocess.Popen(\n ['systemctl', 'status', 'NetworkManager'],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n )\n proc.communicate()\n return proc.returncode == 0\n except OSError:\n return False",
"def is_supported(self) -> bool:\n\n # TODO logging ?\n # TODO ICMP error if ttl is zero\n return self._version == 4 and self._ihl >= 5 and self._ttl != 0",
"def _get_cpu_virtualization(self):\n try:\n cpu_vt = self._get_bios_setting('ProcVirtualization')\n except exception.IloCommandNotSupportedError:\n return False\n if cpu_vt == 'Enabled':\n vt_status = True\n else:\n vt_status = False\n return vt_status",
"def IsAvailable():\n return settings.user.ui.Get('opportunities_showTemp', False)",
"def is_available() -> bool:\n # This function never throws and returns 0 if driver is missing or can't\n # be initialized\n return device_count() > 0",
"def is_available() -> bool:\n return HAVE_RLE",
"def is_on(self):\n return self._client.get_power()",
"def enable_tpu(self) -> bool:\n return pulumi.get(self, \"enable_tpu\")",
"def is_host_on(self):\n status = False\n cmd = \"/usr/local/bin/wedge_power.sh status\"\n data = run_shell_cmd(cmd)\n Logger.info(\"[FSCD Testing] Executing cmd= [{}]\".format(cmd))\n Logger.info(\"[FSCD Testing] Received data= [{}]\".format(data))\n if \"on\" in data:\n status = True\n Logger.info(\"[FSCD Testing] userver power status {}\".format(status))\n return status",
"def is_nvme(self):\n if self.server_params[-1].bdev_class.value == \"nvme\":\n return True\n return False",
"def get_server_capabilities(self):\n capabilities = {}\n system = self._get_host_details()\n capabilities['server_model'] = system['Model']\n rom_firmware_version = (\n system['Oem']['Hp']['Bios']['Current']['VersionString'])\n capabilities['rom_firmware_version'] = rom_firmware_version\n capabilities.update(self._get_ilo_firmware_version())\n capabilities.update(self._get_number_of_gpu_devices_connected())\n if self._get_tpm_capability():\n capabilities['trusted_boot'] = 'true'\n\n if self._get_cpu_virtualization():\n capabilities['cpu_vt'] = 'true'\n if self._get_nvdimm_n_status():\n capabilities['nvdimm_n'] = 'true'\n try:\n self.get_secure_boot_mode()\n capabilities['secure_boot'] = 'true'\n except exception.IloCommandNotSupportedError:\n # If an error is raised dont populate the capability\n # secure_boot\n pass\n if self._is_sriov_enabled():\n capabilities['sriov_enabled'] = 'true'\n return capabilities",
"def get_capabilities(disk):\n\n #TODO\n return \"Unknown\"",
"def available(self) -> bool:\n return pulumi.get(self, \"available\")",
"def is_system(self) -> undefined.UndefinedOr[bool]:",
"def otp_is_verified(request):\n auth = JSONWebTokenAuthentication()\n jwt_value = auth.get_jwt_value(request)\n if jwt_value is None:\n return False\n\n payload = jwt_decode_handler(jwt_value)\n persistent_id = payload.get('otp_device_id')\n\n if persistent_id:\n device = Device.from_persistent_id(persistent_id)\n if device is not None and device.user_id != request.user.id:\n return False\n # Valid device in JWT\n return True\n return False",
"def status(self):\n try:\n capabilities = []\n with manager.connect(host=netconf_server_ip,\n port=int(netconf_server_port),\n username= netconf_server_username,\n password=netconf_server_password,\n hostkey_verify=False) as m:\n\n for c in m.server_capabilities:\n capabilities.append(c)\n return capabilities\n\n except:\n return \"Can not establish connection with the server, something went wrong\"",
"def test_get_node_hardware_fast(self):\n pass",
"def is_gpu_available() -> bool:\n return torch.cuda.is_available()",
"def HasSystemd(self):\n _, stderr = self.RunCmdOnDevice(['systemctl'], quiet=True)\n return stderr == ''",
"def HasSystemd(self):\n _, stderr = self.RunCmdOnDevice(['systemctl'], quiet=True)\n return stderr == ''",
"def non_root_available(self):\n return self._adb_available and self._dev_emu",
"def isSciServerComputeEnvironment():\n if os.path.isfile(\"/home/idies/keystone.token\"):\n return True\n else:\n return False",
"def available(self):\n return True if self._device.status == \"AVAILABLE\" else False",
"def available(self):\n return self._adb_available and self._dev_emu and (self._is_root\n or self._is_su)",
"def available(self):\n if self._ser is not None:\n return self._ser.dtr\n else:\n return True",
"def available(self) -> bool:\n return self._thermostat.online",
"def _checkTorcsServer(self):\n isRunning = False\n if self.torcsServerProcess is not None:\n if self.torcsServerProcess.poll() is None:\n isRunning = True\n return isRunning",
"def olpc_xo_1():\n return os.path.exists('/etc/olpc-release') or \\\n os.path.exists('/sys/power/olpc-pm')",
"def _check_status(self):\n self.system_status_lock.acquire()\n info = self.system_status_proxy._getvalue()\n self.system_status_lock.release()\n return info",
"def is_dhcpserver_enabled(self):\n\t\treturn bool(call_sdk_function('PrlVirtNet_IsDHCPServerEnabled', self.handle))",
"def CheckKVM():\n return os.path.exists('/dev/kvm')",
"def is_system(self):\n\t\treturn self.__is_system",
"def available(self) -> bool:\n return self._product.online",
"def available(self) -> bool:\n return self._product.online",
"def etm_supported(self):\n res = self._dll.JLINKARM_ETM_IsPresent()\n if (res == 1):\n return True\n\n # JLINKARM_ETM_IsPresent() only works on ARM 7/9 devices. This\n # fallback checks if ETM is present by checking the Cortex ROM table\n # for debugging information for ETM.\n info = ctypes.c_uint32(0)\n index = enums.JLinkROMTable.ETM\n res = self._dll.JLINKARM_GetDebugInfo(index, ctypes.byref(info))\n if (res == 1):\n return False\n\n return True",
"def mwa_available():\n try:\n urllib2.urlopen(pref('ServerURL'), timeout=1)\n return True\n except urllib2.HTTPError, e:\n if str(e.code) == \"401\":\n return True\n else:\n return False\n except urllib2.URLError as err: \n return False",
"def is_tev_supported(self, tev):\n return self.get_handler_class().is_tev_supported(tev)",
"def is_server_crypto(self, username):\n try:\n user_option = super(UserOptionsManager, self).get(\n email=username, option_key=KEY_SERVER_CRYPTO)\n return bool(int(user_option.option_val))\n except UserOptions.DoesNotExist:\n raise CryptoOptionNotSetError",
"def _auth_plugin_available(ext):\n return ext.obj.available",
"def get_available(self) -> bool:\n return self._available",
"def can_reach_metadata_server():\n try:\n http_client = _obtain_http_client(hostname=_METADATA_IP)\n http_client.request('GET', '/', headers=REQUIRED_METADATA_HEADER)\n response = http_client.getresponse()\n metadata_flavor = response.getheader(_METADATA_FLAVOR_HEADER, '')\n return (response.status == http.HTTPStatus.OK and\n metadata_flavor == _METADATA_FLAVOR_VALUE)\n\n except (socket.error, http.client.HTTPException) as e:\n LOGGER.warning('Compute Engine Metadata server unreachable: %s', e)\n return False",
"def supported_capabilities(self) -> Optional['outputs.SupportedCapabilitiesResponse']:\n return pulumi.get(self, \"supported_capabilities\")",
"def check_for_tvh(conf):\n\n logging.info(\"Verificando TVHeadend\")\n\n resp = False\n\n logging.info(\"TVHeadend running\")\n try:\n req = urllib2.Request(\n \"http://\" + conf['tvheadendAddress'] + \":\" + conf['tvheadendPort'] + '/api/serverinfo')\n urllib2.urlopen(req)\n except urllib2.HTTPError as e_error:\n logging.info(\"TVHeadend com autenticação, utilize --help\")\n logging.info('Error code: %s', e_error.code)\n except urllib2.URLError as e_error:\n logging.info(\"TVHeadend nao encontrado\")\n logging.info('Reason: %s', e_error.reason)\n else:\n resp = True\n\n return resp",
"def get_host_power_status(self):\n\n data = self._get_host_details()\n return data['Power'].upper()",
"def required():\n kernel = __salt__['grains.item']('os') # pylint: disable=E0602,E0603\n\n # Disable rebooting for HDP clusters until that works reliably\n hadoop_distro = __salt__['pillar.get']('hadoop.distro') # pylint: disable=E0602,E0603\n if hadoop_distro == 'HDP':\n return False\n\n if kernel['os'] == \"CentOS\" or kernel['os'] == \"RedHat\":\n try:\n current_version = __salt__['cmd.run']('uname -r') # pylint: disable=E0602,E0603\n latest_version = __salt__['cmd.run']('rpm -q --last kernel') # pylint: disable=E0602,E0603\n latest_version = latest_version.split(\" \")\n latest_version = [\n version for version in latest_version if 'kernel' in version]\n latest_version = str(latest_version[0]).strip('kernel-') # pylint: disable=E1310\n if current_version == latest_version:\n return False\n except: # pylint: disable=W0702\n return False\n return True\n\n return __salt__['file.file_exists']('/var/run/reboot-required') # pylint: disable=E0602,E0603",
"async def is_premium(self) -> bool:\n e = await self.request.request(url=f'https://www.roblox.com/mobileapi/userinfo', method='get')\n return e['IsPremium']",
"def verify_capabilities(self, capabilities) -> bool:\n _pinfo = self.provider_features()\n not_supported = {} # type: Dict[str, Union[str, List[str]]]\n for key, val in capabilities.items():\n if isinstance(val, str):\n if val not in _pinfo.get(key, \"\"):\n not_supported[key] = val\n elif isinstance(val, bool):\n if not _pinfo.get(key) and val:\n not_supported[key] = \"\"\n elif isinstance(val, list):\n unsup = []\n for v in val:\n if v not in _pinfo.get(key, \"\"):\n unsup.append(v)\n if unsup:\n not_supported[key] = unsup\n if not_supported:\n logger.error(\n \"Server does not support the following features: %s\", not_supported\n )\n return False\n return True",
"def available(self) -> bool:\n return self._product and self._product.online",
"def device_available(self):\n return self._device_available",
"def is_capable(cls, requested_capability):\r\n for c in requested_capability:\r\n if not c in cls.capability:\r\n return False\r\n return True",
"def poll():\n try:\n self.connect(VAULT_TOKEN).sys.list_enabled_audit_devices()\n return True\n except hvac.exceptions.InternalServerError as ex:\n if str(ex).startswith('local node not active but active cluster node not found'):\n return False\n raise",
"def on_powerpc():\n return processor() == 'powerpc' or machine().startswith('ppc')",
"def is_powered(self) -> bool:\n return self.proto.is_powered",
"def get_tls(self):\n configured_value = self.charm_config[\"enable-tls\"]\n if configured_value:\n return configured_value\n return False",
"def available(self) -> bool:\n return self._device.is_online",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def test_guest_os(self):\n self.check_guest_os()",
"def gpu_availability():\n # assume if using tensorflow-gpu, then Nvidia GPU is available\n if is_built_with_cuda():\n return len(tf.config.list_physical_devices(\"GPU\")) > 0\n else:\n return False",
"def sox_check_is_available(self):\n result = self._process_command('sox -h', PIPE, supress_dry_run=True)\n return result[0] == 0",
"def check_passive(self) -> pulumi.Output[bool]:\n return pulumi.get(self, \"check_passive\")",
"def isSonyMtpAppInstaller(info):\n operations = frozenset([\n SonyMtpAppInstaller.PTP_OC_GetProxyMessageInfo,\n SonyMtpAppInstaller.PTP_OC_GetProxyMessage,\n SonyMtpAppInstaller.PTP_OC_SendProxyMessageInfo,\n SonyMtpAppInstaller.PTP_OC_SendProxyMessage,\n ])\n return info.manufacturer == SONY_MANUFACTURER and 'sony.net/SEN_PRXY_MSG:' in info.vendorExtension and operations <= info.operationsSupported",
"def test_get_node_hardware(self):\n pass",
"def is_power_limited(self):\n status = self.get_status_response()\n return ((status[1] & 0x10) == 0x10)\n #end is_power_limited()",
"def is_high_temp(self):\n status = self.get_status_response()\n return ((status[1] & 0x20) == 0x20)\n #end is_power_limited()",
"def is_http_boot_requested(node):\n http_boot_requested = (\n str(node.driver_info.get('enable_uefi_httpboot', 'false')).lower())\n return http_boot_requested == 'true'",
"def has_stp_cli(self):\n if self.is_escom_l:\n cmd = self.cli(\"show spanning-tree\")\n return \"Spanning tree enabled\" in cmd\n else:\n cmd = self.cli(\"show spanning-tree active\")\n return \" enabled \" in cmd",
"def is_available(self):\n\n return not rospy.is_shutdown()",
"def get_host_power_status(self):\n sushy_system = self._get_sushy_system()\n return GET_POWER_STATE_MAP.get(sushy_system.power_state)",
"def detect_supported_caps():\n result = []\n # generate list of supported capabilities\n\n # Intel RDT L3 CAT\n if common.PQOS_API.is_l3_cat_supported():\n result.append(common.CAT_L3_CAP)\n\n # Intel RDT L2 CAT\n if common.PQOS_API.is_l2_cat_supported():\n result.append(common.CAT_L2_CAP)\n\n # Intel RDT MBA\n if common.PQOS_API.is_mba_supported():\n result.append(common.MBA_CAP)\n\n if sstbf.is_sstbf_enabled():\n result.append(common.SSTBF_CAP)\n\n if power.is_sstcp_enabled():\n result.append(common.POWER_CAP)\n\n return result",
"def is_available(self) -> bool:\n return self.on_hand > self.warn_limit",
"def available(self) -> bool:\n is_avail = True\n if self.entity_description.available_fn is not None:\n is_avail = self.entity_description.available_fn(self._wrap_device)\n return self._api.available and is_avail"
] | [
"0.63089687",
"0.6192992",
"0.6138347",
"0.5952284",
"0.59215266",
"0.58764535",
"0.58491164",
"0.58059984",
"0.5796468",
"0.5784329",
"0.57265705",
"0.56829214",
"0.5666347",
"0.56659436",
"0.56633973",
"0.5661813",
"0.56546235",
"0.56484747",
"0.5633898",
"0.55766743",
"0.5540979",
"0.55345356",
"0.5526762",
"0.5501204",
"0.5492155",
"0.545134",
"0.5440849",
"0.5439995",
"0.5423846",
"0.54234016",
"0.5416403",
"0.54046017",
"0.5402865",
"0.5387529",
"0.5387529",
"0.538061",
"0.5376282",
"0.5372927",
"0.5335609",
"0.5321836",
"0.5320724",
"0.53116494",
"0.5304607",
"0.52920777",
"0.528959",
"0.5289149",
"0.528283",
"0.52819794",
"0.52819794",
"0.5263583",
"0.5260041",
"0.52547354",
"0.52525127",
"0.5252182",
"0.5251275",
"0.52449316",
"0.5238959",
"0.52354354",
"0.5233537",
"0.5222755",
"0.52111274",
"0.52025694",
"0.5191746",
"0.5186011",
"0.518265",
"0.51514506",
"0.5147733",
"0.51458114",
"0.5141865",
"0.51347536",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.5132476",
"0.51273084",
"0.51239747",
"0.5122666",
"0.512129",
"0.5119091",
"0.51125485",
"0.5104412",
"0.5099882",
"0.509302",
"0.5059031",
"0.5056694",
"0.5055715",
"0.5054608",
"0.504853"
] | 0.7238231 | 0 |
get cpu virtualization status. | def _get_cpu_virtualization(self):
try:
cpu_vt = self._get_bios_setting('ProcVirtualization')
except exception.IloCommandNotSupportedError:
return False
if cpu_vt == 'Enabled':
vt_status = True
else:
vt_status = False
return vt_status | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def status(self):\n if self.qemu.is_running():\n status = 0\n self.log.info(\"vm-status\", result=\"online\")\n for device in list(self.qemu.block_info().values()):\n self.log.info(\n \"disk-throttle\",\n device=device[\"device\"],\n iops=device[\"inserted\"][\"iops\"],\n )\n else:\n status = 1\n self.log.info(\"vm-status\", result=\"offline\")\n for volume in self.ceph.volumes:\n locker = volume.lock_status()\n self.log.info(\"rbd-status\", volume=volume.fullname, locker=locker)\n consul = locate_live_service(self.consul, \"qemu-\" + self.name)\n if consul:\n self.log.info(\n \"consul\", service=consul[\"Service\"], address=consul[\"Address\"]\n )\n else:\n self.log.info(\"consul\", service=\"<not registered>\")\n return status",
"def get_cpu_hvt(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetCpuHvt', self.handle)",
"def get_cpu_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetCpuCount', self.handle)",
"def getVirtualStatus(self,node,vmid):\n data = self.connect('get','nodes/%s/qemu/%s/status/current' % (node,vmid),None)\n return data",
"def get_cpu_mode(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetCpuMode', self.handle)",
"def VMStatus(self):\n try:\n status = self.vmInstance.get_status()\n LOGGER.info('Current status of virtual machine \"{}\": {}'.format(VM_NAME, status))\n\n except Exception as e:\n status = None\n LOGGER.debug(e)\n LOGGER.error(traceback.format_exc())\n LOGGER.error('An error occured while getting status of virtual machine \"{}\"!'.format(VM_NAME))\n\n return status",
"def get_cpu_accel_level(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetCpuAccelLevel', self.handle)",
"def get_cpu_count(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetCpuCount', self.handle)",
"def get_cpu_mode(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetCpuMode', self.handle)",
"def cpu(self) -> int:\n return pulumi.get(self, \"cpu\")",
"def cpu(self) -> Optional[Any]:\n return pulumi.get(self, \"cpu\")",
"def cpu(self) -> Optional[Any]:\n return pulumi.get(self, \"cpu\")",
"def get_cpu(self):\n pass",
"def checkCpu(self):\n cpu = self.getCpu()\n err_msg = []\n task_result = device_status = 0\n\n if cpu is None:\n err_msg.append('Get CPU info failed')\n task_result = device_status = 1\n else:\n # 以后可扩展告警条件\n pass\n return cpu, err_msg, task_result, device_status",
"def get_status(self):\n if self.vm.get_cloud_status() != \"ACTIVE\":\n return \"stopped\"\n #wait for the vm to be ready and SSH-able\n self.vm.wait_ready()\n status = self.vm.run_command(\"ctool status\", indent=0, prefix='')\n return status.strip()",
"def vcpus(self):\n return self._vcpus",
"def cpu(self):\r\n return self._cpu",
"def get_cpu_info():\n try:\n cpu_info = subprocess.check_output('lscpu')\n return cpu_info\n except OSError:\n return None",
"def get_cpu_units(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetCpuUnits', self.handle)",
"def get_cpu_limit(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetCpuLimit', self.handle)",
"def cpu():\n sin = psutil.cpu_percent()\n return round(sin / 100, 3)",
"def _compute_status(self, instance, zone):\n if self.compute_service is None:\n logging.warning('Service unavailable: unable to start GCE VM: %s (%s)',\n instance, zone)\n return\n\n info = self.compute_service.instances().get(\n project=app_identity.get_application_id(),\n instance=instance,\n zone=zone).execute()\n return info[COMPUTE_STATUS]",
"def get_cpu_model(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetCpuModel', self.handle)",
"def getcpuspeed():\n f = os.popen(\"/opt/vc/bin/vcgencmd get_config arm_freq\")\n cpu = f.read()\n return cpu",
"def get_cpu_usage(self):\n\t\treturn call_sdk_function('PrlStatCpu_GetCpuUsage', self.handle)",
"def getCpu(self):\n # todo: 完善不同设备获取信息的方法\n cpu = None\n if self.type in ['E', 'T', 'S', 'K', 'A', 'AX', 'W']:\n m = \"Current cpu utilization :\\s*([\\d\\.]+)%\"\n rt = re.search(m, self.dut.cli(\"show cpu\"))\n if rt:\n cpu = float(rt.groups()[0])\n return cpu",
"def cpu_count_logical():\n return cext.cpu_count_logical()",
"def getcpuusage(self):\n return ord(self.reg(0x11, write=1))",
"def cpuInfo(self, json, i3status_config):\n response = {'full_text': '', 'name': 'cpu_usage'}\n cpu_total, cpu_idle = self.data.cpu()\n used_cpu_percent = 1 - float(cpu_idle-self.cpu_idle)/float(cpu_total-self.cpu_total)\n self.cpu_total = cpu_total\n self.cpu_idle = cpu_idle\n\n \"\"\"\n if used_cpu_percent <= 40/100.0:\n response['color'] = i3status_config['color_good']\n elif used_cpu_percent <= 75/100.0:\n response['color'] = i3status_config['color_degraded']\n else:\n response['color'] = i3status_config['color_bad']\n \"\"\"\n response['color'] = \"#6c71c4\"\n #cpu temp\n CPUTEMP=False\n if CPUTEMP:\n cputemp=subprocess.check_output('sensors | grep \"CPU Temp\" | cut -f 2 -d \"+\" | cut -f 1 -d \" \"',shell=True)\n cputemp=cputemp[:-1].decode('utf-8')\n response['full_text'] = \" %.2f%%\" % (used_cpu_percent*100) +\" \"+cputemp\n else:\n \tresponse['full_text'] = \" %.2f%%\" % (used_cpu_percent*100)\n\n #cache the status for 10 seconds\n response['cached_until'] = time() + 10\n\n return (0, response)",
"def get_online():\n print( \"Online CPUs:\" + \"\".join( f\" {cpu}\" for cpu in _cpu.get_online_cpus() ) )",
"async def get_status(self) -> str:\n return await self.hw_device.status()",
"def get_status(self):\n return self.read_register(259, 0, 3)",
"def _getvmstat(self):\n\n vmstat_cmd = \"/usr/bin/vmstat -s\"\n\n (retval, output) = utils.safe_getstatusoutput( vmstat_cmd )\n\n if retval != 0:\n log.log( \"<system>system._getvmstat(): error calling '%s'\"%(vmstat_cmd), 5 )\n return None\n\n vmstat_dict = {}\n\n for l in string.split( output, '\\n' ):\n if string.find( l, 'swap ins' ) != -1:\n vmstat_dict['ctr_swap_ins'] = long(string.split(l)[0])\n elif string.find( l, 'swap outs' ) != -1:\n vmstat_dict['ctr_swap_outs'] = long(string.split(l)[0])\n elif string.find( l, 'pages swapped in' ) != -1:\n vmstat_dict['ctr_pages_swapped_in'] = long(string.split(l)[0])\n elif string.find( l, 'pages swapped out' ) != -1:\n vmstat_dict['ctr_pages_swapped_out'] = long(string.split(l)[0])\n elif string.find( l, 'total address trans. faults taken' ) != -1:\n vmstat_dict['ctr_total_address_trans_faults_taken'] = long(string.split(l)[0])\n elif string.find( l, 'page ins' ) != -1:\n vmstat_dict['ctr_page_ins'] = long(string.split(l)[0])\n elif string.find( l, 'page outs' ) != -1:\n vmstat_dict['ctr_page_outs'] = long(string.split(l)[0])\n elif string.find( l, 'pages paged in' ) != -1:\n vmstat_dict['ctr_pages_paged_in'] = long(string.split(l)[0])\n elif string.find( l, 'pages paged out' ) != -1:\n vmstat_dict['ctr_pages_paged_out'] = long(string.split(l)[0])\n elif string.find( l, 'reclaims from free list' ) != -1:\n vmstat_dict['ctr_reclaims_from_free_list'] = long(string.split(l)[0])\n elif string.find( l, 'total page reclaims' ) != -1:\n vmstat_dict['ctr_total_page_reclaims'] = long(string.split(l)[0])\n elif string.find( l, 'intransit blocking page faults' ) != -1:\n vmstat_dict['ctr_intransit_blocking_page_faults'] = long(string.split(l)[0])\n elif string.find( l, 'zero fill pages created' ) != -1:\n vmstat_dict['ctr_zero_fill_pages_created'] = long(string.split(l)[0])\n elif string.find( l, 'zero fill page faults' ) != -1:\n vmstat_dict['ctr_zero_fill_page_faults'] = long(string.split(l)[0])\n elif string.find( l, 'executable fill pages created' ) != -1:\n vmstat_dict['ctr_executable_fill_pages_created'] = long(string.split(l)[0])\n elif string.find( l, 'executable fill page faults' ) != -1:\n vmstat_dict['ctr_executable_fill_page_faults'] = long(string.split(l)[0])\n elif string.find( l, 'swap text pages found in free list' ) != -1:\n vmstat_dict['ctr_swap_text_pages_found_in_free_list'] = long(string.split(l)[0])\n elif string.find( l, 'inode text pages found in free list' ) != -1:\n vmstat_dict['ctr_inode_text_pages_found_in_free_list'] = long(string.split(l)[0])\n elif string.find( l, 'revolutions of the clock hand' ) != -1:\n vmstat_dict['ctr_revolutions_of_the_clock_hand'] = long(string.split(l)[0])\n elif string.find( l, 'pages scanned for page out' ) != -1:\n vmstat_dict['ctr_pages_scanned_for_page_out'] = long(string.split(l)[0])\n elif string.find( l, 'pages freed by the clock daemon' ) != -1:\n vmstat_dict['ctr_pages_freed_by_the_clock_daemon'] = long(string.split(l)[0])\n elif string.find( l, 'cpu context switches' ) != -1:\n vmstat_dict['ctr_cpu_context_switches'] = long(string.split(l)[0])\n elif string.find( l, 'device interrupts' ) != -1:\n vmstat_dict['ctr_device_interrupts'] = long(string.split(l)[0])\n elif string.find( l, 'traps' ) != -1:\n vmstat_dict['ctr_traps'] = long(string.split(l)[0])\n elif string.find( l, 'system calls' ) != -1:\n vmstat_dict['ctr_system_calls'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Successes for Page size 4K' ) != -1:\n 
vmstat_dict['ctr_Page_Select_Size_Successes_for_Page_size_4K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Successes for Page size 16K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Successes_for_Page_size_16K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Successes for Page size 64K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Successes_for_Page_size_64K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Successes for Page size 256K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Successes_for_Page_size_256K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Failures for Page size 16K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Failures_for_Page_size_16K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Failures for Page size 64K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Failures_for_Page_size_64K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Failures for Page size 256K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Failures_for_Page_size_256K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 4K' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_4K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 16K' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_16K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 64K' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_64K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 256K' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_256K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 64M' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_64M'] = long(string.split(l)[0])\n elif string.find( l, 'Page Demotions for Page size 16K' ) != -1:\n vmstat_dict['ctr_Page_Demotions_for_Page_size_16K'] = long(string.split(l)[0])\n\n return vmstat_dict",
"def GetCpuStats(self, pid):\n class ProcTaskInfo(ctypes.Structure):\n \"\"\"Struct for proc_pidinfo() call.\"\"\"\n _fields_ = [(\"pti_virtual_size\", ctypes.c_uint64),\n (\"pti_resident_size\", ctypes.c_uint64),\n (\"pti_total_user\", ctypes.c_uint64),\n (\"pti_total_system\", ctypes.c_uint64),\n (\"pti_threads_user\", ctypes.c_uint64),\n (\"pti_threads_system\", ctypes.c_uint64),\n (\"pti_policy\", ctypes.c_int32),\n (\"pti_faults\", ctypes.c_int32),\n (\"pti_pageins\", ctypes.c_int32),\n (\"pti_cow_faults\", ctypes.c_int32),\n (\"pti_messages_sent\", ctypes.c_int32),\n (\"pti_messages_received\", ctypes.c_int32),\n (\"pti_syscalls_mach\", ctypes.c_int32),\n (\"pti_syscalls_unix\", ctypes.c_int32),\n (\"pti_csw\", ctypes.c_int32),\n (\"pti_threadnum\", ctypes.c_int32),\n (\"pti_numrunning\", ctypes.c_int32),\n (\"pti_priority\", ctypes.c_int32)]\n PROC_PIDTASKINFO = 4\n def __init__(self):\n self.size = ctypes.sizeof(self)\n super(ProcTaskInfo, self).__init__() # pylint: disable=bad-super-call\n\n proc_info = ProcTaskInfo()\n if not self.libproc:\n self.libproc = ctypes.CDLL(ctypes.util.find_library('libproc'))\n self.libproc.proc_pidinfo(pid, proc_info.PROC_PIDTASKINFO, 0,\n ctypes.byref(proc_info), proc_info.size)\n\n # Convert nanoseconds to seconds.\n cpu_time = (proc_info.pti_total_user / 1000000000.0 +\n proc_info.pti_total_system / 1000000000.0)\n results = {'CpuProcessTime': cpu_time,\n 'ContextSwitches': proc_info.pti_csw}\n\n # top only reports idle wakeup count starting from OS X 10.9.\n if self.GetOSVersionName() >= os_version_module.MAVERICKS:\n results.update({'IdleWakeupCount': self._GetIdleWakeupCount(pid)})\n return results",
"def cpuinfo(self):\n \n command = 'cat /proc/cpuinfo'\n\tpipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n stdout, stderr = pipe.communicate()\n\tinfo = stdout.strip()\n cpu_type = None\n\tn_proc = 0\n\tfor line in info.split('\\n'):\n if 'model name' in line:\n\t n_proc += 1\n if cpu_type is None:\n\t\t cpu_type = ' '.join(line.split(':')[-1].strip().split())\n\t\n\treturn (cpu_type, n_proc)",
"def cpu_count(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"cpu_count\")",
"def update(self):\n self.cpus_current = psutil.cpu_percent(percpu=True)\n assert len(self.cpus_current) == len(self.cpus)\n return self.cpus_current",
"def eval_cpuset():\n\tnum_cpu = run('grep -c ^processor /proc/cpuinfo',quiet=True,warn_only=True)\n\tprint(red('Number of cpus : \\t'+num_cpu))",
"def get_cpu_stat(self, nIndex):\n\t\treturn handle_to_object(call_sdk_function('PrlStat_GetCpuStat', self.handle, nIndex))",
"def get_provisioning_state(self):\n url = \"/api/v1/machine/{}\".format(self.machine_id)\n return self.urlhandler.get(url)",
"def cpu_online_map():\r\n cpuinfo = get_cpuinfo()\r\n cpus = []\r\n for cpu in cpuinfo:\r\n cpus.append(cpu['processor']) # grab cpu number\r\n return cpus",
"def _check_status(self):\n self.system_status_lock.acquire()\n info = self.system_status_proxy._getvalue()\n self.system_status_lock.release()\n return info",
"async def sysinfo(self, ctx: Context):\n\t\tstart = time.perf_counter()\n\t\tend = time.perf_counter()\n\t\tduration = (end - start) * 1000\n\t\tcpuavg = psutil.cpu_percent(interval=None)\n\t\tmem = psutil.virtual_memory()[2]\n\t\tdurround = round(duration, 3)\n\t\tosun = os.uname()\n\t\tawait self.send(f\"System Info | CPU: {cpuavg}% | RAM: {mem}% | Latency: {durround * 1000}ms | OS: {sys.platform}\", whisper=[ctx.author.id])",
"def get_cpu_usage():\n return psutil.cpu_percent()",
"def get_cpu_use():\n cpu_cent = psutil.cpu_percent()\n return str(cpu_cent)",
"def get_isolate_cpus(self):\n\n command = \"cat /proc/cpuinfo | grep processor | awk '{print $NF}'\"\n out = run_and_getout(command)\n str_out = out.decode(self.default_code).replace('\\n', ' ').strip()\n str_out = str(str_out)\n if str_out[0] == \"0\":\n return str_out[2:]\n else:\n return str_out",
"def get_status(self):\n return self.o.read_register(self.dev_id, STATUS)",
"def allocated_cpu(self):\n return self._allocated_cpu",
"def get_cpu_speed(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetCpuSpeed', self.handle)",
"def cpu(self) -> List[float]:\n return list(map(attrgetter(\"cpu\"), self.stats))",
"def CPUStats(cls):\n\t\t# From <http://ubuntuforums.org/showthread.php?t=148781>\n\t\ttime_list = cat(\"/proc/stat\").split(\"\\n\")[0].split(\" \")[2:6]\n\t\tres = map(int, time_list)\n\t\tcls.LAST_CPU_STAT = res\n\t\treturn res",
"def update_cpu(self, vm):\n try:\n cpu_spec = self.client.get_cpu(vm.backend_id)\n if (\n cpu_spec['cores_per_socket'] != vm.cores_per_socket\n or cpu_spec['count'] != vm.cores\n ):\n self.client.update_cpu(\n vm.backend_id,\n {\n 'cores_per_socket': vm.cores_per_socket,\n 'count': vm.cores,\n },\n )\n except VMwareError as e:\n raise VMwareBackendError(e)",
"def get_cpu_usage():\n process_details = RU_OBJ.get_curr_processes()\n return json.dumps(sorted(process_details, key=lambda k: k['name']))",
"def status(name='default'):\n machine_states = dict(_status())\n return machine_states[name]",
"def test_vm_cpu_limitation_after_cpu_hot_plug(self):\n host = ll_vms.get_vm_host(vm_name=conf.QOS_VMS[0])\n host_cpu = ll_hosts.get_host_processing_units_number(host_name=host)\n testflow.step(\"Hotplug CPU to VM %s\", conf.QOS_VMS[0])\n vm_cpu_sockets = min(8, host_cpu)\n assert ll_vms.updateVm(\n positive=True, vm=conf.QOS_VMS[0], cpu_socket=vm_cpu_sockets\n )\n expected_dict = self.calculate_expected_values(\n load_dict=self.load_dict\n )\n assert sla_helpers.load_vm_and_check_the_load(\n load_dict=self.load_dict, expected_values=expected_dict\n )",
"def get_cpus():\n\n # Get the list of offline CPU cores\n offline_cpus = subprocess.check_output(\n \"lscpu | grep '^Off-line CPU(s) list:' | awk -F: '{print $2}'\",\n shell=True\n ).strip().decode()\n\n # Get the number of total CPU cores\n total_cpus = subprocess.check_output(\n \"lscpu | grep '^CPU(s):' | awk '{print $2}'\",\n shell=True\n ).strip().decode()\n\n return total_cpus, offline_cpus",
"def node_num_cpu(self) -> int:\n stdout, _, _ = RunKubectlCommand(\n ['get', 'nodes', '-o', 'jsonpath={.items[0].status.capacity.cpu}'])\n return int(stdout)",
"def get_status(self):\n\n return self._system",
"def cpus(self):\n return self.__cpus",
"def calculate_score_vm(self, vm):\n vm_cpu_utilization = self.ceilometer. \\\n statistic_aggregation(\n resource_id=vm.uuid,\n meter_name=self.INSTANCE_CPU_USAGE_METRIC_NAME,\n period=\"7200\",\n aggregate='avg'\n )\n if vm_cpu_utilization is None:\n LOG.error(\n _LE(\"No values returned by %(resource_id)s \"\n \"for %(metric_name)s\"),\n resource_id=vm.uuid,\n metric_name=self.INSTANCE_CPU_USAGE_METRIC_NAME,\n )\n vm_cpu_utilization = 100\n\n cpu_capacity = self.model.get_resource_from_id(\n resource.ResourceType.cpu_cores).get_capacity(vm)\n\n total_cores_used = cpu_capacity * (vm_cpu_utilization / 100.0)\n\n return self.calculate_weight(vm, total_cores_used, 0, 0)",
"def status(self, arguments):\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n vmrun = VMrun(self.vmx, user=self.user, password=self.password)\n\n box_name = self.box_name\n lookup = self.get(\"enable_ip_lookup\", False)\n ip = vmrun.getGuestIPAddress(wait=False, quiet=True, lookup=lookup)\n state = vmrun.checkToolsState(quiet=True)\n\n print(\"Current machine states:\" + os.linesep)\n if ip is None:\n ip = \"poweroff\"\n elif not ip:\n ip = \"unknown\"\n print(\"%s\\t%s\\t(VMware Tools %s)\" % (box_name, ip, state))\n\n if ip == \"poweroff\":\n print(os.linesep + \"The VM is powered off. To restart the VM, simply run `mech up`\")\n elif ip == \"unknown\":\n print(os.linesep + \"The VM is on. but it has no IP to connect to, VMware Tools must be installed\")\n elif state in (\"installed\", \"running\"):\n print(os.linesep + \"The VM is ready. Connect to it using `mech ssh`\")",
"def get_vm_status(self, device='FLOPPY'):\n response, vm_device_uri = self._get_vm_device_status(device)\n\n # Create RIBCL equivalent response\n # RIBCL provides this data in VM status\n # VM_APPLET = CONNECTED | DISCONNECTED\n # DEVICE = FLOPPY | CDROM\n # BOOT_OPTION = BOOT_ALWAYS | BOOT_ONCE | NO_BOOT\n # WRITE_PROTECT = YES | NO\n # IMAGE_INSERTED = YES | NO\n response_data = {}\n\n if response.get('WriteProtected', False):\n response_data['WRITE_PROTECT'] = 'YES'\n else:\n response_data['WRITE_PROTECT'] = 'NO'\n\n if response.get('BootOnNextServerReset', False):\n response_data['BOOT_OPTION'] = 'BOOT_ONCE'\n else:\n response_data['BOOT_OPTION'] = 'BOOT_ALWAYS'\n\n if response.get('Inserted', False):\n response_data['IMAGE_INSERTED'] = 'YES'\n else:\n response_data['IMAGE_INSERTED'] = 'NO'\n\n if response.get('ConnectedVia') == 'NotConnected':\n response_data['VM_APPLET'] = 'DISCONNECTED'\n # When media is not connected, it's NO_BOOT\n response_data['BOOT_OPTION'] = 'NO_BOOT'\n else:\n response_data['VM_APPLET'] = 'CONNECTED'\n\n response_data['IMAGE_URL'] = response['Image']\n response_data['DEVICE'] = device\n\n # FLOPPY cannot be a boot device\n if ((response_data['BOOT_OPTION'] == 'BOOT_ONCE') and\n (response_data['DEVICE'] == 'FLOPPY')):\n response_data['BOOT_OPTION'] = 'NO_BOOT'\n\n return response_data",
"def get_state(self):\n\t\treturn call_sdk_function('PrlVmInfo_GetState', self.handle)",
"def cpu_info(node):\n\n cpu = CpuUtils.get_cpu_info_per_node(node)\n\n item = \"Model name\"\n if item in cpu:\n print(\"{:>20}: {}\".format(item, cpu[item]))\n item = \"CPU(s)\"\n if item in cpu:\n print(\"{:>20}: {}\".format(item, cpu[item]))\n item = \"Thread(s) per core\"\n if item in cpu:\n print(\"{:>20}: {}\".format(item, cpu[item]))\n item = \"Core(s) per socket\"\n if item in cpu:\n print(\"{:>20}: {}\".format(item, cpu[item]))\n item = \"Socket(s)\"\n if item in cpu:\n print(\"{:>20}: {}\".format(item, cpu[item]))\n item = \"NUMA node(s)\"\n numa_nodes = 0\n if item in cpu:\n numa_nodes = int(cpu[item])\n for i in range(0, numa_nodes):\n item = \"NUMA node{} CPU(s)\".format(i)\n print(\"{:>20}: {}\".format(item, cpu[item]))\n item = \"CPU max MHz\"\n if item in cpu:\n print(\"{:>20}: {}\".format(item, cpu[item]))\n item = \"CPU min MHz\"\n if item in cpu:\n print(\"{:>20}: {}\".format(item, cpu[item]))\n\n if node[\"cpu\"][\"smt_enabled\"]:\n smt = \"Enabled\"\n else:\n smt = \"Disabled\"\n print(\"{:>20}: {}\".format(\"SMT\", smt))\n\n # VPP Threads\n print(\"\\nVPP Threads: (Name: Cpu Number)\")\n vpp_processes = cpu[\"vpp_processes\"]\n for i in vpp_processes.items():\n print(\" {:10}: {:4}\".format(i[0], i[1]))",
"def cpu(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cpu\")",
"def cpu(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cpu\")",
"def cpu(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cpu\")",
"def microvm(self):\n return self._context.get(\"microvm\", None)",
"async def psutil(self):\n\n # CPU\n cpu_cs = (\"CPU Count\"\n \"\\n\\t{0:<9}: {1:>2}\".format(\"Physical\", psutil.cpu_count(logical=False)) +\n \"\\n\\t{0:<9}: {1:>2}\".format(\"Logical\", psutil.cpu_count()))\n psutil.cpu_percent(interval=None, percpu=True)\n await asyncio.sleep(1)\n cpu_p = psutil.cpu_percent(interval=None, percpu=True)\n cpu_ps = (\"CPU Usage\"\n \"\\n\\t{0:<8}: {1}\".format(\"Per CPU\", cpu_p) +\n \"\\n\\t{0:<8}: {1:.1f}%\".format(\"Overall\", sum(cpu_p)/len(cpu_p)))\n cpu_t = psutil.cpu_times()\n width = max([len(\"{:,}\".format(int(n))) for n in [cpu_t.user, cpu_t.system, cpu_t.idle]])\n cpu_ts = (\"CPU Times\"\n \"\\n\\t{0:<7}: {1:>{width},}\".format(\"User\", int(cpu_t.user), width=width) +\n \"\\n\\t{0:<7}: {1:>{width},}\".format(\"System\", int(cpu_t.system), width=width) +\n \"\\n\\t{0:<7}: {1:>{width},}\".format(\"Idle\", int(cpu_t.idle), width=width))\n\n # Memory\n mem_v = psutil.virtual_memory()\n width = max([len(self._size(n)) for n in [mem_v.total, mem_v.available, (mem_v.total - mem_v.available)]])\n mem_vs = (\"Virtual Memory\"\n \"\\n\\t{0:<10}: {1:>{width}}\".format(\"Total\", self._size(mem_v.total), width=width) +\n \"\\n\\t{0:<10}: {1:>{width}}\".format(\"Available\", self._size(mem_v.available), width=width) +\n \"\\n\\t{0:<10}: {1:>{width}} {2}%\".format(\"Used\", self._size(mem_v.total - mem_v.available),\n mem_v.percent, width=width))\n mem_s = psutil.swap_memory()\n width = max([len(self._size(n)) for n in [mem_s.total, mem_s.free, (mem_s.total - mem_s.free)]])\n mem_ss = (\"Swap Memory\"\n \"\\n\\t{0:<6}: {1:>{width}}\".format(\"Total\", self._size(mem_s.total), width=width) +\n \"\\n\\t{0:<6}: {1:>{width}}\".format(\"Free\", self._size(mem_s.free), width=width) +\n \"\\n\\t{0:<6}: {1:>{width}} {2}%\".format(\"Used\", self._size(mem_s.total - mem_s.free),\n mem_s.percent, width=width))\n\n # Open files\n open_f = psutil.Process().open_files()\n open_fs = \"Open File Handles\\n\\t\"\n if open_f:\n common = os.path.commonpath([f.path for f in open_f])\n if hasattr(open_f[0], \"mode\"):\n open_fs += \"\\n\\t\".join([\"{0} [{1}]\".format(f.path.replace(common, '.'), f.mode) for f in open_f])\n else:\n open_fs += \"\\n\\t\".join([\"{0}\".format(f.path.replace(common, '.')) for f in open_f])\n else:\n open_fs += \"None\"\n\n # Disk usage\n disk_u = psutil.disk_usage(os.path.sep)\n width = max([len(self._size(n)) for n in [disk_u.total, disk_u.free, disk_u.used]])\n disk_us = (\"Disk Usage\"\n \"\\n\\t{0:<6}: {1:>{width}}\".format(\"Total\", self._size(disk_u.total), width=width) +\n \"\\n\\t{0:<6}: {1:>{width}}\".format(\"Free\", self._size(disk_u.free), width=width) +\n \"\\n\\t{0:<6}: {1:>{width}} {2}%\".format(\"Used\", self._size(disk_u.used),\n disk_u.percent, width=width))\n\n # Network\n net_io = psutil.net_io_counters()\n width = max([len(self._size(n)) for n in [net_io.bytes_sent, net_io.bytes_recv]])\n net_ios = (\"Network\"\n \"\\n\\t{0:<11}: {1:>{width}}\".format(\"Bytes sent\", self._size(net_io.bytes_sent), width=width) +\n \"\\n\\t{0:<11}: {1:>{width}}\".format(\"Bytes recv\", self._size(net_io.bytes_recv), width=width))\n\n # Boot time\n boot_s = (\"Boot Time\"\n \"\\n\\t{0}\".format(datetime.fromtimestamp(\n psutil.boot_time()).strftime(\"%Y-%m-%d %H:%M:%S\")))\n\n await self.bot.say(\"```\" +\n \"\\n\\n\".join([cpu_cs, cpu_ps, cpu_ts, mem_vs, mem_ss, open_fs, disk_us, net_ios, boot_s]) +\n \"```\")\n\n return",
"def get_total_n_cpu(self) -> int:",
"def getContainerStatus(self,node,vmid):\n data = self.connect('get','nodes/%s/lxc/%s/status/current' % (node,vmid),None)\n return data",
"def get_cpu_core():\n processor_info = subprocess.getoutput('dmidecode -t processor')\n cpu_core_value = re.findall(r'(?i)Core Count:\\s+(.*?)\\n', processor_info, re.S)[0]\n log.info('cpu_core value:{}'.format(cpu_core_value))\n if cpu_core_value:\n cpu_core = cpu_core_value\n else:\n cpu_core = ''\n return cpu_core",
"def status(self):\n return self.microblaze.state",
"def list_cpus():\n online_cpus = osutil.get_online_cpus()\n offline_cpus = POSSIBLE_CPUS - online_cpus\n print(\"Online: CPU \", shorten_cores(online_cpus))\n print(\"Offline: CPU \", shorten_cores(offline_cpus))",
"def get_cpu_usage(conn):\n prev_idle = 0\n prev_total = 0\n cpu = conn.getCPUStats(-1, 0)\n if type(cpu) == dict:\n for num in range(2):\n idle = list(conn.getCPUStats(-1, 0).values())[1]\n total = sum(list(conn.getCPUStats(-1, 0).values()))\n diff_idle = idle - prev_idle\n diff_total = total - prev_total\n diff_usage = (1000 * (diff_total - diff_idle) / diff_total + 5) / 10\n prev_total = total\n prev_idle = idle\n if num == 0:\n time.sleep(1)\n else:\n if diff_usage < 0:\n diff_usage = 0\n else:\n return {'usage': None}\n return {'usage': diff_usage}",
"def core_cpu(self):\n return self._dll.JLINKARM_CORE_GetFound()",
"def _getSystemUtil(self) -> float:\n\t\treturn psutil.virtual_memory().percent",
"def _do_get_status(self):\n logging.info(__name__ + ' : Get status of the device.')\n result = self._execute('X')\n usage = {\n 0: \"Channel not in use\",\n 1: \"Channel used for Nitrogen level\",\n 2: \"Channel used for Helium Level (Normal pulsed operation)\",\n 3: \"Channel used for Helium Level (Continuous measurement)\",\n 9: \"Error on channel (Usually means probe unplugged)\"\n }\n # current_flowing = {\n # 0 : \"Curent not flowing in Helium Probe Wire\",\n # 1 : \"Curent not flowing in Helium Probe Wire\"\n # }\n # auto_fill_status = {\n # 00 : \"End Fill (Level > FULL)\",\n # 01 : \"Not Filling (Level < FULL, Level > FILL)\",\n # 10 : \"Filling (Level < FULL, Level > FILL)\",\n # 11 : \"Start Filling (Level < FILL)\"\n # }\n return usage.get(int(result[1]), \"Unknown\")",
"def get_state(self):\n\t\treturn Job(SDK.PrlVm_GetState(self.handle)[0])",
"def virtual_machine(self) -> pulumi.Output['outputs.VirtualMachineResponse']:\n return pulumi.get(self, \"virtual_machine\")",
"def test_get_virtual_machine_count_metrics(self):\n pass",
"def status(self) -> VacuumStatus:\n return VacuumStatus(self.send(\"get_status\")[0])",
"def is_cpu_vtx_enabled(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsCpuVtxEnabled', self.handle))",
"def status(self) -> pulumi.Output['outputs.VirtualHardDiskStatusResponse']:\n return pulumi.get(self, \"status\")",
"def cpu_usage():\n return str(_cpu_usage())",
"def _apply_vpp_cpu(node):\n\n # Get main core\n cpu = \"\\n\"\n if \"vpp_main_core\" in node[\"cpu\"]:\n vpp_main_core = node[\"cpu\"][\"vpp_main_core\"]\n else:\n vpp_main_core = 0\n if vpp_main_core != 0:\n cpu += \" main-core {}\\n\".format(vpp_main_core)\n\n # Get workers\n vpp_workers = node[\"cpu\"][\"vpp_workers\"]\n vpp_worker_len = len(vpp_workers)\n if vpp_worker_len > 0:\n vpp_worker_str = \"\"\n for i, worker in enumerate(vpp_workers):\n if i > 0:\n vpp_worker_str += \",\"\n if worker[0] == worker[1]:\n vpp_worker_str += \"{}\".format(worker[0])\n else:\n vpp_worker_str += \"{}-{}\".format(worker[0], worker[1])\n\n cpu += \" corelist-workers {}\\n\".format(vpp_worker_str)\n\n return cpu",
"def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]",
"def get_cpu_percent():\n return psutil.cpu_percent(interval=1, percpu=True)",
"def cpu_count():\r\n if mp is None:\r\n return 1\r\n return mp.cpu_count()",
"def cpu_count(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"cpu_count\")",
"def status(self):\n ret = self.dev.ctrl_transfer(0xc0, 0x01, 0x0081, 0x0000, 0x0001)\n if ret[0] == 0xa0:\n return self.POWER_ON\n return self.POWER_OFF",
"def cpu_time(self):",
"def get_cpu_count():\n\n # #Check nproc. I have found it respecting the visible CPUs in SLURM:\n # try:\n # m = subprocess.run(['nproc'], stdout=subprocess.PIPE)\n # if m:\n # res = int(m.stdout.decode('ascii').replace(\"\\n\", \"\"))\n # if res > 0:\n # return res\n # except:\n # pass\n \n\n # cpuset\n # cpuset may restrict the number of *available* processors\n try:\n m = re.search(r'(?m)^Cpus_allowed:\\s*(.*)$',\n open('/proc/self/status').read())\n if m:\n res = bin(int(m.group(1).replace(',', ''), 16)).count('1')\n if res > 0:\n return res\n except IOError:\n pass\n\n # Python 2.6+\n try:\n import multiprocessing\n return multiprocessing.cpu_count()\n except (ImportError, NotImplementedError):\n pass\n\n # https://github.com/giampaolo/psutil\n try:\n import psutil\n return psutil.cpu_count() # psutil.NUM_CPUS on old versions\n except (ImportError, AttributeError):\n pass\n\n # POSIX\n try:\n res = int(os.sysconf('SC_NPROCESSORS_ONLN'))\n\n if res > 0:\n return res\n except (AttributeError, ValueError):\n pass\n\n # Windows\n try:\n res = int(os.environ['NUMBER_OF_PROCESSORS'])\n\n if res > 0:\n return res\n except (KeyError, ValueError):\n pass\n\n # jython\n try:\n from java.lang import Runtime\n runtime = Runtime.getRuntime()\n res = runtime.availableProcessors()\n if res > 0:\n return res\n except ImportError:\n pass\n\n # BSD\n try:\n sysctl = subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],\n stdout=subprocess.PIPE)\n scStdout = sysctl.communicate()[0]\n res = int(scStdout)\n\n if res > 0:\n return res\n except (OSError, ValueError):\n pass\n\n # Linux\n try:\n res = open('/proc/cpuinfo').read().count('processor\\t:')\n\n if res > 0:\n return res\n except IOError:\n pass\n\n # Solaris\n try:\n pseudoDevices = os.listdir('/devices/pseudo/')\n res = 0\n for pd in pseudoDevices:\n if re.match(r'^cpuid@[0-9]+$', pd):\n res += 1\n\n if res > 0:\n return res\n except OSError:\n pass\n\n # Other UNIXes (heuristic)\n try:\n try:\n dmesg = open('/var/run/dmesg.boot').read()\n except IOError:\n dmesgProcess = subprocess.Popen(['dmesg'], stdout=subprocess.PIPE)\n dmesg = dmesgProcess.communicate()[0]\n\n res = 0\n while '\\ncpu' + str(res) + ':' in dmesg:\n res += 1\n\n if res > 0:\n return res\n except OSError:\n pass\n\n raise Exception('Can not determine number of CPUs on this system')",
"def getCpuNum(self):\n return len(psutil.cpu_percent(interval=None, percpu=True))",
"def status(vm='', env=''):\n local( main_dir + '/vagrant/bin/vm.sh status ' + str(vm) + ' ' + str(env) )",
"def read_status(ctl):\n\tr = ctl.bus_read_struct_coherent(tm.status_addr, 'BBBBI')\n\treturn r",
"def __call__(self):\n status = self.os.popen('circusctl status monitor').read().strip()\n\n if status == 'active':\n return True\n elif status == 'stopped':\n return False",
"def health():\n return jsonify(hostname=hostname, uptime=uptime(), \\\n cpu_percent=int(cpu_percent(interval=None, percpu=False)))",
"def cpu_info():\n \n with open(Path.proc_cpuinfo()) as f:\n cpuinfo = {'processor_count': 0}\n for line in f:\n if ':' in line:\n fields = line.replace('\\t', '').strip().split(': ')\n # count processores and filter out core specific items\n if fields[0] == 'processor':\n cpuinfo['processor_count'] += 1\n elif fields[0] != 'core id':\n try:\n cpuinfo[fields[0]] = fields[1]\n except IndexError:\n pass\n return cpuinfo",
"def required_vcpu_minimum(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"required_vcpu_minimum\")"
] | [
"0.7207931",
"0.71718997",
"0.70512587",
"0.7005931",
"0.696061",
"0.67730594",
"0.66261756",
"0.66120964",
"0.6603125",
"0.65707415",
"0.655431",
"0.655431",
"0.65318954",
"0.6502956",
"0.6492939",
"0.647564",
"0.64564735",
"0.630386",
"0.62673694",
"0.62613034",
"0.6251442",
"0.624607",
"0.6228366",
"0.6196415",
"0.615268",
"0.6134088",
"0.61061525",
"0.610488",
"0.60846835",
"0.60351473",
"0.60218656",
"0.59811723",
"0.5979067",
"0.5973756",
"0.59583324",
"0.5931663",
"0.59193885",
"0.59011024",
"0.5900305",
"0.58953774",
"0.5886048",
"0.5876964",
"0.58693266",
"0.5854472",
"0.58518565",
"0.58412445",
"0.58342063",
"0.5831814",
"0.5819918",
"0.5807916",
"0.57965434",
"0.57880974",
"0.5768298",
"0.5748416",
"0.5744126",
"0.57405365",
"0.5736707",
"0.572841",
"0.5728273",
"0.571855",
"0.57152784",
"0.57127345",
"0.57036334",
"0.57006645",
"0.57000005",
"0.57000005",
"0.57000005",
"0.5689029",
"0.5676619",
"0.56747675",
"0.5671993",
"0.56636757",
"0.56515276",
"0.5636127",
"0.56353486",
"0.5634801",
"0.5631576",
"0.56276244",
"0.56187785",
"0.5604476",
"0.5601905",
"0.55998695",
"0.5593807",
"0.55561966",
"0.55392843",
"0.5534789",
"0.5532902",
"0.553051",
"0.5525095",
"0.55239",
"0.5521557",
"0.5521445",
"0.5516894",
"0.55137527",
"0.55110943",
"0.54921764",
"0.5486119",
"0.5480583",
"0.54738027",
"0.5472625"
] | 0.8119382 | 0 |
Get status of NVDIMM_N. | def _get_nvdimm_n_status(self):
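    """Get status of NVDIMM_N.

    :returns: True if the 'NvDimmNMemFunctionality' BIOS setting is
        'Enabled', False otherwise (including when the iLO command is
        not supported).
    """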
    try:
        nvdimm_n_status = self._get_bios_setting('NvDimmNMemFunctionality')
        if nvdimm_n_status == 'Enabled':
            nvn_status = True
        else:
            nvn_status = False
    except exception.IloCommandNotSupportedError:
        nvn_status = False
    return nvn_status | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getnumbarvar(self):\n numbarvar_ = ctypes.c_int32()\n res = __library__.MSK_XX_getnumbarvar(self.__nativep,ctypes.byref(numbarvar_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n numbarvar_ = numbarvar_.value\n _numbarvar_return_value = numbarvar_\n return (_numbarvar_return_value)",
"def get_status(self):\n return self.o.read_register(self.dev_id, STATUS)",
"def node_status(self) -> Optional['outputs.CSIPowerMaxStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_status(self) -> Optional['outputs.CSIVXFlexOSStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_status(self) -> Optional['outputs.CSIIsilonStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def node_status(self) -> Optional['outputs.CSIPowerStoreStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def getNDV(self):\n return len(self.globalDVList)",
"def _do_get_status(self):\n logging.info(__name__ + ' : Get status of the device.')\n result = self._execute('X')\n usage = {\n 0: \"Channel not in use\",\n 1: \"Channel used for Nitrogen level\",\n 2: \"Channel used for Helium Level (Normal pulsed operation)\",\n 3: \"Channel used for Helium Level (Continuous measurement)\",\n 9: \"Error on channel (Usually means probe unplugged)\"\n }\n # current_flowing = {\n # 0 : \"Curent not flowing in Helium Probe Wire\",\n # 1 : \"Curent not flowing in Helium Probe Wire\"\n # }\n # auto_fill_status = {\n # 00 : \"End Fill (Level > FULL)\",\n # 01 : \"Not Filling (Level < FULL, Level > FILL)\",\n # 10 : \"Filling (Level < FULL, Level > FILL)\",\n # 11 : \"Start Filling (Level < FILL)\"\n # }\n return usage.get(int(result[1]), \"Unknown\")",
"def robotiq_get_status(self, number_of_registers=3):\r\n return self._arm.robotiq_get_status(number_of_registers=number_of_registers)",
"def nwmetricmepstatus(self) :\n\t\ttry :\n\t\t\treturn self._nwmetricmepstatus\n\t\texcept Exception as e:\n\t\t\traise e",
"def getnumbarvar(self): # 3\n res,resargs = self.__obj.getnumbarvar()\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _numbarvar_return_value = resargs\n return _numbarvar_return_value",
"def get_status(self):\n return self.read_register(259, 0, 3)",
"def getVirtualStatus(self,node,vmid):\n data = self.connect('get','nodes/%s/qemu/%s/status/current' % (node,vmid),None)\n return data",
"def getnumvar(self):\n numvar_ = ctypes.c_int32()\n res = __library__.MSK_XX_getnumvar(self.__nativep,ctypes.byref(numvar_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n numvar_ = numvar_.value\n _numvar_return_value = numvar_\n return (_numvar_return_value)",
"def node_status(self) -> Optional['outputs.CSIUnityStatusNodeStatus']:\n return pulumi.get(self, \"node_status\")",
"def _nvidia_smi():\n\n status = check_output(['nvidia-smi', \n '--query-gpu=utilization.gpu,utilization.memory', \n '--format=csv'])\n status = pd.read_csv(StringIO(status.decode('utf-8')))\n \n # Reformat column names.\n # (Need the col.strip() because sometimes there are preceding spaces)\n map_cols = {'utilization.gpu [%]': 'Utilization (%)',\n 'utilization.memory [%]': 'Memory (%)'}\n status.columns = [map_cols[col.strip()] for col in status.columns]\n\n # Convert to numerical data\n for col in status.columns:\n status[col] = status[col].apply(lambda x: int(x.rstrip('%')))\n\n return status",
"def get_state(self):\n\t\treturn call_sdk_function('PrlVmInfo_GetState', self.handle)",
"def queryStatus (self) :\n\n return self.sendCommand(\"CMD_IN_QUERY_STATUS\", \"\")",
"def getnumbarcnz(self):\n nz_ = ctypes.c_int64()\n res = __library__.MSK_XX_getnumbarcnz(self.__nativep,ctypes.byref(nz_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n nz_ = nz_.value\n _nz_return_value = nz_\n return (_nz_return_value)",
"def getnumbaranz(self):\n nz_ = ctypes.c_int64()\n res = __library__.MSK_XX_getnumbaranz(self.__nativep,ctypes.byref(nz_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n nz_ = nz_.value\n _nz_return_value = nz_\n return (_nz_return_value)",
"def read_connected_emu_snr(self):\n snr = ctypes.c_uint32()\n\n result = self._lib.NRFJPROG_read_connected_emu_snr(ctypes.byref(snr))\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)\n\n return snr.value",
"async def get_status(self) -> str:\n return await self.hw_device.status()",
"def calc_nmi(ground_truth, communities_detected):\n calculated_nmi = nmi(ground_truth, communities_detected)\n return calculated_nmi",
"def get_nix(self):\n return self.dim",
"def _get_status(self, numBytes=6):\n numRegs = int(ceil(numBytes/2.0))\n\n # To do!: Implement try/except\n # Get status from the device\n response = self.client.read_holding_registers(0x07D0, numRegs, unit=0x0009)\n\n # Instantiate output as an empty list\n output = []\n\n # Fill the output with the bytes in the appropriate order\n for i in range(0, numRegs):\n output.append((response.getRegister(i) & 0xFF00) >> 8)\n output.append(response.getRegister(i) & 0x00FF)\n\n # Output the result\n return output",
"def status(self) -> pulumi.Output['outputs.VirtualHardDiskStatusResponse']:\n return pulumi.get(self, \"status\")",
"def vnN(self):\n return np.array(\n [x for x in [self.nNx, self.nNy, self.nNz] if x is not None],\n dtype=int\n )",
"def getnumintvar(self):\n numintvar_ = ctypes.c_int32()\n res = __library__.MSK_XX_getnumintvar(self.__nativep,ctypes.byref(numintvar_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n numintvar_ = numintvar_.value\n _numintvar_return_value = numintvar_\n return (_numintvar_return_value)",
"def handle_nmi(self):\n print \"NMI HANDLER\"\n self.push_word(self.get_register('PC'))\n self.push_byte(self.get_register('P'))\n self.set_flag('I', 1)\n\n # MMM: somewhere we should check if NMIs are disabled in the status register?\n # jump to the NMI vector\n target = self.read_mem_word(self.nmi_vector)\n self.set_pc(target)\n return True",
"def getN(self):\r\n return self.N",
"def xqsystem_vpn_status(self) -> models.VPNStatusResponse:\n return apply_model(\n models.VPNStatusResponse,\n self.do_get_request(\"/xqsystem/vpn_status\")\n )",
"def calculate_ndvi(self):\n self.ndvi = (self.bands[\"n\"].astype(float) - self.bands[\"r\"].astype(float)) \\\n / (self.bands[\"n\"].astype(float) + self.bands[\"r\"].astype(float))",
"def getStatus(self, numBytes):\n numRegs = int(ceil(numBytes/2.0))\n\n #To do!: Implement try/except\n #Get status from the device\n with self.lock:\n response = self.client.read_input_registers(0, numRegs)\n\n #Instantiate output as an empty list\n output = []\n\n #Fill the output with the bytes in the appropriate order\n for i in range(0, numRegs):\n output.append((response.getRegister(i) & 0xFF00) >> 8)\n output.append( response.getRegister(i) & 0x00FF)\n\n #Output the result\n return output",
"def get_full_juju_status():\n\n status = model.get_status(lifecycle_utils.get_juju_model())\n return status",
"def getnumbarcnz(self): # 3\n res,resargs = self.__obj.getnumbarcnz()\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _nz_return_value = resargs\n return _nz_return_value",
"def status(self):\n return self.readvar('\\x5F\\x95',0)",
"def n_uv(self):\n if self._n_uv is None:\n return self._instr_core.n_cells\n else:\n return self._n_uv",
"def node_statuses(self) -> pulumi.Output[Sequence['outputs.NodeBalancerConfigNodeStatus']]:\n return pulumi.get(self, \"node_statuses\")",
"def getNX(self):\n return self._get_nx( )",
"def test_controller_status_from_knx(self):\n assert DPTControllerStatus.from_knx((0x21,)) == HVACOperationMode.COMFORT\n assert DPTControllerStatus.from_knx((0x22,)) == HVACOperationMode.STANDBY\n assert DPTControllerStatus.from_knx((0x24,)) == HVACOperationMode.NIGHT\n assert (\n DPTControllerStatus.from_knx((0x28,)) == HVACOperationMode.FROST_PROTECTION\n )",
"def _nodata_value(self):\n try:\n nodata = float(self._info[\"bands\"][0][\"noDataValue\"])\n except KeyError:\n nodata = None\n return nodata",
"def enum_emu_snr(self):\n serial_numbers_len = ctypes.c_uint32(127)\n serial_numbers = (ctypes.c_uint32 * serial_numbers_len.value)()\n num_available = ctypes.c_uint32()\n\n result = self._lib.NRFJPROG_enum_emu_snr(ctypes.byref(serial_numbers), serial_numbers_len, ctypes.byref(num_available))\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)\n\n snr = [int(serial_numbers[i]) for i in range(0, min(num_available.value, serial_numbers_len.value))]\n\n if len(snr) == 0:\n return None\n else:\n return snr",
"def vnstat(unit, fmt='json'):\n assert unit == 'h' or unit == 'd' or unit == 'm'\n assert fmt == 'xml' or fmt == 'json'\n stat = subprocess.check_output([\"vnstat\", '--' + fmt, unit])\n return json.loads(stat.decode(\"utf-8\"))",
"def get_output(self):\r\n _debug('simq03b_api.get_output')\r\n \r\n x = self.query('OUTP:STAT?')\r\n if x == None: return None\r\n print('Result is ', x) # For knowing the bug that we something have\r\n return int(x)",
"def read_status(ctl):\n\tr = ctl.bus_read_struct_coherent(tm.status_addr, 'BBBBI')\n\treturn r",
"def status(self):\n ret = self.dev.ctrl_transfer(0xc0, 0x01, 0x0081, 0x0000, 0x0001)\n if ret[0] == 0xa0:\n return self.POWER_ON\n return self.POWER_OFF",
"def n(self) :\n\t\ttry :\n\t\t\treturn self._n\n\t\texcept Exception as e:\n\t\t\traise e",
"def nvr(self):\n return surrogate(self.hdr[rpm.RPMTAG_NVR])",
"def getnumbaranz(self): # 3\n res,resargs = self.__obj.getnumbaranz()\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _nz_return_value = resargs\n return _nz_return_value",
"def getnumqobjnz(self): # 3\n res,resargs = self.__obj.getnumqobjnz64()\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _numqonz_return_value = resargs\n return _numqonz_return_value",
"def get_nonzeros(self):\n return self.tape.get_nonzeros(self.machine.eval_symbol,\n self.machine.eval_state(self.state))",
"def status(self):\n url = API_PATH[\"node_status\"].format(tuneUuid=self._parentTune.uuid())\n rsp_json = self._parse(self._get(url))\n\n for status_obj in rsp_json:\n if status_obj[\"nodeUuid\"] == self.uuid():\n return self._new_instance(NodeStatus, status_obj, node=self)\n return None",
"def _get_nport(self):\n return self.__nport",
"def get_status(self):\n request_format = \"{oscillating:01d} {initialising:01d} {initialised:01d} {width:03d} \" \\\n \"{offset:+04d} {speed:02d} {acceleration:03d} {cycles:05d} {backlash:03d}\"\n status_string = request_format.format(\n oscillating=int(self.device.is_oscillating()),\n initialising=int(self.device.is_initialising()),\n initialised=int(self.device.has_been_initialised()),\n width=int(self.device.get_window_width()),\n offset=int(self.device.get_offset()),\n speed=int(self.device.get_speed()),\n acceleration=int(self.device.get_acceleration()),\n cycles=int(self.device.get_complete_cycles()),\n backlash=int(self.device.get_backlash())\n )\n return status_string",
"def getnumqobjnz(self):\n numqonz_ = ctypes.c_int64()\n res = __library__.MSK_XX_getnumqobjnz64(self.__nativep,ctypes.byref(numqonz_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n numqonz_ = numqonz_.value\n _numqonz_return_value = numqonz_\n return (_numqonz_return_value)",
"def n_value(self) -> int:\n return self.my_n",
"def getNodeStatus(self,node):\n data = self.connect('get','nodes/%s/status' % (node),None)\n return data",
"def do_rxn_status(self, arg):\n\n cmd_call = 'rxn_status'\n return self.run_cmd(arg, cmd_call)",
"def read_magnetometer_status(self):\n data = self.mag.read_byte(Register.STATUS_REG_M)\n return MagnetometerStatus(data)",
"def status(self):\n if self._vsanobj.id is None:\n raise VsanNotPresent(\"Vsan \" + str(self._vsanobj._id) + \" is not present on the switch.\")\n out = self.__show_zone_status()\n return out.get(get_key(zonekeys.STATUS, self._SW_VER), None)",
"def getnumvar(self): # 3\n res,resargs = self.__obj.getnumvar()\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _numvar_return_value = resargs\n return _numvar_return_value",
"def _get_status(self) -> dict[str, str] | None:\n if self._alias is None:\n self._alias = self._get_alias()\n\n try:\n status: dict[str, str] = self._client.list_vars(self._alias)\n except (PyNUTError, ConnectionResetError) as err:\n _LOGGER.debug(\"Error getting NUT vars for host %s: %s\", self._host, err)\n return None\n\n return status",
"def getStatus(self):\r\n return self.controller.getStatus()",
"def robotiq_status(self):\r\n return self._arm.robotiq_status",
"def visibility_status(self, uv):\n result = self._trimmed.Perform(gp_Pnt2d(uv[0], uv[1]))\n return int(result)",
"async def device_status(self, value):\n if value is None:\n return\n \n binvalue = str(bin(value))\n binarr = binvalue[::-1]\n binarr = binarr[:len(DEVICE_STATUS)]\n return_value = []\n for x in range(len(DEVICE_STATUS)):\n if binarr[len(binarr) - 1 - x] == \"1\":\n return_value.append(DEVICE_STATUS[x])\n\n return return_value",
"def get(self, request, nnid, wfver, desc):\n try:\n return_data = NNCommonManager().get_nn_node_info(nnid, wfver, desc)\n return Response(json.dumps(return_data))\n except Exception as e:\n return_data = {\"status\": \"404\", \"result\": str(e)}\n return Response(json.dumps(return_data))",
"def state(self) -> object:\n # pylint: disable=protected-access\n val = self._node.status._val\n raw_units = self._node.uom\n\n if raw_units in [TEMP_CELSIUS, TEMP_FAHRENHEIT]:\n return self.hass.config.units.temperature(val, raw_units)\n return val",
"def get_vncmode(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetVNCMode', self.handle)",
"def status(self):\n if self.qemu.is_running():\n status = 0\n self.log.info(\"vm-status\", result=\"online\")\n for device in list(self.qemu.block_info().values()):\n self.log.info(\n \"disk-throttle\",\n device=device[\"device\"],\n iops=device[\"inserted\"][\"iops\"],\n )\n else:\n status = 1\n self.log.info(\"vm-status\", result=\"offline\")\n for volume in self.ceph.volumes:\n locker = volume.lock_status()\n self.log.info(\"rbd-status\", volume=volume.fullname, locker=locker)\n consul = locate_live_service(self.consul, \"qemu-\" + self.name)\n if consul:\n self.log.info(\n \"consul\", service=consul[\"Service\"], address=consul[\"Address\"]\n )\n else:\n self.log.info(\"consul\", service=\"<not registered>\")\n return status",
"def status(self) -> Optional[int]:\n return pulumi.get(self, \"status\")",
"def status(self):\n return self._bp.get_motor_status(self._port)",
"def test_device_status(self):\n #071031031E3067\n self.ms.add_response({'\\x14071031031E3067\\x0D': 'PA\\x0D'})\n # Network / Device ID\n response = self.upb.status((49, 3))\n self.assertTrue(response)",
"def n():\n # For documentation purposes",
"def state(self):\n return self.device.status(station=self.station_number)",
"def is_nvme(self):\n if self.server_params[-1].bdev_class.value == \"nvme\":\n return True\n return False",
"def n(self):\n return self.module.n",
"def GetNBit(self, *args, **kwargs):\n pass",
"def show_vnic(client, resource_group_name, vm_name, nic_name):\n virtual_machine = client.get(resource_group_name, vm_name)\n for nic in virtual_machine.nics:\n if nic.virtual_nic_name == nic_name:\n return nic\n return None",
"def state(self):\n data = self.coordinator.data[self._host_name][self._node_name][self._vm_id]\n if data[\"status\"] == \"running\":\n return STATE_ON\n return STATE_OFF",
"def parse_nnvmmk():\n out = lspci_run('-nnvmmk')\n pcibus = list()\n\n blocks = out.split('\\n\\n')\n\n for block in blocks:\n device = dict()\n for element in block.splitlines():\n split_element = element.split(':')\n key = split_element[0]\n data = ':'.join(split_element[1:]).strip()\n if key in ('Slot', 'ProgIf', 'Driver'):\n device[key.lower()] = data\n continue\n if key in ('Class', 'Vendor', 'Device', 'SVendor', 'SDevice'):\n key_prefix = key.lower()\n device[key_prefix + '_name'] = _get_lspci_name(data)\n device[key_prefix + '_id'] = _get_lspci_id(data)\n continue\n if key == 'Rev':\n device['revision'] = data\n continue\n if not device:\n continue\n pcibus.append(device)\n\n return pcibus",
"def status(self):\n \n tmpl1 = \"\"\"%-20s%-52s[%s]\"\"\"\n tmpl2 = \"\"\"%-20s%-52s\\n\"\"\"\n # print tmpl1 % (\"Machine Name\", \"IP Addresses\", \"Status\")\n # print 80 * \"-\"\n # print self.get_image()\n if self.cloudserver:\n # let's build the IPs first\n status = self.cloudserver.status\n \n else:\n status = \"OFF\"\n\n res2=\"\"\n ip1 = \"%s:%s\" % (self.networks[0], self.ip_addresses[self.networks[0]])\n if len(self.networks) > 1:\n res2 += \"\\n\"\n for network in self.networks[1:]:\n ipstr = \"%s:%s\" % (network, self.ip_addresses[network])\n res2+=tmpl2 % (\"-\", ipstr)\n # print res2\n # if len(self.ip_addresses.keys()) > 1:\n # ip1 = self.ip_addresses.values()[0]\n res1 = tmpl1 % (self.machine_name, ip1, status)\n return res1 + res2",
"def _check_status(self):\n self.system_status_lock.acquire()\n info = self.system_status_proxy._getvalue()\n self.system_status_lock.release()\n return info",
"def get_pir_status(self):\n response = self.parent.pir.status()\n return response[0]",
"def print_status(self, status):\n\n print \"Status (%X): \" % status\n if (status & STATUS_MEM_0_EMPTY) > 0:\n print \"\\tMEMORY 0 IS EMPTY\"\n if (status & STATUS_MEM_1_EMPTY) > 0:\n print \"\\tMEMORY 1 IS EMPTY\"",
"def update_status(self):\n num_nbrs = len(self.neighbors)\n if not 2 <= num_nbrs <= 3:\n self.status = 0\n elif num_nbrs == 3:\n self.status = 1",
"def get_num_hidden(self):\n return self.rnn_config.num_hidden",
"def getNonlinearProcessingMode(self, channel, unitCode=0):\n resp = self.XAPCommand('NLP', channel, unitCode=unitCode)\n return int(resp)",
"def nspin_defaults(n):\n v, J = getWINDNMRdefault(n)\n v_ppm = v / 300\n return v_ppm, J",
"def check_reboot_in_progress(con):\n k, v = con.kv.get(\"service/rebootmgr/reboot_in_progress\")\n if v and \"Value\" in v.keys() and v[\"Value\"]:\n return v[\"Value\"].decode()\n return False",
"def get_qnet_status(self, request, suffix=''):\n abs_path = self.qnet_domain + self.qnet_status\n # check absolute path and used element\n if abs_path != '' and self.qnet_element != '':\n # try to request\n try:\n url = self._format_api_url(abs_path)\n response = self._request_get(url)\n except Exception as e:\n return HTTPServerError(body = \"GET Qnet status error: %s\" % str(e))\n\n # return result\n return HTTPOk(headers={'Content-Type': 'application/json'},\n body=json.dumps(response['wstatus']))\n\n else:\n return HTTPServerError(body=\"Bad request to the Qnet platform\")",
"def get_vm_status(self, device='FLOPPY'):\n response, vm_device_uri = self._get_vm_device_status(device)\n\n # Create RIBCL equivalent response\n # RIBCL provides this data in VM status\n # VM_APPLET = CONNECTED | DISCONNECTED\n # DEVICE = FLOPPY | CDROM\n # BOOT_OPTION = BOOT_ALWAYS | BOOT_ONCE | NO_BOOT\n # WRITE_PROTECT = YES | NO\n # IMAGE_INSERTED = YES | NO\n response_data = {}\n\n if response.get('WriteProtected', False):\n response_data['WRITE_PROTECT'] = 'YES'\n else:\n response_data['WRITE_PROTECT'] = 'NO'\n\n if response.get('BootOnNextServerReset', False):\n response_data['BOOT_OPTION'] = 'BOOT_ONCE'\n else:\n response_data['BOOT_OPTION'] = 'BOOT_ALWAYS'\n\n if response.get('Inserted', False):\n response_data['IMAGE_INSERTED'] = 'YES'\n else:\n response_data['IMAGE_INSERTED'] = 'NO'\n\n if response.get('ConnectedVia') == 'NotConnected':\n response_data['VM_APPLET'] = 'DISCONNECTED'\n # When media is not connected, it's NO_BOOT\n response_data['BOOT_OPTION'] = 'NO_BOOT'\n else:\n response_data['VM_APPLET'] = 'CONNECTED'\n\n response_data['IMAGE_URL'] = response['Image']\n response_data['DEVICE'] = device\n\n # FLOPPY cannot be a boot device\n if ((response_data['BOOT_OPTION'] == 'BOOT_ONCE') and\n (response_data['DEVICE'] == 'FLOPPY')):\n response_data['BOOT_OPTION'] = 'NO_BOOT'\n\n return response_data",
"def node_statuses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NodeBalancerConfigNodeStatusArgs']]]]:\n return pulumi.get(self, \"node_statuses\")",
"def getN(self)->int:\n return self.n",
"def get_value(self, m: int, n: int) -> int:\n\t\treturn self.matrix[m][n]",
"def getNodeStatus(self,status = 0):\n if status:\n self.node_status = status\n return self.node_status",
"def get_vmedia_status(self):\n\n try:\n sushy_system = self._get_sushy_system()\n vmedia_status = sushy_system.vmedia\n except sushy.exceptions.SushyError as e:\n msg = (self._('The vmedia is not found. Error '\n '%(error)s') %\n {'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)\n return vmedia_status",
"def getClusterStatus(self):\n data = self.connect('get','cluster/status', None)\n return data",
"def getdimbarvarj(self,j_):\n dimbarvarj_ = ctypes.c_int32()\n res = __library__.MSK_XX_getdimbarvarj(self.__nativep,j_,ctypes.byref(dimbarvarj_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n dimbarvarj_ = dimbarvarj_.value\n _dimbarvarj_return_value = dimbarvarj_\n return (_dimbarvarj_return_value)",
"def get_vncport(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetVNCPort', self.handle)"
] | [
"0.5753764",
"0.57007307",
"0.5678485",
"0.56483656",
"0.5635664",
"0.5618904",
"0.559551",
"0.5575454",
"0.5551961",
"0.5545708",
"0.5526919",
"0.55176646",
"0.54776466",
"0.54100347",
"0.54067737",
"0.5321118",
"0.5198335",
"0.51954293",
"0.51463753",
"0.512988",
"0.5129837",
"0.5119609",
"0.5119074",
"0.5078924",
"0.50723445",
"0.5028012",
"0.50212806",
"0.5001112",
"0.49810457",
"0.49771798",
"0.4973879",
"0.49728945",
"0.4967822",
"0.49622327",
"0.49614185",
"0.49527147",
"0.49333632",
"0.49326897",
"0.49258876",
"0.49243647",
"0.49191967",
"0.49105635",
"0.48908544",
"0.48900676",
"0.48836166",
"0.48793346",
"0.4877207",
"0.48752493",
"0.48702204",
"0.48693013",
"0.48692584",
"0.48629245",
"0.48586297",
"0.48563886",
"0.48484182",
"0.48473978",
"0.48442322",
"0.4838065",
"0.48371917",
"0.48348415",
"0.483332",
"0.48327512",
"0.48209855",
"0.4815406",
"0.4798996",
"0.47984365",
"0.47962686",
"0.47942564",
"0.47907546",
"0.47866458",
"0.4781332",
"0.47802958",
"0.47743937",
"0.47734493",
"0.47730568",
"0.4771968",
"0.47697732",
"0.47660443",
"0.47653544",
"0.47634372",
"0.47621053",
"0.47517416",
"0.4747291",
"0.47447434",
"0.47441563",
"0.47425023",
"0.47364983",
"0.47254094",
"0.47235972",
"0.47225317",
"0.4715582",
"0.47095633",
"0.4699629",
"0.46963713",
"0.46957183",
"0.46695125",
"0.46680766",
"0.46653768",
"0.46587715",
"0.46578956"
] | 0.81031567 | 0 |
Return a new, configured argument parser | def build_argument_parser():
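    """Return a new, configured ArgumentParser for the batch-rename tool.

    Assumes ``ArgumentParser`` (from argparse) and ``DEFAULT_GLOB_PATTERN``
    are imported/defined elsewhere in the module.
    """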
description="A simple tool to batch rename given files."
parser = ArgumentParser(description=description)
parser.add_argument("-i", "--input-list", required=False,
help="the path to the input list file.")
parser.add_argument("-p", "--glob-pattern", default=DEFAULT_GLOB_PATTERN,
help="a glob pattern to filter input files.")
return parser | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_parser():\n parser = ArgumentParser(\n description=__doc__, formatter_class=ArgumentDefaultsHelpFormatter\n )\n parser.add_argument(\n \"-s\", \"--sentence\", dest=\"sentence\", help=\"sentence, splitted by ';'\"\n )\n return parser",
"def get_parser(self):\n parser = ArgumentParser()\n parser.add_argument(\n \"-c\", default='', dest='cmd',\n help=(\"just like python -c or sh -c (pass in a command)\"))\n parser.add_argument(\n \"-e\", \"--exec\", default='', dest='execfile',\n help='a filename to execute')\n parser.add_argument(\n \"-v\", '--version', default=False, dest='version',\n action='store_true',\n help=(\"show version information\"))\n parser.add_argument(\"--shell\", dest=\"shell\",\n default=False, help=\"application shell\",\n action='store_true')\n parser.add_argument(\"--config\", dest='config',\n default=\"\",\n help=\"use config file\")\n return parser",
"def get_parser():\n\n parser = parser.ArgumentParser()\n return parser",
"def build_parser(self, parser: ArgumentParser) -> None:",
"def get_parser(self):\n parser = argparse.ArgumentParser(description='Short sample app')\n\n parser.add_argument('-a', action=\"store_true\", default=False)\n parser.add_argument('-b', action=\"store\", dest=\"b\")\n parser.add_argument('-c', action=\"store\", dest=\"c\", type=int)\n return parser",
"def _create_parser(self):\n default_options = self._create_defaults()\n\n all_categories = ['build', 'whitespace']\n\n mock_stderr = self._MockStdErr()\n\n return ArgumentParser(\n all_categories=all_categories,\n base_filter_rules=[],\n default_options=default_options,\n mock_stderr=mock_stderr,\n usage='test usage')",
"def get_parser():\n p = argparse.ArgumentParser(description='such a good program')\n p.add_argument('infile')\n p.add_argument('outfile')\n return p",
"def parser_create():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-c\", \"--config-file\", type=str, help=\"yaml configuration file name\")\n return parser.parse_args()",
"def get_parser():\n parser = ArgumentParser(description=__doc__,\n formatter_class=ArgumentDefaultsHelpFormatter,\n prog='pv2')\n subparsers = parser.add_subparsers(dest='cmd')\n # subparsers.add_parser('selfcheck',\n # add_help=False,\n # help=\"Self-check of the sst toolkit.\")\n # parser.add_argument('--version',\n # action='version',\n # version=('sst %s' % str(sst.__version__)))\n subparsers.add_parser('eval',\n add_help=False,\n parents=[evaluate.get_parser()],\n formatter_class=ArgumentDefaultsHelpFormatter,\n help=(\"Evaluate a single image\"))\n subparsers.add_parser('train',\n add_help=False,\n parents=[train.get_parser()],\n formatter_class=ArgumentDefaultsHelpFormatter,\n help=(\"Train a new model.\"))\n subparsers.add_parser('plot',\n add_help=False,\n parents=[plot.get_parser()],\n formatter_class=ArgumentDefaultsHelpFormatter,\n help=(\"Plot summary information.\"))\n return parser",
"def _create_parser(self):\n parser = argparse.ArgumentParser(\n description=description,\n formatter_class=argparse.RawTextHelpFormatter\n )\n\n parser.add_argument(\n '-v',\n '--verbose',\n action='store_true',\n default=False,\n help='Verbose mode (turn on logging.info)')\n\n parser.add_argument(\n '-d',\n '--debug',\n action='store_true',\n default=False,\n help='Debug (turn on logging.debug)')\n\n return parser",
"def parser(self):\n return self.arg_parser",
"def get_parser():\n if sys.version_info[0] < 3:\n # Using a version of Python < 3.\n parser = ArgumentParser(version=VERSION) # pylint: disable=E1123\n else:\n parser = ArgumentParser()\n parser.add_argument('--version', action='version', version=VERSION)\n\n subparsers = parser.add_subparsers(\n title='actions', help='Types of zappa commands',\n dest='command')\n\n parser_update_stack = subparsers.add_parser(\n 'update', help='Update a zappa deploy')\n parser_update_stack.add_argument(\n '--name', required=True,\n help='Name of the deployment (dev, prod, etc.)')\n\n parser_create_stack = subparsers.add_parser(\n 'deploy', help='Create a zappa deploy')\n parser_create_stack.add_argument(\n '--name', required=True,\n help='Name of the deployment (dev, prod, etc.)')\n\n return parser",
"def get_argparser(self):\n parser = argparse.ArgumentParser(description='Command Configuration')\n parser.add_argument('--coin', choices=['bitcoin', 'ethereum', 'litecoin'], default='bitcoin')\n parser.add_argument('--start_date', default='2019-10-21')\n parser.add_argument('--end_date', default='2019-10-31')\n parser.add_argument('--language', choices=['en', 'it', 'es', 'fr', 'de', 'ru', 'zh'], default='en')\n\n argparser = parser.parse_args()\n return argparser.__dict__",
"def create_parser(self, prog_name, subcommand):\n return OptionParser(prog=prog_name,\n usage=self.usage(subcommand),\n version=self.get_version(),\n option_list=self.option_list)",
"def create_parser(self, prog_name, subcommand):\r\n return OptionParser(prog=prog_name,\r\n usage=self.usage(subcommand),\r\n version='',\r\n add_help_option = False,\r\n option_list=self.option_list)",
"def command_line_argument_parser() -> argparse.ArgumentParser:\n parser = argparse.ArgumentParser(\n description=description(),\n epilog=epilog(),\n formatter_class=argparse.RawDescriptionHelpFormatter,\n )\n return parser",
"def command_line_argument_parser() -> argparse.ArgumentParser:\n parser = argparse.ArgumentParser(\n description=description(),\n epilog=epilog(),\n formatter_class=argparse.RawDescriptionHelpFormatter,\n )\n return parser",
"def get_parser():\n # Parent and only parser.\n parser = argparse.ArgumentParser(\n add_help=True,\n formatter_class=argparse.RawTextHelpFormatter)\n parser.add_argument('mode', action='store',\n choices=range(len(MODES)),\n type=int,\n help='Select mode of file download.\\n'\n ' e.g: 0(rated) or 1(list).')\n parser.add_argument('torr_page', action='store',\n choices=range(len(TORRENTS)),\n type=int,\n help='Select tracking page to download from.\\n'\n ' e.g: 0 to .. ' + str(len(TORRENTS)-1) + '.')\n parser.add_argument('str_search', action='store',\n type=str,\n help='Input torrent string to search.\\n'\n ' e.g: \"String search\"')\n return(parser)",
"def build_parser(usage, **kwargs):\n return BetterArgumentParser(usage=usage, version=VERSION, **kwargs)",
"def get_parser():\n parser = argparse.ArgumentParser(description='Parser des liens sur les sites Jahia et Wordpress.')\n parser.add_argument('ficher_des_sites', help='le fichier contenant les sites a parser.')\n parser.add_argument('-v', '--version', help='affiche la version du parser',\n action='version', version='%(prog)s ' + __version__)\n return parser",
"def get_parser():\n parser = argparse.ArgumentParser(description='Parser des liens sur les sites Jahia et Wordpress.')\n parser.add_argument('ficher_des_sites', help='le fichier contenant les sites a parser.')\n parser.add_argument('-v', '--version', help='affiche la version du parser',\n action='version', version='%(prog)s ' + __version__)\n return parser",
"def get_parser():\n parser = ArgumentParser(description=\"Script used to generate Freeplane \"\n + \"mindmap files\")\n\n # This is use when people in Linaro aren't using their email address.\n parser.add_argument('--disable-altname', required=False,\n action=\"store_true\", default=False,\n help=\"Use alternative names (from cfg.yaml) to the tree\")\n\n parser.add_argument('--assignee', required=False,\n action=\"store_true\", default=False,\n help=\"Add assignees (from cfg.yaml) to the tree\")\n\n parser.add_argument('-a', '--author', required=False,\n action=\"store_true\", default=False,\n help=\"If set, git statistic only count the commit \"\n + \"from the author\")\n\n parser.add_argument('-p', '--path', required=False, action=\"store\",\n default=\"/home/jyx/devel/optee_projects/reference/linux\",\n help='Full path to the kernel tree')\n\n parser.add_argument('-s', '--since', required=False, action=\"store\",\n default=None,\n help='Used with the git log --since command')\n\n parser.add_argument('-o', '--output', required=False, action=\"store\",\n default=\"linux-kernel.mm\",\n help='Output filename')\n\n parser.add_argument('-v', required=False, action=\"store_true\",\n default=False,\n help='Output some verbose debugging info')\n\n return parser",
"def get_parser():\n parser = argparse.ArgumentParser()\n parser.add_argument('--dataset', type=str)\n parser.add_argument('--method', type=str)\n parser.add_argument('--size_part', type=float, default=None)\n parser.add_argument('--start', type=int, default=0)\n parser.add_argument('--count', type=int, default=None)\n return parser",
"def create_parser() -> configargparse.ArgParser:\n parser = configargparse.ArgParser(default_config_files=[\n \"/etc/lookout/analyzer.conf\", \"~/.config/lookout/analyzer.conf\"],\n formatter_class=ArgumentDefaultsHelpFormatterNoNone,\n auto_env_var_prefix=\"lookout_\")\n slogging.add_logging_args(parser)\n subparsers = parser.add_subparsers(help=\"Commands\", dest=\"command\")\n\n def add_parser(name, help):\n return subparsers.add_parser(\n name, help=help, formatter_class=ArgumentDefaultsHelpFormatterNoNone)\n\n list_parser = add_parser(\"list\", \"Print globally available analyzers.\")\n list_parser.set_defaults(handler=list_analyzers)\n\n run_parser = add_parser(\n \"run\", \"Launch a new service with the specified (one or more) analyzers.\")\n run_parser.set_defaults(handler=run_analyzers)\n add_analyzer_arg(run_parser)\n run_parser.add(\"-c\", \"--config\", is_config_file=True,\n help=\"Path to the configuration file with option defaults.\")\n run_parser.add(\"-s\", \"--server\", required=True,\n help=\"Lookout server address, e.g. localhost:1234.\")\n run_parser.add(\"-w\", \"--workers\", type=int, default=1,\n help=\"Number of threads which process Lookout events.\")\n add_model_repository_args(run_parser)\n run_parser.add_argument(\"--request-server\", default=\"auto\",\n help=\"Address of the data retrieval service. \\\"same\\\" means --server.\")\n\n init_parser = add_parser(\"init\", \"Initialize the model repository.\")\n init_parser.set_defaults(handler=init_repo)\n add_model_repository_args(init_parser)\n\n tool_parser = add_parser(\"tool\", \"Invoke the tooling of a given analyzer.\")\n tool_parser.set_defaults(handler=run_analyzer_tool)\n tool_parser.add(\"analyzer\", help=\"Fully qualified package name with an analyzer.\")\n tool_parser.add(\"args\", nargs=argparse.REMAINDER)\n\n package_parser = add_parser(\n \"package\",\n \"Package several analyzers to a Docker container and write a sample Docker Compose config \"\n \"for Lookout.\")\n package_parser.set_defaults(handler=package_cmdline_entry)\n add_analyzer_arg(package_parser)\n package_parser.add(\"-w\", \"--workdir\", help=\"Generate files in this directory.\",\n default=tempfile.mkdtemp(prefix=\"lookout_package_\"))\n package_parser.add(\"--requirements\", help=\"Path to a custom requirements.txt\")\n package_parser.add(\"-r\", \"--repo\", help=\"GitHub repository name to watch. \"\n \"Example: \\\"src-d/lookout\\\".\",\n required=True)\n package_parser.add(\"-u\", \"--user\", help=\"GitHub user name which will send review comments.\",\n required=True)\n paturl = \"https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/\" # noqa\n package_parser.add(\"-t\", \"--token\", help=\"GitHub token for -u/--user. See \" + paturl,\n required=True)\n package_parser.add(\"-y\", \"--yes\", help=\"Run the commands in the end.\",\n action=\"store_true\")\n package_parser.add(\"-n\", \"--no\", help=\"Do not run the commands in the end.\",\n action=\"store_true\")\n return parser",
"def get_parser():\n module_parser = ArgumentParser(\n formatter_class=ArgumentDefaultsHelpFormatter)\n module_parser.add_argument(\"-i\", dest=\"data_path\", type=str,\n help=\"the location dataset\")\n module_parser.add_argument(\"-o\", dest=\"output_path\", type=str,\n help='base dir for outputs')\n module_parser.add_argument(\"-subdir\", dest=\"subdir\", type=str,\n choices=['test', 'train', 'val', 'all'],\n help='subdir: trn, test, val, or all ...')\n module_parser.add_argument(\"-n\", dest=\"n_train\", type=int,\n help='n: number of images for training')\n module_parser.add_argument(\"-Rx\", dest=\"x_res\", type=int,\n help='x resulution for final img')\n module_parser.add_argument(\"-Ry\", dest=\"y_res\", type=int,\n help='y resolution of final image')\n module_parser.add_argument(\"-d\", dest=\"d\",\n type=int,\n default=0,\n help='debug')\n return module_parser",
"def _to_parser(self) -> ArgumentParser:\n parser = ArgumentParser()\n for arg in self.arg_list:\n parser.add_argument(arg._flag, **arg._parser_kwargs)\n return parser",
"def make_parser():\n parser = argparse.ArgumentParser(description='')\n parser.add_argument('-p', '--platform', dest='platform', type=str, required=False, default='')\n return parser",
"def make_parser():\n parser = argparse.ArgumentParser(description=config.DESCRIPTION)\n parser.add_argument('url_file', metavar='URL_FILE', type=str,\n help=config.HELP_URL_FILE)\n parser.add_argument('-d', metavar='DEST_DIR', dest='destination_dir', default=config.DEFAULT_DESTINATION_DIR, type=str,\n help=config.HELP_DESTINATION_DIR)\n parser.add_argument('-l', metavar='LOG_FILE', dest='log_file', default=config.DEFAULT_LOG_FILE, type=str,\n help=config.HELP_LOG_FILE % config.DEFAULT_LOG_FILE)\n\n return parser",
"def get_parser():\n from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter\n parser = ArgumentParser(description=__doc__,\n formatter_class=ArgumentDefaultsHelpFormatter)\n parser.add_argument(\"-m\", \"--model\",\n dest=\"modelfile\",\n help=\"where is the model file (.tar)?\",\n metavar=\"FILE\",\n type=lambda x: utils.is_valid_file(parser, x),\n required=True)\n parser.add_argument(\"-i\", \"--input\",\n dest=\"inputvec\",\n help=\"\"\"a file which contains an input vector\n [[0.12, 0.312, 1.21 ...]]\"\"\",\n metavar=\"FILE\",\n type=lambda x: utils.is_valid_file(parser, x),\n required=True)\n return parser",
"def create_cli_parser() -> argparse.ArgumentParser:\n parser = argparse.ArgumentParser()\n parser.add_argument(\"config\", nargs=\"?\", help=\"path to yaml configuration file\")\n return parser",
"def get_parser():\n parser = argparse.ArgumentParser(description=\"Update golang.org/x/<name> in vendor folder\")\n parser.add_argument('-q', '--quiet', dest='verbose', action='store_false', help='work quietly')\n parser.add_argument('--revision', help='update deps to this revision', default='')\n parser.add_argument('name', help='name of the golang.org/x/ package. Can be empty', default='', nargs='?')\n return parser",
"def get_base_argument_parser(\n **kwargs\n) -> ArgumentParser:\n\n parser = ArgumentParser(\n allow_abbrev=False,\n add_help=False,\n **kwargs\n )\n\n parser.add_argument(\n '--help',\n action='store_true',\n help='Pass this flag to print usage and argument descriptions.'\n )\n\n parser.add_argument(\n '--log',\n choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],\n help='Logging level.'\n )\n\n return parser",
"def get_parser(name):\n parser = argparse.ArgumentParser(name, formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n # print default value always\n parser.add_argument = partial(parser.add_argument, help=' ')\n return parser",
"def get_parser(name):\n parser = argparse.ArgumentParser(name, formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n # print default value always\n parser.add_argument = partial(parser.add_argument, help=' ')\n return parser",
"def build_arg_parser():\n\n main = ArgumentParser(description='AMFinder command-line arguments.',\n allow_abbrev=False,\n formatter_class=RawTextHelpFormatter)\n\n subparsers = main.add_subparsers(dest='run_mode', required=True,\n help='action to be performed.')\n\n _ = training_subparser(subparsers)\n _ = prediction_subparser(subparsers)\n _ = diagnostic_subparser(subparsers)\n\n return main",
"def create_parser(self, prog_name):\n return OptionParser(\n\t\t\tprog=prog_name,\n\t\t\t#usage=self.usage(subcommand),\n\t\t\toption_list=self.option_list\n\t\t)",
"def build_parser(self, add_help=True):\n self.parser = argparse.ArgumentParser(\n description=self.description, add_help=add_help\n )\n self.parser.prog = f\"python -m {self.package}.{self.module_name}\"\n self.parser.add_argument(\n \"config_file\", help=\"Path/name of YAML configuration file for NEMO nowcast.\"\n )",
"def _build_arg_parser():\n parser = argparse.ArgumentParser(\n description=_description,\n add_help=True,\n )\n add_generic_args(parser)\n add_diff_args(parser)\n add_filename_args(parser, [\"base\", \"remote\"])\n\n parser.add_argument(\n '-o', '--output',\n default=None,\n help=\"if supplied, the diff is written to this file. \"\n \"Otherwise it is printed to the terminal.\")\n\n return parser",
"def init_argparse(self, parser=None):\n if parser:\n p = parser\n else:\n p = argparse.ArgumentParser()\n\n # generic options\n p.add_argument(\"-q\", \"--quiet\", action=\"store_true\",\n help=\"log only errors and warnings\")\n p.add_argument(\"-v\", \"--verbose\", action=\"count\",\n help=\"log verbosely\")\n p.add_argument(\"-V\", \"--version\", action=\"store_true\",\n help=\"print version info and exit\")\n p.add_argument(\"--set\", action=\"append\",\n help=\"override config setting (--set 'PARAM=VAL')\")\n p.add_argument(\"command\", help=\"command name\")\n p.add_argument(\"args\", nargs=argparse.REMAINDER, help=\"arguments for command\")\n return p",
"def get_parser():\n parser = argparse.ArgumentParser()\n # parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)\n parser.add_argument('files', nargs='+')\n return parser",
"def _get_parserobj(self, option_list):\n if '--version' in self.parselines[0]:\n if 'optparse' == self.parser_type:\n parser = OptionParser(version=\"dummy\")\n else:\n parser = ArgumentParser(\n version='dummy',\n formatter_class=RawDescriptionHelpFormatter)\n else:\n if 'optparse' == self.parser_type:\n parser = OptionParser()\n else:\n parser = ArgumentParser(\n formatter_class=RawDescriptionHelpFormatter)\n for opt in option_list:\n if opt['short'] and self.parser_type is 'optparse':\n parser.add_option(opt['short'], opt['long'],\n metavar=opt['metavar'],\n help=opt['help'].strip())\n elif not opt['short'] and self.parser_type is 'optparse':\n parser.add_option(opt['long'],\n metavar=opt['metavar'],\n help=opt['help'].strip())\n elif opt['short'] and self.parser_type is 'argparse':\n parser.add_argument(opt['short'], opt['long'],\n metavar=opt['metavar'],\n help=opt['help'].strip())\n elif not opt['short'] and self.parser_type is 'argparse':\n parser.add_argument(opt['long'],\n metavar=opt['metavar'],\n help=opt['help'].strip())\n else:\n raise InvalidParserTypeError(\"Invalid paresr type.\")\n return parser",
"def create_parser():\n parser = argparse.ArgumentParser()\n parser.add_argument('url', help='url to scrape')\n return parser",
"def _CreateParser():\n parser = commandline.ArgumentParser(description=__doc__, caching=True)\n\n # TODO(rcui): Have this use the UI-V2 format of having source and target\n # device be specified as positional arguments.\n parser.add_argument('--force', action='store_true', default=False,\n help='Skip all prompts (i.e., for disabling of rootfs '\n 'verification). This may result in the target '\n 'machine being rebooted.')\n sdk_board_env = os.environ.get(cros_chrome_sdk.SDKFetcher.SDK_BOARD_ENV)\n parser.add_argument('--board', default=sdk_board_env,\n help=\"The board the Chrome build is targeted for. When \"\n \"in a 'cros chrome-sdk' shell, defaults to the SDK \"\n \"board.\")\n parser.add_argument('--build-dir', type='path',\n help='The directory with Chrome build artifacts to '\n 'deploy from. Typically of format '\n '<chrome_root>/out/Debug. When this option is used, '\n 'the GYP_DEFINES environment variable must be set.')\n parser.add_argument('--target-dir', type='path',\n default=None,\n help='Target directory on device to deploy Chrome into.')\n parser.add_argument('-g', '--gs-path', type='gs_path',\n help='GS path that contains the chrome to deploy.')\n parser.add_argument('--nostartui', action='store_false', dest='startui',\n default=True,\n help=\"Don't restart the ui daemon after deployment.\")\n parser.add_argument('--nostrip', action='store_false', dest='dostrip',\n default=True,\n help=\"Don't strip binaries during deployment. Warning: \"\n 'the resulting binaries will be very large!')\n parser.add_argument('-p', '--port', type=int, default=remote.DEFAULT_SSH_PORT,\n help='Port of the target device to connect to.')\n parser.add_argument('-t', '--to',\n help='The IP address of the CrOS device to deploy to.')\n parser.add_argument('-v', '--verbose', action='store_true', default=False,\n help='Show more debug output.')\n parser.add_argument('--mount-dir', type='path', default=None,\n help='Deploy Chrome in target directory and bind it '\n 'to the directory specified by this flag.'\n 'Any existing mount on this directory will be '\n 'umounted first.')\n parser.add_argument('--mount', action='store_true', default=False,\n help='Deploy Chrome to default target directory and bind '\n 'it to the default mount directory.'\n 'Any existing mount on this directory will be '\n 'umounted first.')\n\n group = parser.add_argument_group('Advanced Options')\n group.add_argument('-l', '--local-pkg-path', type='path',\n help='Path to local chrome prebuilt package to deploy.')\n group.add_argument('--sloppy', action='store_true', default=False,\n help='Ignore when mandatory artifacts are missing.')\n group.add_argument('--staging-flags', default=None, type=ValidateGypDefines,\n help=('Extra flags to control staging. Valid flags are - '\n '%s' % ', '.join(chrome_util.STAGING_FLAGS)))\n # TODO(stevenjb): Remove --strict entirely once removed from the ebuild.\n group.add_argument('--strict', action='store_true', default=False,\n help='Deprecated. Default behavior is \"strict\". Use '\n '--sloppy to omit warnings for missing optional '\n 'files.')\n group.add_argument('--strip-flags', default=None,\n help=\"Flags to call the 'strip' binutil tool with. \"\n \"Overrides the default arguments.\")\n group.add_argument('--ping', action='store_true', default=False,\n help='Ping the device before connection attempt.')\n group.add_argument('--mash', action='store_true', default=False,\n help='Copy additional files for mus+ash. 
Will not fit in '\n 'the default target-dir.')\n\n group = parser.add_argument_group(\n 'Metadata Overrides (Advanced)',\n description='Provide all of these overrides in order to remove '\n 'dependencies on metadata.json existence.')\n group.add_argument('--target-tc', action='store', default=None,\n help='Override target toolchain name, e.g. '\n 'x86_64-cros-linux-gnu')\n group.add_argument('--toolchain-url', action='store', default=None,\n help='Override toolchain url format pattern, e.g. '\n '2014/04/%%(target)s-2014.04.23.220740.tar.xz')\n\n # GYP_DEFINES that Chrome was built with. Influences which files are staged\n # when --build-dir is set. Defaults to reading from the GYP_DEFINES\n # enviroment variable. WILL BE DEPRECATED.\n parser.add_argument('--gyp-defines', default=None, type=ValidateGypDefines,\n help=argparse.SUPPRESS)\n\n # GN_ARGS (args.gn) used to build Chrome. Influences which files are staged\n # when --build-dir is set. Defaults to reading from the GN_ARGS env variable.\n # CURRENLY IGNORED, ADDED FOR FORWARD COMPATABILITY.\n parser.add_argument('--gn-args', default=None, type=ValidateGnArgs,\n help=argparse.SUPPRESS)\n\n # Path of an empty directory to stage chrome artifacts to. Defaults to a\n # temporary directory that is removed when the script finishes. If the path\n # is specified, then it will not be removed.\n parser.add_argument('--staging-dir', type='path', default=None,\n help=argparse.SUPPRESS)\n # Only prepare the staging directory, and skip deploying to the device.\n parser.add_argument('--staging-only', action='store_true', default=False,\n help=argparse.SUPPRESS)\n # Path to a binutil 'strip' tool to strip binaries with. The passed-in path\n # is used as-is, and not normalized. Used by the Chrome ebuild to skip\n # fetching the SDK toolchain.\n parser.add_argument('--strip-bin', default=None, help=argparse.SUPPRESS)\n return parser",
"def arg_parser():\n import argparse\n return argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)",
"def get_argument_parser():\n parser = argparse.ArgumentParser()\n parser.add_argument(\n 'names',\n help=(\n 'list of name-location pairs '\n '(location can be nat/hhs/cen/state or specific location labels)'))\n parser.add_argument(\n '--first',\n '-f',\n type=int,\n help='first epiweek override')\n parser.add_argument(\n '--last',\n '-l',\n type=int,\n help='last epiweek override')\n parser.add_argument(\n '--epiweek',\n '-w',\n type=int,\n help='epiweek override')\n parser.add_argument(\n '--test',\n '-t',\n default=False,\n action='store_true',\n help='dry run only')\n parser.add_argument(\n '--valid',\n '-v',\n default=False,\n action='store_true',\n help='do not fall back to stable wILI; require unstable wILI')\n return parser",
"def parse() -> Namespace:\n parser = ArgumentParser()\n parser.add_argument(\n \"--config\",\n \"-c\",\n default=\"qwauto.cfg\",\n help=\"Config file. Defaults to qwauto.cfg.\",\n )\n return parser.parse_args()",
"def create_arguments_parser():\n description = \"Statically analyse SBML files for modelling errors\"\n parent_arg_parser = rate_checker_sbml.create_arguments_parser()\n parser = argparse.ArgumentParser(description=description,\n parents=[parent_arg_parser])\n return parser",
"def get_arg_parser(parser=None):\n # add arguments that are specific to the component\n if parser is None:\n parser = argparse.ArgumentParser(description=__doc__)\n\n parser.add_argument(\"--raw_training_data\", type=str, required=True, help=\"\")\n parser.add_argument(\"--raw_testing_data\", type=str, required=True, help=\"\")\n parser.add_argument(\"--train_output\", type=str, required=True, help=\"\")\n parser.add_argument(\"--test_output\", type=str, required=True, help=\"\")\n parser.add_argument(\"--metrics_prefix\", type=str, required=False, help=\"Metrics prefix\")\n return parser",
"def _init_parser():\n\t\n\t_parser = argparse.ArgumentParser()\n\t_parser.add_argument(\"--pull\", help=\"pull scripts from UR3\", action=\"store_true\")\n\t_parser.add_argument(\"--create\", help=\"create data base from script files\", action=\"store_true\")\n\t_parser.add_argument(\"--clear\", help=\"clear all data base\", action=\"store_true\")\n\treturn _parser",
"def parser(cls, *args, **kwargs):\n\n parser = ArgumentParser(*args, **kwargs)\n parser.add_argument('-a', \"--address\",\n help=\"Force entry point address\", default=None)\n parser.add_argument('-b', \"--dumpblocs\", action=\"store_true\",\n help=\"Log disasm blocks\")\n parser.add_argument('-z', \"--singlestep\", action=\"store_true\",\n help=\"Log single step\")\n parser.add_argument('-d', \"--debugging\", action=\"store_true\",\n help=\"Debug shell\")\n parser.add_argument('-g', \"--gdbserver\", type=int,\n help=\"Listen on port @port\")\n parser.add_argument(\"-j\", \"--jitter\",\n help=\"Jitter engine. Possible values are: gcc (default), tcc, llvm, python\",\n default=\"gcc\")\n parser.add_argument(\n '-q', \"--quiet-function-calls\", action=\"store_true\",\n help=\"Don't log function calls\")\n parser.add_argument('-i', \"--dependencies\", action=\"store_true\",\n help=\"Load PE and its dependencies\")\n\n for base_cls in cls._classes_():\n base_cls.update_parser(parser)\n return parser",
"def _configure_args(self, parser: ArgumentParser) -> ArgumentParser:\n pass",
"def create_parser():\n now = datetime.datetime.today()\n default_date = \"{}-{}-{}\".format(now.day, now.month, now.year)\n parser = argparse.ArgumentParser(description=\"Git plugin for automatic insertion of @since and @author annotations \"\n \"into *.java source files in a project.\",\n epilog=\"© Avner & Oded\")\n parser.add_argument(\"-v\", \"--version\", help=\"Display the version of this plugin\", action='store_true')\n parser.add_argument(\"--since\", nargs='?', help=\"Add the @since annotations to project\", const=default_date)\n parser.add_argument(\"--author\", nargs='?', help=\"Add the @author annotations to project\", const=getpass.getuser())\n\n return parser",
"def _setup_parser():\n parser = argparse.ArgumentParser(add_help=True)\n parser.add_argument('--eval_model', type=str, default=None)\n parser.add_argument('--stack', type=int, default=1)\n parser.add_argument('--flare', action='store_true')\n parser.add_argument('--mixreg', action='store_true')\n\n env_group = parser.add_argument_group(\"Env Args\")\n env_group.add_argument('--env_name', type=str, default=ENV_NAME)\n env_group.add_argument('--num_envs', type=int, default=NUM_ENVS)\n env_group.add_argument('--num_levels', type=int, default=NUM_LEVELS)\n env_group.add_argument('--start_level', type=int, default=START_LEVEL)\n\n agent_group = parser.add_argument_group(\"Agent Args\")\n PPOAgent.add_to_argparse(agent_group)\n\n model_group = parser.add_argument_group(\"Model Args\")\n ImpalaPPO.add_to_argparse(model_group)\n\n return parser",
"def _createConfigParser(self):\n return ConfigParser.ConfigParser()",
"def create_parser():\n parser = argparse.ArgumentParser(\n formatter_class=argparse.ArgumentDefaultsHelpFormatter,\n add_help=False)\n parser.add_argument(\n '--help', '-h',\n action='store_true',\n dest='help',\n help=\"\"\"show this help message and exit\"\"\")\n parser.add_argument(\n '--verbose', '-v',\n action='count',\n default=0,\n help=\"\"\"Enable verbose output from '%(prog)s'. A second and third\n '-v' increases verbosity.\"\"\")\n parser.add_argument(\n '--sequential',\n action='store_true',\n help=\"\"\"Execute analyzer sequentialy.\"\"\")\n parser.add_argument(\n '--cdb',\n metavar='<file>',\n default=\"compile_commands.json\",\n help=\"\"\"The JSON compilation database.\"\"\")\n return parser",
"def build_cli(self):\n parser = argparse.ArgumentParser(\"xsgen\",\n conflict_handler='resolve', argument_default=NotSpecified)\n for plugin in self.plugins:\n plugin.update_argparser(parser)\n self.parser = parser\n return parser",
"def create_parser():\n parser = argparse.ArgumentParser()\n parser.add_argument('webpage', help='webpage to search')\n\n return parser",
"def get_parser():\n _program_name = Path(__file__).stem\n example = f''' Example: >> {_program_name} sample.odb\\n '''\n parser = ArgumentParser(description=__doc__.split('..')[0], # Don't include module author part of doc string\n formatter_class=ArgumentDefaultsHelpFormatter, epilog=example, prog=_program_name)\n parser.add_argument(nargs=1,\n dest='input_file',\n type=str,\n help='odb or odbreport file for extracting data',\n metavar='sample.odb')\n parser.add_argument('-o', '--output-file',\n dest='output_file',\n type=str,\n help='file for printing output',\n metavar='sample.h5')\n parser.add_argument('-f', '--output-file-type',\n dest='output_type',\n choices=['yaml', 'json', 'h5'],\n type=str,\n default='h5',\n help='Type of file in which to store output data',\n metavar='h5')\n parser.add_argument('-r', '--odb-report-args',\n dest='odb_report_args',\n type=str,\n help='Arguments to give to the odbreport command. Require the ``option=value`` interface style.',\n metavar='\"step=step1 results\"')\n parser.add_argument('-a', '--abaqus-command',\n dest='abaqus_command',\n type=str,\n default=_settings._default_abaqus_command,\n help='Abaqus command to use',\n metavar='/path/to/abaqus')\n parser.add_argument('-d', '--delete-report-file',\n action=\"store_true\",\n dest='delete_report_file',\n default=False,\n help='Delete after parsing the file created by the odbreport command')\n parser.add_argument('-v', '--verbose',\n action=\"store_true\",\n dest='verbose',\n default=False,\n help='Print all messages')\n return parser",
"def create_parser():\n p = NewParser()\n\n p.add_argument('reference', type=str,\n help = \"Fasta reference file that reads were mapped to.\")\n\n p.add_argument('gff', type=str,\n help = \"GFF file containing reference genome annotations.\")\n\n p.add_argument('vcf', type=str,\n help = \"VCF file to parse.\")\n\n args = p.parse_args(sys.argv[1:])\n return args",
"def setup_parser():\r\n parser = argparse.ArgumentParser(description='Freeseer Recording Utility',\r\n formatter_class=argparse.RawTextHelpFormatter)\r\n parser.add_argument(\"-v\", \"--version\", action='version',\r\n version=textwrap.dedent('''\\\r\n Freeseer {version} ({platform})\r\n Python {pymajor}.{pyminor}.{pymicro}\r\n PyGst {pygst_version}\r\n PyQt {pyqt_version}\r\n Qt {qt_version}\r\n Yapsy {yapsy_version}\r\n '''.format(version=__version__,\r\n platform=sys.platform,\r\n pymajor=sys.version_info.major,\r\n pyminor=sys.version_info.minor,\r\n pymicro=sys.version_info.micro,\r\n pygst_version=pygst._pygst_version,\r\n pyqt_version=QtCore.PYQT_VERSION_STR,\r\n qt_version=QtCore.QT_VERSION_STR,\r\n yapsy_version=yapsy.__version__)))\r\n\r\n # Configure Subparsers\r\n subparsers = parser.add_subparsers(dest='app', help='Command List')\r\n setup_parser_record(subparsers)\r\n setup_parser_config(subparsers)\r\n setup_parser_talk(subparsers)\r\n setup_parser_report(subparsers)\r\n setup_parser_upload(subparsers)\r\n return parser",
"def build_parser():\n def commaSplitter(str):\n \"\"\"\n Argparse a comm-seperated list\n \"\"\"\n # leave this here as a reminder of what I should do to make the argument parsing more robust\n\n # if sqrt != int(sqrt):\n # msg = \"%r is not a perfect square\" % string\n # raise argparse.ArgumentTypeError(msg)\n # return value\n return str.split(',')\n\n def existing_file(fname):\n \"\"\"\n Argparse type for an existing file\n \"\"\"\n if not os.path.isfile(fname):\n raise ValueError(\"Invalid file: \" + str(fname))\n return fname\n\n parser = argparse.ArgumentParser(description=__doc__)\n\n parser.add_argument('-p', '--prefix', help='dont really know what this does...',\n action='store', default='patient', dest='prefix')\n parser.add_argument('-d', '--date', help='dont really know what this does...',\n action='store', default='', dest='sampledate')\n parser.add_argument('template', type=argparse.FileType('r'), help='BEAST config template file')\n parser.add_argument('fasta', type=argparse.FileType('r'), help='file of sequences (in FASTA format)')\n\n return parser",
"def make_argument_parser():\n parser = Benchmark.make_argument_parser()\n parser.add_argument('--skip-reference',\n action='store_true',\n help='Skip the reference simulation run.')\n return parser",
"def get_parser():\n parser = argparse.ArgumentParser(\n description=\"\"\"Start a Classy Vision training job.\n\n This can be used for training on your local machine, using CPU or GPU, and\n for distributed training. This script also supports Tensorboard, Visdom and\n checkpointing.\"\"\"\n )\n\n parser = add_generic_args(parser)\n return parser",
"def get_parser():\n # parse parameters\n parser = argparse.ArgumentParser(description=\"Evaluate sentences with RTTL\")\n\n # main parameters\n parser.add_argument(\"--dump_path\", type=str, default=\"./dumped/\", help=\"Experiment dump path\")\n parser.add_argument(\"--exp_name\", type=str, default=\"\", help=\"Experiment name\")\n parser.add_argument(\"--exp_id\", type=str, default=\"\", help=\"Experiment ID\")\n parser.add_argument(\"--batch_size\", type=int, default=32, help=\"Number of sentences per batch\")\n\n # model / output paths\n parser.add_argument(\"--model_path\", type=str, default=\"\", help=\"Model path\")\n parser.add_argument(\"--output_path\", type=str, default=\"\", help=\"Output path for scores\")\n parser.add_argument(\"--input_path\", type=str, default=\"\", help=\"Input path for source sentences\")\n\n # parser.add_argument(\"--max_vocab\", type=int, default=-1, help=\"Maximum vocabulary size (-1 to disable)\")\n # parser.add_argument(\"--min_count\", type=int, default=0, help=\"Minimum vocabulary count\")\n\n # source language / target language\n parser.add_argument(\"--src_lang\", type=str, default=\"\", help=\"Source language\")\n parser.add_argument(\"--tgt_lang\", type=str, default=\"\", help=\"Target language\")\n\n return parser",
"def get_parser():\n parser = ArgumentParser(\n description='phpMyAdmin work reporting tool\\n\\nGenerates list of commits and issues handled in given period.',\n epilog='Credentials can be also stored in ~/.config/phpmyadmin:\\n\\n[github]\\nuser=USER\\ntoken=TOKEN',\n formatter_class=RawDescriptionHelpFormatter,\n )\n parser.add_argument(\n '-u', '--user',\n help='GitHub username, used for both reporting and authentication'\n )\n parser.add_argument(\n '-t', '--token',\n help='GitHub authentication token'\n )\n parser.add_argument(\n '-s', '--start-date',\n type=dateutil.parser.parse,\n default=datetime.now() - timedelta(days=7),\n help='Starting datetime, defaults to 7 days ago'\n )\n parser.add_argument(\n '-e', '--end-date',\n type=dateutil.parser.parse,\n default=datetime.now(),\n help='Ending datetime, defaults to current timestamp'\n )\n parser.add_argument(\n '-f', '--format',\n choices=('markdown', ),\n default='markdown',\n help='Output format',\n )\n parser.add_argument(\n '-w', '--weekly',\n action='store_true',\n help='Weekly report not including private repositories'\n )\n parser.add_argument(\n '-W', '--last-week',\n action='store_true',\n help='Create report for last week'\n )\n parser.add_argument(\n '-M', '--last-month',\n action='store_true',\n help='Create report for last month'\n )\n parser.add_argument(\n '--this-week',\n action='store_true',\n help='Create report for this week'\n )\n return parser",
"def create_arg_parser():\n server_modes = ['builtin', 'waitress']\n\n parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument('h', metavar='HOST', help='Server HOST (e.g. \"localhost\")', type=str)\n parser.add_argument('p', metavar='PORT', help='Server PORT (e.g. \"5001\")', type=int)\n parser.add_argument('m', metavar='SERVER_MODE', help=\", \".join(server_modes), choices=server_modes, type=str)\n parser.add_argument('--debug', help=\"Run builtin server in debug mode\", action='store_true', default=False)\n\n return parser",
"def initCmdLineParser():\n\n # Init parser and all general flags\n logging.debug(\"initiating command line option parser\")\n usage = \"usage: %prog [options]\"\n parser = OptionParser(usage)\n parser.add_option(\"--gen-answer-file\", help=\"Generate a template of an answer file, using this option excludes all other option\")\n parser.add_option(\"--answer-file\", help=\"Runs the configuration in none-interactive mode, extracting all information from the \\\n configuration file. using this option excludes all other option\")\n parser.add_option(\"--no-mem-check\", help=\"Disable minimum memory check\", action=\"store_true\", default=False)\n\n # For each group, create a group option\n for group in controller.getAllGroups():\n groupParser = OptionGroup(parser, group.getKey(\"DESCRIPTION\"))\n\n for param in group.getAllParams():\n cmdOption = param.getKey(\"CMD_OPTION\")\n paramUsage = param.getKey(\"USAGE\")\n optionsList = param.getKey(\"OPTION_LIST\")\n useDefault = param.getKey(\"USE_DEFAULT\")\n if not useDefault:\n if optionsList:\n groupParser.add_option(\"--%s\" % cmdOption, metavar=optionsList, help=paramUsage, choices=optionsList)\n else:\n groupParser.add_option(\"--%s\" % cmdOption, help=paramUsage)\n\n # Add group parser to main parser\n parser.add_option_group(groupParser)\n\n return parser",
"def make_parser():\n p = argparse.ArgumentParser(\n description=\"Visualize and analyze error from oblique/straight tag observations\"\n )\n\n p.add_argument(\"-n\", help=\"name of the test in the config file\")\n\n p.add_argument(\"-t\", help=\"throw out bad tags\", action=\"store_true\")\n\n p.add_argument(\"-v\", help=\"visualize data\", action=\"store_true\")\n\n p.add_argument(\"-i\", help=\"print result data\", action=\"store_true\")\n\n return p",
"def build_parser ():\n\n parser = argparse.ArgumentParser (description = __doc__)\n\n parser.add_argument (\n '-v', '--verbose', dest='verbose', action='count',\n help='increase output verbosity', default=0\n )\n parser.add_argument (\n '-l', '--live', dest='get_live_data', action='store_true',\n help='get live data from OSM database',\n )\n parser.add_argument (\n '-e', '--edit', action='store_true',\n help='edit the OSM database',\n )\n parser.add_argument (\n '-u', '--user', dest='my_edits', action='store_true',\n help='only report about my edits',\n )\n parser.add_argument (\n '--min-length', dest=\"min_length\", type=float, default=1000.0,\n help='way must be longer than this to get a ref (in m) (default=1000)',\n )\n parser.add_argument (\n '--batch-size', dest=\"batch_size\", type=int, default=10,\n help='apply OSM edits in changesets of this size (default=10)',\n )\n return parser",
"def make_parser():\n parser_ = argparse.ArgumentParser(\n description=\"\"\"\n A tool to retrieve history from\n (almost) any browser on (almost) any platform\n\n██████╗ ██████╗ ██████╗ ██╗ ██╗███████╗███████╗██████╗ ██╗ ██╗██╗███████╗████████╗ ██████╗ ██████╗ ██╗ ██╗\n██╔══██╗██╔══██╗██╔═══██╗██║ ██║██╔════╝██╔════╝██╔══██╗ ██║ ██║██║██╔════╝╚══██╔══╝██╔═══██╗██╔══██╗╚██╗ ██╔╝\n██████╔╝██████╔╝██║ ██║██║ █╗ ██║███████╗█████╗ ██████╔╝█████╗███████║██║███████╗ ██║ ██║ ██║██████╔╝ ╚████╔╝\n██╔══██╗██╔══██╗██║ ██║██║███╗██║╚════██║██╔══╝ ██╔══██╗╚════╝██╔══██║██║╚════██║ ██║ ██║ ██║██╔══██╗ ╚██╔╝\n██████╔╝██║ ██║╚██████╔╝╚███╔███╔╝███████║███████╗██║ ██║ ██║ ██║██║███████║ ██║ ╚██████╔╝██║ ██║ ██║\n╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝\n \"\"\", # noqa: E501\n epilog=\"\"\"\n Checkout the GitHub repo\n https://github.com/pesos/browser-history\n if you have any issues or want to help contribute\"\"\",\n formatter_class=RawDescriptionHelpFormatter,\n )\n\n parser_.add_argument(\n \"-t\",\n \"--type\",\n default=\"history\",\n help=f\"\"\"\n argument to decide whether to retrieve history or bookmarks.\n Should be one of {AVAILABLE_TYPES}.\n Default is history.\"\"\",\n )\n parser_.add_argument(\n \"-b\",\n \"--browser\",\n default=\"all\",\n help=f\"\"\"\n browser to retrieve history or bookmarks from. Should be one\n of all, default, {AVAILABLE_BROWSERS}.\n Default is all (gets history or bookmarks from all browsers).\n \"\"\",\n )\n\n parser_.add_argument(\n \"-f\",\n \"--format\",\n default=\"infer\",\n help=f\"\"\"\n Format to be used in output. Should be one of {AVAILABLE_FORMATS}.\n Default is infer (format is inferred from the output file's\n extension. If no output file (-o) is specified, it defaults to csv)\"\"\",\n )\n\n parser_.add_argument(\n \"-o\",\n \"--output\",\n default=None,\n help=\"\"\"\n File where history output or bookmark output is to be written.\n If not provided, standard output is used.\"\"\",\n )\n\n parser_.add_argument(\n \"-p\",\n \"--profile\",\n default=None,\n help=\"\"\"\n Specify the profile from which to fetch history or bookmarks. If\n not provided all profiles are fetched\n \"\"\",\n )\n\n parser_.add_argument(\n \"--show-profiles\",\n default=None,\n metavar=\"BROWSER\",\n help=f\"\"\"\n List all available profiles for a given browser where browser\n can be one of default, {AVAILABLE_BROWSERS}. The browser\n must always be provided.\n \"\"\",\n )\n\n parser_.add_argument(\n \"-v\", \"--version\", action=\"version\", version=\"%(prog)s \" + __version__\n )\n\n return parser_",
"def parser(cls, *, with_showtb=False):\n parser = argparse.ArgumentParser(add_help=False)\n parser.add_argument('-v', '--verbose', action='count', default=0,\n help='produce more output')\n parser.add_argument('-q', '--quiet', action='count', default=0,\n help='produce less output')\n parser.add_argument('--dry-run', dest='dryrun', action='store_true',\n default=False, help='do not actually make changes')\n\n if with_showtb:\n parser.add_argument('--traceback', action='store_true',\n default=False, help='do not hide tracebacks')\n\n return parser",
"def genargs() -> ArgumentParser:\n parser = ArgumentParser(prog=\"configure\", description=\"Configure a LinkML model repository\")\n parser.add_argument(\"configfile\", help=\"Model configuration file\", type=argparse.FileType('r'))\n parser.add_argument(\"--templatedir\", help=\"Template source directory (Default: template_configurator/templates)\",\n default=default_template_directory)\n parser.add_argument(\"-t\", \"--targetdir\", help=\"Output target directory (Default: current working directory\",\n default=os.getcwd())\n parser.add_argument(\"--reset\", help=\"Hard reset -- regenerate all files from scratch\", action=\"store_true\")\n return parser",
"def _get_argument_parser(fct):\n parser = argparse.ArgumentParser(\n prog=\"oval_office %s\" % fct.__name__.replace(\"office_\", \"\"),\n description=_get_cmd_description(fct))\n return parser",
"def create_argument_parser(cls):\n\n parser = super().create_argument_parser()\n\n # GitHub options\n group = parser.add_argument_group('GitHub arguments')\n\n group.add_argument(\"--owner\", required=True,\n help=\"GitHub owner\")\n group.add_argument(\"--repository\", required=True,\n help=\"GitHub repository\")\n group.add_argument(\"--sleep-for-rate\", dest='sleep_for_rate',\n action='store_true',\n help=\"sleep for getting more rate\")\n group.add_argument(\"--min-rate-to-sleep\", dest='min_rate_to_sleep',\n default=MIN_RATE_LIMIT, type=int,\n help=\"sleep until reset when the rate limit reaches this value\")\n\n return parser",
"def create_parser():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--getErrors\",\n type=str,\n default=None,\n help=\"get error messages - send \\'yes\\' \")\n parser.add_argument(\"--host\",\n type=str,\n default=\"localhost\",\n help=\"Host of redis. Default : localhost\")\n parser.add_argument(\"--port\",\n type=int,\n default=6379,\n help=\"Port of redis. Default : 6379\")\n parser.add_argument(\"--db\",\n type=int,\n default=0,\n help=\"Db of redis. Default : 0\")\n parser.add_argument(\"--cleanTemp\",\n type=str,\n default=None,\n help=\"clean trash files from db - send \\'yes\\' \")\n return parser",
"def get_parser():\n parser = argparse.ArgumentParser(description=\"Tweet Downloader\")\n parser.add_argument(\"-d\",\n \"--data\",\n dest=\"data\",\n help=\"Read data from file or display initial setting\",\n default=False)\n\n return parser",
"def _create_parser():\n parser = ArgumentParser(description=\"A CLI that sends messages to an Azure event hub.\")\n\n parser.add_argument(\"--connection-string\", type=str, required=True,\n help=\"The Azure event hub connection string\")\n\n parser.add_argument(\"--name\", type=str, required=True,\n help=\"The Azure event hub name\")\n\n parser.add_argument(\"--interval\", type=int, required=False,\n help=\"The number of seconds to wait between sends. Defaults to 10 seconds.\")\n\n parser.add_argument(\"--what-if\", type=bool, required=False,\n help=\"Run the program without sending messages to the Event Hub. \"\n \"The app will log what would have been sent to the Event Hub.\")\n\n return parser",
"def create_arg_parser():\n arg_parser = argparse.ArgumentParser()\n arg_parser.add_argument(\n '-f',\n '--file',\n required=True,\n help='Name of clean data file'\n )\n return arg_parser",
"def init_parser():\n parser = OptionParser()\n\n parser.add_option(\n \"-d\",\n \"--debug\",\n dest=\"debug\",\n help=\"Toggle debugging\",\n action=\"store_true\",\n default=False,\n )\n\n parser.add_option(\n \"-f\",\n \"--questions-file\",\n dest=\"file\",\n help=(\"Use this file instead of the default \"\n \"questions.yaml\"),\n metavar=\"FILE\",\n )\n\n parser.add_option(\n \"-p\",\n \"--generate-pdf\",\n dest=\"pdf\",\n help=(\"Generate the speaker PDF\"),\n action=\"store_true\",\n default=False,\n )\n\n parser.add_option(\n \"-v\",\n \"--version\",\n dest=\"version\",\n help=\"Show program version\",\n action=\"store_true\",\n default=False,\n )\n\n options = parser.parse_args()[0]\n return options",
"def _make_parser(self, **kwargs):\n\n kwargs.setdefault('help', self.help)\n kwargs.setdefault('formatter_class',argparse.RawDescriptionHelpFormatter)\n kwargs.setdefault('description', self.description)\n kwargs.setdefault('name', self.name)\n names = (kwargs.get('name') or self.name).split('.')\n \n def _get_subparser(a):\n if a._subparsers:\n for action in a._subparsers._actions:\n if isinstance(action, argparse._SubParsersAction):\n return action\n raise RuntimeError('could not find adequate subparser')\n return a.add_subparsers(dest='command',\n title='commands',\n metavar='COMMAND')\n def _get_parser(node, idx, names):\n name = names[idx]\n if name in node.choices:\n return node.choices[name]\n args = {\n 'name' : name,\n 'help' : 'a group of sub-commands',\n }\n return node.add_parser(**args)\n \n parser = ACMD_PARSER\n node = _get_subparser(parser)\n\n for i,n in enumerate(names[:-1]):\n node = _get_subparser(parser)\n parser = _get_parser(node, i, names)\n \n node = _get_subparser(parser)\n kwargs['name'] = names[-1]\n parser = node.add_parser(**kwargs)\n return parser",
"def get_parser():\n parser = argparse.ArgumentParser(\n description='Converts HTML from file or url to a clean text version')\n parser.add_argument('input', nargs='?', default=None,\n help='Html input either from a file or an url '\n '(default:stdin)')\n parser.add_argument('-o', '--output', type=str,\n help='Output file (default:stdout).')\n parser.add_argument('-e', '--encoding', type=str,\n help='Content encoding for reading and writing files '\n '(default:utf-8)',\n default='utf-8')\n parser.add_argument('-i', '--display-image-captions',\n action='store_true', default=False,\n help='Display image captions (default:false).')\n parser.add_argument('-d', '--deduplicate-image-captions',\n action='store_true', default=False,\n help='Deduplicate image captions (default:false).')\n parser.add_argument('-l', '--display-link-targets',\n action='store_true', default=False,\n help='Display link targets (default:false).')\n parser.add_argument('-a', '--display-anchor-urls',\n action='store_true', default=False,\n help='Deduplicate image captions (default:false).')\n parser.add_argument('--indentation', default='extended',\n help='How to handle indentation (extended or strict;'\n ' default: extended).')\n parser.add_argument('-v', '--version',\n action='store_true', default=False,\n help='display version information')\n return parser",
"def parser(self, prog, **kwargs):\n prog += ' ' + self.name\n parser = argparse.ArgumentParser(prog=prog,\n description=self.summary,\n **kwargs)\n for args, kwargs in self.args or ():\n parser.add_argument(*args, **kwargs)\n return parser",
"def get_parser():\n parser = argparse.ArgumentParser(\n description=\"CLEWsy tools for CLEWs models\")\n parser.add_argument(\n \"--version\",\n action=\"version\",\n version=\"clewsy {ver}\".format(ver=__version__))\n \n parser.add_argument(\n \"-v\",\n \"--verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to INFO\",\n action=\"store_const\",\n const=logging.INFO)\n parser.add_argument(\n \"-vv\",\n \"--very-verbose\",\n dest=\"loglevel\",\n help=\"set loglevel to DEBUG\",\n action=\"store_const\",\n const=logging.DEBUG)\n subparsers = parser.add_subparsers()\n \n # Parser for building a clews model\n build_parser = subparsers.add_parser(\"build\", help=\"build a CLEWs model from clustering data and a yaml model description file\")\n build_parser.add_argument(\n \"yamlfile\",\n help=\"Please provide the yaml model description file\",\n )\n build_parser.set_defaults(func=build)\n \n \n return parser",
"def get_parser():\n\tparser = argparse.ArgumentParser('preprocessing.py',\n\t\tformatter_class=argparse.RawDescriptionHelpFormatter,\n\t\tdescription=\"\"\"\nRun a piepline for one NICER ObsID data. \n\t\t\"\"\"\n\t\t)\n\tversion = '%(prog)s ' + __version__\n\tparser.add_argument('obsid', type=str, \n\t\thelp='ObsID (e.g., 4012010109)')\t\n\treturn parser",
"def create_parser(self, prog_name, subcommand):\n parser = CommandParser(\n self, prog=\"%s %s\" % (os.path.basename(prog_name), subcommand),\n description=self.help or None,\n )\n parser.add_argument(\n '--version', action='version', version=self.get_version())\n\n self.add_arguments(parser)\n return parser",
"def make_argument_parser():\n parser = argparse.ArgumentParser()\n parser.add_argument('--device',\n type=str,\n choices=['CPU', 'GPU'],\n help='Execution device.',\n required=True)\n parser.add_argument('-N',\n type=int,\n default=DEFAULT_N,\n help='Number of particles.')\n parser.add_argument('--rho',\n type=float,\n default=DEFAULT_RHO,\n help='Number density.')\n parser.add_argument('--dimensions',\n type=int,\n choices=[2, 3],\n help='Number of dimensions.',\n default=DEFAULT_DIMENSIONS)\n parser.add_argument('--warmup_steps',\n type=int,\n default=DEFAULT_WARMUP_STEPS,\n help='Number of timesteps to run before timing.')\n parser.add_argument('--benchmark_steps',\n type=int,\n default=DEFAULT_BENCHMARK_STEPS,\n help='Number of timesteps to run in the benchmark.')\n parser.add_argument('--repeat',\n type=int,\n default=DEFAULT_REPEAT,\n help='Number of times to repeat the run.')\n parser.add_argument('-v',\n '--verbose',\n action='store_true',\n help='Verbose output.')\n return parser",
"def build_parser() -> ArgumentParser:\n parser = ArgumentParser(prog=\"bartender\")\n parser.add_argument(\"--version\", action=\"version\", version=version(\"bartender\"))\n subparsers = parser.add_subparsers(dest=\"subcommand\")\n\n serve_sp = subparsers.add_parser(\"serve\", help=\"Serve web service\")\n serve_sp.set_defaults(func=serve)\n\n perform_sp = subparsers.add_parser(\"perform\", help=\"Async Redis queue job worker\")\n perform_sp.add_argument(\n \"--config\",\n default=Path(\"config.yaml\"),\n type=Path,\n help=\"Configuration with schedulers that need arq workers\",\n )\n perform_sp.add_argument(\n \"--destination\",\n nargs=\"+\",\n help=\"\"\"Name of destinations to run workers for.\n Each destination must have `scheduler.type:arq`.\n By default runs workers for all destinations with `scheduler.type:arq`.\"\"\",\n dest=\"destination_names\",\n )\n perform_sp.set_defaults(func=perform)\n\n add_generate_token_subcommand(subparsers)\n\n return parser",
"def init_parser():\n parser = argparse.ArgumentParser(\n description='Backup application code and data.')\n parser.add_argument('-a', '--app-id', required=True,\n help='the application ID to run the backup for')\n parser.add_argument('--source-code', action='store_true',\n default=False, help='backup the source code too. Disabled by default.')\n parser.add_argument('-d', '--debug', required=False, action=\"store_true\",\n default=False, help='display debug messages')\n parser.add_argument('--skip', required=False, nargs=\"+\",\n help='skip the following kinds, separated by spaces')\n\n return parser",
"def makeParser():\n parser = argparse.ArgumentParser(\n description=(\n \"Print a JSON object containing reference to read \"\n \"distances extracted from a SAM file.\"\n )\n )\n\n parser.add_argument(\n \"--samFile\",\n action=\"append\",\n required=True,\n help=\"The SAM file(s) to load. May be repeated.\",\n )\n\n parser.add_argument(\n \"--minMatchingReads\",\n type=int,\n help=(\n \"The minimum number of reads that must match a reference for it \"\n \"to be included.\"\n ),\n )\n\n parser.add_argument(\n \"--scoreTag\",\n help=(\n \"The score tag to use for the alignment score. If not given, \"\n \"1 will be used to indicate that a read matched a reference \"\n \"(non-matches are not included). The default is no score tag, \"\n 'which is not that useful. A good choice is \"AS\", for the '\n \"alignment score, but that has to be present in the SAM file, \"\n \"which means that the aligner (bowtie2, bwa, etc. has to have \"\n \"produced such a tag.\"\n ),\n )\n\n parser.add_argument(\n \"--verbose\", action=\"store_true\", help=\"Print extra information.\"\n )\n\n return parser",
"def get_parser():\r\n parser = argparse.ArgumentParser(description=( # pylint: disable=redefined-outer-name\r\n \"Automatically finds translation errors in all edx-platform *.po files, \"\r\n \"for all languages, unless one or more language(s) is specified to check.\"\r\n ))\r\n\r\n parser.add_argument(\r\n '-l', '--language',\r\n type=str,\r\n nargs='*',\r\n help=\"Specify one or more specific language code(s) to check (eg 'ko_KR').\"\r\n )\r\n\r\n parser.add_argument(\r\n '-e', '--empty',\r\n action='store_true',\r\n help=\"Includes empty translation strings in .prob files.\"\r\n )\r\n\r\n parser.add_argument(\r\n '-v', '--verbose',\r\n action='count', default=0,\r\n help=\"Turns on info-level logging.\"\r\n )\r\n\r\n return parser",
"def cmd_line_parser():\n usage = \"usage: %prog [options]\\n\"\n opt_parser = OptionParser(usage=usage)\n opt_parser.add_option(\"--ai\", action=\"store\", dest=\"alternative_input\",\n help=\"an alternative input file (works only with load_from_pickle)\")\n opt_parser.add_option(\"--dl\", action=\"store\", dest=\"dumped_lexicon\",\n help=\"a dumped lexicon file (works only with load_from_pickle\")\n opt_parser.add_option(\"--dotest\", action=\"store_true\", dest=\"dotest\", default=False,\n help=\"use this flag if you want to apply testing\")\n opt_parser.add_option(\"-t\", action=\"store\", dest=\"test_parses\",\n help=\"the output file for the test parses\")\n opt_parser.add_option(\"-n\", action=\"store\", dest=\"train_parses\",\n help=\"the output file for the train parses\")\n opt_parser.add_option(\"-i\", dest=\"inp_file\", default=\"trainFiles/trainPairs\",\n help=\"the input file names (with the annotated corpus)\")\n opt_parser.add_option(\"--devel\", dest=\"development_mode\", default=False, action=\"store_true\",\n help=\"development mode\")\n\n return opt_parser",
"def getArgumentParser():\n parser = argparse.ArgumentParser(description=\"Script for running optimization for the ZH dark photon SR\")\n parser.add_argument('-i',\n '--infile',\n dest='infile',\n help='Input CSV file',\n default = '/afs/cern.ch/user/e/ehofgard/public/data/all_data')\n parser.add_argument('-o',\n '--output',\n dest='outdir',\n help='Output directory for plots, selection lists, etc',\n default='outdir')\n \n return parser",
"def create_parser():\n parser = OptionParser()\n\n parser.add_option(\"-s\", \"--script\", dest=\"script\", default='pbs.sh', help=\"Output location\")\n parser.add_option(\"-p\", \"--period\", dest=\"period\", default=\"30\", help=\"qstat period\")\n\n parser.set_usage(\"\"\"%prog [options]\"\"\")\n return parser",
"def init_parser():\n parser = OptionParser()\n parser.add_option(\"-n\", \"--interactive\", action=\"store_true\", help=\"run in interactive (non-daemon) mode\")\n parser.add_option(\"-r\", \"--run\", action=\"store_true\", help=\"starts process identified by -app parameter\")\n parser.add_option(\"-k\", \"--kill\", action=\"store_true\", help=\"kill process identified by -app parameter\")\n parser.add_option(\"-a\", \"--app\", action=\"store\", help=\"application to start (process name)\")\n parser.add_option(\"-q\", \"--query\", action=\"store_true\", help=\"query application's state\")\n parser.add_option(\"-i\", \"--install_ve\", action=\"store_true\", help=\"install a virtualenv for the runtime to use\")\n parser.add_option(\"-s\", \"--shell\", action=\"store_true\", help=\"run an ipython shell within the virtualenv\")\n parser.add_option(\"-t\", \"--tests\", action=\"store_true\", help=\"run tests\")\n parser.add_option(\"-x\", \"--xunit\", action=\"store_true\", help=\"run tests with coverage and xunit output for Jenkins\")\n parser.add_option(\"-z\", \"--analyze\", action=\"store_true\", help=\"run pylint on project\")\n parser.add_option(\"-l\", \"--list\", action=\"store_true\", help=\"list available applications\")\n parser.add_option(\"-o\", \"--outfile\", action=\"store\", help=\"save results from a report to a file\")\n return parser",
"def arg_parse():\n p = ap.ArgumentParser()\n p.add_argument()\n return p.parse_args()",
"def make_cli_parser(self):\n super(SaArgParser, self).make_cli_parser()\n self.cli_parser.add_option('--steps', type='int',\n default=mcmc.defaults.NUM_STEPS,\n help=(\"the number of steps to Anneal. \"\n\t\t\t\t\"[default: %default]\")\n )\n self.cli_parser.add_option('--temperature', type='int',\n default=mcmc.defaults.TEMPERATURE,\n help=(\"the starting temperature to anneal from. \"\n \"[default: %default]\")\n )\n self.cli_parser.add_option('--end_temperature', type='int',\n default=mcmc.defaults.END_TEMPERATURE,\n help=(\"the temperature to end annealing.\"\n \"[default: %default]\")\n )\n self.cli_parser.add_option('--activity-threshold',\n type='float',\n default=mcmc.defaults.ACTIVITY_THRESHOLD,\n help=(\"set the (differential) expression threshold at \"\n \"which a gene is considered active [default: \"\n \"%default=-log10(0.05)]\")\n )\n self.cli_parser.add_option('--free-parameters',\n action='store_true',\n help=(\"parameters will be adjusted randomly, rather \"\n \"than incrementally\")\n )\n self.cli_parser.add_option('--disable-swaps', action='store_true',\n help=(\"disables swapping links as an option for \"\n \"transitions\")\n )\n self.cli_parser.add_option('--transition-ratio', type='float',\n default=0.9,\n help=(\"The target ratio of proposed link transitions \"\n \"to proposed parameter transitions [default: \"\n \"%default]\"\n )\n )\n self.cli_parser.add_option('--parameters-outfile',\n default=mcmc.defaults.PARAMETERS_OUTFILE,\n help=(\"the file to which the parameters results should \"\n \"be written [default: %default]\")\n )\n self.cli_parser.add_option('--transitions-outfile',\n default=mcmc.defaults.TRANSITIONS_OUTTFILE,\n help=(\"the file to which the transitions data should \"\n \"be written [default: %default]\")\n )\n self.cli_parser.add_option('--detailed-transitions',\n action='store_true',\n help=(\"Transitions file includes full information about \"\n \"each step's state.\")\n )\n self.cli_parser.add_option('--bzip2', action='store_true',\n help=\"compress transitions file using bzip2\"\n )",
"def make_cli_parser(self):\n super(ContextualArgParser, self).make_cli_parser()\n self.cli_parser.add_option('--num-permutations', type='int',\n default=cbpn.NUM_PERMUTATIONS,\n help=(\"number of permutations for statistics \"\n \"[default: %default]\")\n )\n self.cli_parser.add_option('-s', '--edge-swaps', type='int',\n help=(\"Perform the given number of edge swaps to \"\n \"produce random graphs. [NOTE: using this option \"\n \"changes the algorithm for determining \"\n \"significance of a link between each given pair \"\n \"of terms.]\"\n )\n )\n self.cli_parser.add_option('--no-estimation', dest='estimate',\n action='store_false', default=True,\n help=(\"Do not use p-value estimation, but run the \"\n \"full number of permutations for every pair of \"\n \"annotation terms. [NOTE: this can substantially \"\n \"increase running time.]\"\n )\n )\n self.cli_parser.add_option('--score-correction',\n action='store_true', default=False,\n help=(\"Correct scores for each pair of terms by an \"\n \"\\\"expected\\\" value calculated from the mean \"\n \"expression value.\"\n )\n )",
"def make_parser():\n parser = argparse.ArgumentParser(prog=__file__.replace(\".py\", \"\"),\n description='simple $PATH tool')\n parser.add_argument('-n', '--nocolor', dest=\"color\",\n action=\"store_false\", default=True,\n help='Turn off ANSI color codes.')\n parser.add_argument('-w', '--nowarn', action=\"store_true\",\n help='Turn off path warnings.')\n subs = parser.add_subparsers(title='subcommands',\n description='The subcommands')\n\n sub = subs.add_parser('replace', description=\"Search & Replace $PATH\")\n sub.set_defaults(cmd='path_replace')\n sub.add_argument('terms', nargs='+',\n help='Format: search:replace, search:replace, ...')\n\n sub = subs.add_parser('show', description=\"Show $PATH compoents\")\n sub.set_defaults(cmd='path_show')\n sub.add_argument('-n', '--nocolor', dest=\"color\",\n action=\"store_false\", default=True,\n help='Turn off ANSI color codes.')\n sub.add_argument('-w', '--nowarn', action=\"store_true\",\n help='Turn off path warnings.')\n\n sub = subs.add_parser('which', description=\"Platform agnostic `which -a`\")\n sub.set_defaults(cmd='path_which')\n sub.add_argument('look', help='Look for this executable')\n sub.add_argument('-n', '--nocolor', dest=\"color\",\n action=\"store_false\", default=True,\n help='Turn off ANSI color codes.')\n sub.add_argument('-v', '--version', action=\"store_true\",\n help='Show version of exact matches.')\n\n return parser",
"def get_parser():\n # Get parsers for various model architectures.\n model_parser = ModelFactory.get_all_parsers()\n # Get parsers for various optimizers.\n optimizer_parser = OptimizerFactory.get_all_parsers()\n # Add parent parsers.\n parent_parsers = model_parser + optimizer_parser\n parser = argparse.ArgumentParser(parents=parent_parsers)\n\n # Generic options\n parser.add_argument('--checkpoint-step', type=int, default=1,\n help='Number of epochs between successive checkpoint creations')\n parser.add_argument('--config-file', type=str, default=[], nargs='*',\n help='File(s) to read the command-line arguments from')\n parser.add_argument('--continue', action='store_true',\n help='Continue the execution of the last experiment saved into the export directory')\n parser.add_argument('--debug', action='store_true', help='Show debug messages')\n parser.add_argument('--export-dir', type=str, required=True, help='Export directory')\n parser.add_argument('--no-gpu', action='store_true', help='Use CPU')\n \n parser.add_argument(\"--wandb-directory\", type=str, default=\"../wandb\")\n parser.add_argument(\"--disable-wandb\", action=\"store_true\", help=\"No Wandb logging\")\n\n # Data options\n parser.add_argument('--batch-size', type=int, default=[16], nargs='*', help='Batch size(s)')\n parser.add_argument('--dataset', type=str, default=[consts.SIGMORPHON2020], nargs='*',\n choices=[consts.SIGMORPHON2020], help='Dataset(s) to train on')\n parser.add_argument('--sigmorphon2020-root', type=str, help='Root directory for the SIGMORPHON 2020 dataset')\n\n # Language options\n parser.add_argument('--language-families', type=str, nargs='*', default=None,\n help='The families of languages to load the data for.'\n ' If not provided, all available families will be used.')\n parser.add_argument('--language-info-file', type=str, default='lang_config.tsv',\n help='The language information file.')\n parser.add_argument('--languages', type=str, nargs='*', default=None,\n help='The languages to load the data for.'\n ' If not provided, all available languages will be used.')\n\n # Optimizer options\n parser.add_argument('--optimizer', type=str, default=[OptimizerFactory.optimizers[0]],\n choices=OptimizerFactory.optimizers, nargs='*', help='Optimizer algorithm(s)')\n parser.add_argument('--num-epochs', type=int, default=30, help='Number(s) of epochs')\n\n # Model options\n parser.add_argument('--model-architecture', type=str, default=[ModelFactory.architectures[0]], nargs='*',\n choices=ModelFactory.architectures, help='Model architecture(s)')\n \n # Parallelism Optoions, affect various\n parser.add_argument('--loader-threads', type=int, default=0, help='Data loading threads. Default to 0 (load in main)')\n parser.add_argument('--use-dataparallel', action='store_true', help='Use torch.nn.DataParallel to wrap the model?')\n\n return parser",
"def extend_parser(self, parser):\n return parser"
] | [
"0.7564569",
"0.74022853",
"0.73748535",
"0.7330005",
"0.73285943",
"0.73109233",
"0.7296097",
"0.72469074",
"0.7230259",
"0.7213541",
"0.7185622",
"0.7110324",
"0.7101994",
"0.70998615",
"0.7094069",
"0.70916784",
"0.70916784",
"0.70733887",
"0.70644695",
"0.7051284",
"0.7051284",
"0.70415086",
"0.7023836",
"0.7012235",
"0.7007044",
"0.70031786",
"0.69946057",
"0.6993284",
"0.6992788",
"0.6985943",
"0.69857836",
"0.6971119",
"0.6962339",
"0.6962339",
"0.6958329",
"0.6935254",
"0.6923756",
"0.6895144",
"0.68870944",
"0.68799865",
"0.6875813",
"0.68720645",
"0.6869272",
"0.68669283",
"0.68296814",
"0.68268204",
"0.6824535",
"0.6810109",
"0.68092304",
"0.68068224",
"0.6795409",
"0.67823046",
"0.6777575",
"0.67732394",
"0.677232",
"0.67666984",
"0.6751848",
"0.6735244",
"0.6726295",
"0.6718532",
"0.6704005",
"0.6703561",
"0.67024076",
"0.6692382",
"0.66878194",
"0.66844445",
"0.668264",
"0.66798985",
"0.66775537",
"0.6663931",
"0.66552067",
"0.66514885",
"0.66497314",
"0.6646509",
"0.66446346",
"0.66434675",
"0.66349965",
"0.66343236",
"0.66303885",
"0.66274625",
"0.66191363",
"0.6616143",
"0.6614825",
"0.66105366",
"0.66079414",
"0.66076034",
"0.6596827",
"0.6596251",
"0.6579644",
"0.65746295",
"0.65677035",
"0.6565912",
"0.6565853",
"0.65645283",
"0.6559056",
"0.65585935",
"0.6553713",
"0.6552429",
"0.6552293",
"0.65497905"
] | 0.6929574 | 36 |
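The negative samples at the top of this record all construct argparse command-line interfaces with subcommands. Below is a minimal, self-contained sketch of that pattern, assuming nothing beyond the standard library; the parser name and subcommand set are illustrative, not a copy of any one sample.

import argparse

def make_demo_parser():
    # Top-level parser plus sub-parsers; each sub-parser stores a 'cmd'
    # default so a dispatcher can later pick the matching handler.
    parser = argparse.ArgumentParser(description='simple $PATH tool (sketch)')
    subs = parser.add_subparsers(title='subcommands')

    sub = subs.add_parser('show', description='Show $PATH components')
    sub.set_defaults(cmd='path_show')

    sub = subs.add_parser('which', description='Platform agnostic `which -a`')
    sub.set_defaults(cmd='path_which')
    sub.add_argument('look', help='Look for this executable')

    return parser

if __name__ == '__main__':
    args = make_demo_parser().parse_args(['which', 'python'])
    print(args.cmd, args.look)  # -> path_which python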
Determine the numbers divisible by k from a list | def get_longest_div_k(lst, k):
    rezultat = []
    for x in lst:
        if x % k == 0:  # keep only the elements divisible by k
            rezultat.append(x)
return rezultat | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getDivisors(n):",
"def divisor(k, num):\n\n if k < 0:\n raise Exception('k must be >= 0: {}'.format(k))\n\n factors = prime_factorization(num)\n result = 1\n if k == 0:\n for prime in factors:\n result *= prime + 1\n\n for prime in factors:\n result *= ((pow(prime, (factors[prime] + 1) * k) - 1) //\n (prime ** k - 1))\n return result",
"def calc(k):\n n = factorial(4*k) * (1103.0 + 26390.0*k)\n d = factorial(k)**4 * 396.0**(4.0*k)\n z = n/d\n return z",
"def C(n,k):\n if 0 <= k <= n:\n ntok = 1\n ktok = 1\n for t in xrange(1, min(k, n - k) + 1):\n ntok *= n\n ktok *= t\n n -= 1\n return ntok // ktok\n else:\n return 0",
"def count_k(n, k):\n if n == 0:\n return 1\n elif n < 0:\n return 0\n else:\n total = 0\n i = 1\n while i <= k:\n total += count_k(n - i, k)\n i += 1\n return total",
"def beautifulSubsets(self, nums: List[int], k: int) -> int:\n\n \"\"\"\n queue = deque([([], -1)])\n res = 0\n\n while queue:\n cur, idx = queue.popleft()\n res += 1\n\n for i in range(idx + 1, len(nums)):\n if nums[i] - k in cur or nums[i] + k in cur:\n continue\n\n queue.append((cur + [nums[i]], i))\n\n return res - 1\n \"\"\"\n\n \"\"\"\n # dp0 is the ways that without A[i]\n # dp1 is the ways that with A[i]\n\n count = [Counter() for i in range(k)]\n for n in nums:\n count[n % k][n] += 1\n\n res = 1\n for i in range(k):\n prev, dp0, dp1 = 0, 1, 0\n for n in sorted(count[i]):\n v = pow(2, count[i][n])\n if prev + k == n:\n dp0, dp1 = dp0 + dp1, dp0 * (v - 1)\n else:\n dp0, dp1 = dp0 + dp1, (dp0 + dp1) * (v - 1)\n\n prev = n\n\n res *= dp0 + dp1\n\n return res - 1\n \"\"\"\n\n # Count the frequency of A, and then consider all the arithmetic sequence with difference k.\n # Each arithmetic sequence can be solve as a hourse robber problem.\n # We solve the hourse robber by dp.\n # dp(a) return the result for sequence no bigger than a.\n\n # dp(a)[0] is the ways that without a\n # dp(a)[1] is the ways that with a\n\n # dp(a)[0] = dp(a - k)[0] + dp(a - k)[1]\n # dp(a)[1] = dp(a - k)[0] * (2 ^ count(a) - 1\n\n count = Counter(nums)\n\n def dp(n):\n dp0, dp1 = dp(n - k) if n - k in count else (1, 0)\n return dp0 + dp1, dp0 * (pow(2, count[n]) - 1)\n\n return functools.reduce(operator.mul, (sum(dp(n)) for n in count if not count[n + k])) - 1",
"def divisior(n: int) -> list:\n j = [n]\n for d in range(n+1): #loop bis n\n d > 0",
"def choose(n, k):\r\n if 0 <= k <= n:\r\n ntok = 1\r\n ktok = 1\r\n for t in range(1, min(k, n - k) + 1):\r\n ntok *= n\r\n ktok *= t\r\n n -= 1\r\n return ntok // ktok\r\n else:\r\n return 0",
"def solution2(nums, K):\n s = 0\n sum_til = []\n for n in nums:\n s += n\n sum_til.append(s)\n\n l = len(nums)\n for i in range(l):\n for j in range(i+1, l):\n sum_ij = sum_til[j] if i == 0 else sum_til[j] - sum_til[i-1]\n if K != 0 and sum_ij % K == 0:\n return True\n if K == 0 and sum_ij == 0:\n return True\n return False",
"def _get_m(self, ks: List[int]) -> int:\n\n base = 1\n for c in ks:\n base = base * c // gcd(base, c)\n return base",
"def count_kmers_possible(read, k):\n num_kmers = {}\n num_kmers1 = len(read) - k + 1\n num_kmers2 = 4**k\n#num_kmers.append(min(num_kmers1,num_kmers2))\n num_kmers = min(num_kmers1,num_kmers2)\n num_kmers3 = max(num_kmers,0)\n return(num_kmers3)",
"def divisor_k_lookup(up_to, k):\n div = defaultdict(lambda: 1)\n div[1] = 1\n\n for i in xrange(2, up_to):\n for j in xrange(i, up_to, i):\n div[j] += i**k\n\n return div",
"def numSubarrayProductLessThanK(self, nums: List[int], k: int) -> int:\n\n if not nums:\n return 0\n\n if k <= 1:\n return 0\n\n count = 0\n lo = 0\n product = 1\n for hi in range(len(nums)):\n product *= nums[hi]\n while product >= k:\n product /= nums[lo]\n lo += 1\n count += hi - lo + 1\n return count",
"def chosse(n,k):\n import math \n if (n>=k and k>=0):\n return math.factorial(n) / (math.factorial(k) * math.factorial(n-k))\n else:\n return \"No se puede calcular el numero factorial indicado\"",
"def choose(n, k):\n if 0 <= k <= n:\n ntok = 1\n ktok = 1\n for t in xrange(1, min(k, n - k) + 1):\n ntok *= n\n ktok *= t\n n -= 1\n return ntok // ktok\n else:\n return 0",
"def choose(n, k):\n if 0 <= k <= n:\n ntok = 1\n ktok = 1\n for t in xrange(1, min(k, n - k) + 1):\n ntok *= n\n ktok *= t\n n -= 1\n return ntok // ktok\n else:\n return 0",
"def main(l, k):\n S = 0\n T = product(xrange(2), repeat=k)\n for ts in T:\n tmp = []\n\n for t, c in zip(ts, cs):\n tmp.append(((-1)*c)**t)\n\n S += (sum(tmp)**l)\n val = (sum(tmp)**l)\n print val\n return S / float(2**(k))",
"def main(l, k):\n S = 0\n T = product(xrange(2), repeat=k)\n for ts in T:\n tmp = []\n\n for t, c in zip(ts, cs):\n tmp.append(((-1)*c)**t)\n\n S += (sum(tmp)**l)\n val = (sum(tmp)**l)\n print val\n return S / float(2**(k))",
"def ndcg_at_k(self, r, k, method=0):\n # print(\"sorted:\" + str(sorted(r, reverse=True)))\n # 排完序最理想的結果分數\n dcg_max = self.dcg_at_k(sorted(r, reverse=True), k, method)\n # print(\"dcg_max:\" + str(dcg_max))\n if not dcg_max:\n return 0.\n return self.dcg_at_k(r, k, method) / dcg_max",
"def diviseur(n):\n s = 0\n for i in range (1, n):\n if n%i == 0:\n s += 1\n print(i)\n return \"Le nombre de diviseurs est\", s",
"def cdf(self, k):\n\n if k < 0 or k > self.n:\n return 0\n\n k = int(k)\n ans = 0\n for i in range(0, k + 1):\n ans += self.pmf(i)\n return ans",
"def workersNeeded(k, m):\n # formula: k/m\n from math import ceil\n return ceil(float(k)/float(m))",
"def choose(n, k):\n # http://stackoverflow.com/a/3025547/313967\n if 0 <= k <= n:\n ntok = 1\n ktok = 1\n for t in xrange(1, min(k, n - k) + 1):\n ntok *= n\n ktok *= t\n n -= 1\n return ntok // ktok\n else:\n return 0",
"def answer(l):\n num_divisors = [0] * len(l)\n triple_count = 0\n for large in range(1, len(l)):\n for small in range (0, large):\n if l[large] % l[small] == 0:\n num_divisors[large] += 1\n triple_count += num_divisors[small]\n return triple_count",
"def get_k(self, n, m):\n k = m/n * log(2)\n return int(k)",
"def partition(n, ks):\n if type(ks) not in (list, tuple):\n raise TypeError('ks must be an iterable')\n if not ks:\n raise ValueError('ks must have at least one value')\n elif min(ks) < 0:\n raise ValueError('group size k must be non-negative')\n num = _math.factorial(n)\n den = 1\n for k in ks:\n den *= _math.factorial(k)\n return int(num / den)",
"def nCkarray(*k_values):\n result = 1\n for i, j in enumerate((m for k in k_values for m in range(1, k+1)), 1):\n result = (result * i) // j\n return result",
"def find_subarrays(nums, k):\n res = pre_sum = 0\n dic = {0: 1}\n for i in nums:\n pre_sum += i\n res += dic.get(pre_sum - k, 0)\n dic[pre_sum] = dic.get(pre_sum, 0) + 1\n return res",
"def divide(self, k, endless=False):\n ### ERROR: Insufficient volume. ###\n v = self.volume / k\n #vols = [v for i in xrange(k)]\n #vols[-1] = self.volume - sum(vols[:-1])\n if self.endless:\n return [self.aliquot(v, endless) for i in xrange(k)]\n else:\n samples = [self.aliquot(v, endless) for i in xrange(k-1)]\n samples.append(self.aliquot(self.volume, endless))\n return samples",
"def all_kmers(k):\n for i in range(0, 4 ** k):\n res = number_to_kmer(i, k)\n yield res",
"def get_divisores(num):\n divisores = [] #uso una lista para guardar los divisores\n for i in range(1, num):\n if num%i == 0:\n divisores.append(i)\n return divisores",
"def nchoosek(n, k):\n if n < k:\n return 0\n return partition(n, [k, n - k])",
"def choose(n, k):\n ans, k = 1, min(k, n-k)\n for i in range(k):\n ans *= n-i\n ans //= i+1\n return ans",
"def combin(n, k):\n\tif k > n//2:\n\t\tk = n-k\n\tx = 1\n\ty = 1\n\ti = n-k+1\n\twhile i <= n:\n\t\tx = (x*i)//y\n\t\ty += 1\n\t\ti += 1\n\treturn x",
"def get_chunks(indivs, k):\r\n\tpair_chunk_collection=[]\r\n\tfor i in xrange(0, len(indivs[0])-k+1, k):\r\n\t\tchunks=[]\r\n\t\tfor x in indivs:\r\n\t\t\tchunks.append(x[i:i+k])\r\n\t\tpartial_phase_pairs=tune_em(chunks, 5)[1]\r\n\t\tprint partial_phase_pairs\r\n\t\tpair_chunk_collection.append(partial_phase_pairs)\r\n\treturn pair_chunk_collection",
"def numberOfPaths2(self, grid: List[List[int]], k: int) -> int:\r\n\r\n ROW, COL = len(grid), len(grid[0])\r\n dp = [[[0] * k for _ in range(COL)] for _ in range(ROW)]\r\n dp[0][0][grid[0][0] % k] = 1\r\n for r in range(ROW):\r\n for c in range(COL):\r\n for mod in range(k):\r\n if r - 1 >= 0:\r\n dp[r][c][mod] += dp[r - 1][c][(mod - grid[r][c]) % k]\r\n dp[r][c][mod] %= MOD\r\n if c - 1 >= 0:\r\n dp[r][c][mod] += dp[r][c - 1][(mod - grid[r][c]) % k]\r\n dp[r][c][mod] %= MOD\r\n return dp[ROW - 1][COL - 1][0]",
"def prime_factor(n):\n while n > 1:\n k = 2 \n while n % k != 0:\n k = k+1\n n = n // k\n print(k)",
"def d(n):\n divisors = []\n for i in range(1, n):\n if n % i == 0:\n divisors.append(i)\n return sum(divisors)",
"def count(arr, k):\n dp = [[None]*(k+1) for _ in range(len(arr)+1)]\n for i in range(len(dp)):\n dp[i][0] = 1\n for i in range(1, len(dp[0])):\n dp[0][i] = 0\n for a in dp:\n print(a)\n for i in range(1, len(dp)):\n for j in range(1, len(dp[0])):\n if arr[i-1] <= j:\n dp[i][j] = dp[i-1][j-arr[i-1]] + dp[i-1][j]\n else:\n dp[i][j] = dp[i-1][j]\n for a in dp:\n print(a)\n return dp[-1][-1]",
"def getKmers(seq, k):\n \n kmd = {}\n \n for i in range(len(seq)+1-k):\n kmer = seq[i:i+k]\n kmd[kmer] = kmd.get(kmer,0) + 1\n return kmd",
"def tab_Pdk(dmax):\r\n kmax = dmax*6 #la somme des des ne peut etre superieur a 6 fois leur nombre\r\n res = np.ones((dmax, kmax))\r\n\r\n\t#on met a zero toutes les cases qui sont impossible a completer\r\n for d in range(dmax):\r\n for k in range(kmax):\r\n if (k+1)<2*(1+d) or (k+1)>6*(d+1):\r\n res[d,k] = 0\r\n\t\t \r\n\t#on initialise pour le cas d=1\r\n for i in range(1,6):\r\n res[0][i] = 1/5\r\n\r\n\t#on met les valeurs des Q(d,k) dans toutes les cases non nulles\r\n for d in range(1,dmax):\r\n for k in range(kmax):\r\n if (res[d,k]==1) :\r\n res[d,k] = 0\r\n #on fait un for dans les valeurs qui sont realisables. \r\n #le +1 apres le min est la car nous sommes dans un range\r\n for i in range(max(k-6,2*(d+1-1)-1) , min(k-2,6*(d+1-1))+1):\r\n res[d,k] += res[d-1,i]/5\r\n\r\n\t#On multiplie toutes les cases selon la formule pour obtenir les P(d,k)\r\n for d in range(dmax):\r\n for k in range(kmax):\r\n res[d,k] = res[d,k]*(5/6)**(d+1)\r\n\t\t \r\n for d in range(dmax):\r\n res[d, 0] = 1-(5/6)**(d+1)\r\n\t\t\r\n return res",
"def ref_main(l, k):\n S = sum(binomial(k, z)*(2*z-k)**l for z in range(k+1))\n return S / 2**k",
"def ref_main(l, k):\n S = sum(binomial(k, z)*(2*z-k)**l for z in range(k+1))\n return S / 2**k",
"def idcg(k):\n res = sum([1.0 / math.log(i + 2, 2) for i in range(k)])\n if not res:\n return 1.0\n else:\n return res",
"def count_kmers(dna, k):\n kmer_count = Counter()\n for i in range(len(dna)):\n kmer = dna[i:(i+k)]\n if len(kmer) == k:\n kmer_count[kmer] += 1\n return kmer_count",
"def k(n):\r\n primes = u.sieve(n)\r\n l = [1, 0]\r\n for i in range(2, n + 1):\r\n l1 = [l[r] * sopf(i - r, primes) for r in range(1, i)]\r\n s = (sum(l1) + sopf(i, primes)) // i\r\n l.append(s)\r\n return l[n]",
"def num_divisors_iii(n):\n set_pf = set(n)\n n_div = 1\n for pf in set_pf:\n x = n.count(pf)\n n_div *= (1 + x)\n return n_div",
"def k_of_x(x):\n dx = x[1] - x[0]\n N = x.size\n dk = 2.*np.pi/(N*dx)\n inull = N//2\n k = dk*(np.linspace(1, N, N)-inull)\n\n return k",
"def ModExpo(a, k, n):\n ret = 1\n while k > 0:\n if k % 2:\n ret = ret * a % n\n k //= 2\n a = (a * a) % n\n return ret",
"def get_kth_ugly_number(k):\n count = 0; i = 0\n while count < k:\n i += 1\n if is_ugly(i):\n count += 1\n return i",
"def determine_k(dataset, range_k, n_seed=30):\r\n range_Ks = np.arange(0,range_k,1,dtype=int) #range of delays to study\r\n h_K=np.zeros((10,range_k))\r\n \r\n for i in range(10):\r\n for k, K in enumerate(range_Ks):\r\n traj_matrix= embed.trajectory_matrix(dataset, K=K)\r\n labels= cl.kmeans_knn_partition(traj_matrix, n_seed)\r\n h= op_calc.get_entropy(labels)\r\n h_K[i,k]=h\r\n \r\n return(h_K)",
"def combination(n, k):\n if (k > n) or (n < 0) or (k < 0):\n return 0\n val = 1\n for j in range(min(k, N - k)):\n val = (val * (N - j)) // (j + 1)\n return val",
"def answer():\n for k in range(2,3000):\n for j in range(k-1,0,-1):\n pj, pk = P(j), P(k)\n #print( j, k, pj, pk )\n if isPent(pk-pj):\n #print( j, k, pj, pk, pk+pj, isPent(pk+pj), pk-pj )\n if isPent(pk+pj) and isPent(pk-pj):\n return pk-pj",
"def listDivide(numbers, divide=2): \n counter = 0\n for num in numbers:\n if num % divide == 0:\n counter+=1\n return counter",
"def n_choose_k(N,K):\n return factorial(N) // (factorial(N - K) * factorial(K))",
"def __init__(self, k):\n self.k = k\n self.N = 2**self.k",
"def nCk(n, k):\n return factorial(n)//factorial(k)//factorial(n-k)",
"def choose(n, k):\n\n if n == k:\n return 1\n elif k == 1:\n return n\n elif k == 2:\n return n * (n - 1) // 2\n else:\n return fact(n) // (fact(n - k) * fact(k))",
"def get_k(self, modulus):\n\n\t\treturn (np.pi*self.z_thick*np.power(self.thick, 3.0)*modulus) \\\n\t\t\t\t/(6.0*self.length)",
"def superobl_p(k):\n if k%2==0:\n return 1/2 + 1/(2*(k+1))\n else:\n return 1/2 + 1/(2*k)",
"def langmuir_occ(p, k):\n\n intermediate = k * p\n\n occupancy = intermediate / (intermediate + 1)\n\n return occupancy",
"def n_choose_k(n: int, k: int) -> int:\n # Edge case, no possible way to choose.\n if k > n or k < 0 or n < 0: return 0\n # We choose the min of k or n - k\n # since nCk == nC(n - k).\n k = min(k, n - k)\n # The numerator represents the product\n # n * (n - 1) * (n - 2) * ... * (n - k - 1)\n numerator = reduce(mul, range(n, n - k, -1), 1)\n # The denominator represents the product\n # 1 * 2 * ... * k\n denominator = reduce(mul, range(1, k + 1), 1)\n # return the result as an integer.\n return numerator // denominator",
"def Split(self, k):\n n = len(self)\n start = range(0, n, ceil(n / k))\n end = list(start[1:]) + [n]\n return [range(first, last) for first, last in zip(start, end)]",
"def num_divisors_ii(n):\n set_pf = set(n)\n n_og = 2**(len(set_pf))\n n_div = n_og\n for pf in set_pf:\n x = n.count(pf)\n n_div += n_div//2 * (x - 1)\n return n_div",
"def division_algorithm(n):\n assert n < 1000\n decimals = []\n dividend = 1\n divisor = n\n counter = 0\n repeating, repeating_length = False, 0\n while dividend != 0 and not repeating:\n dividend = dividend * 10\n decimals.append(dividend // divisor)\n dividend = dividend % divisor\n counter += 1\n repeating, repeating_length = is_repeating(decimals)\n if repeating:\n counter = repeating_length\n return repeating, counter",
"def k_rank_approximate(doc_matrix, k):\n return []",
"def _find_dividers(num: int) -> List[int]:\r\n\r\n dividers: List[int] = list()\r\n while num != 1:\r\n primes = PrimeHandler.find_all_primes(num)\r\n for prime in reversed(primes):\r\n if num % prime == 0:\r\n dividers.append(prime)\r\n num = num // prime\r\n break\r\n return list(reversed(dividers))",
"def divide(self, val):\n ancien_pri = 999999\n ancien_chunck = 1\n for pri in prime_array:\n if val % pri == 0 and pri >= self.MINIMUM_NUMBER_OF_CHUNK and val / pri < self.MAXIMUM_SIZE_PER_CHUNK:\n ancien_pri = int(pri)\n ancien_chunck = int(val / pri)\n print({\"size\": ancien_pri, \"chunck\": ancien_chunck})\n self.divide(ancien_chunck)\n\n return {\"size\": ancien_pri, \"chunck\": ancien_chunck}",
"def number(self, ket):\n \n final = 0.0\n q = 0\n for i in ket:\n if i != 0:\n final += 2**q\n q += 1 \n return final",
"def choose(n: int, k: int) -> int:\n return permute(n, k) // factorial(k)",
"def kkDiv(*args):\n if (None in args):\n return None\n quot = float(args[0]) / float(args[1])\n if (quot > 1):\n return quot\n else:\n return 1/quot",
"def gatherDivisors(number): # prvni string ve funkci je comment; \"\"\" znamenam ze je na vic radek\n\tdivisors = []\n\tfor div in range(1, number + 1): # range vyhodi vse od jedne az do number\n\t\tif number % div == 0:\n\t\t\tdivisors.append(div)\n\treturn divisors",
"def calc_k(self):\n\t\n\tself.k = -np.array([self.sth*self.cphi, self.sth*self.sphi, self.cth])\n\n\treturn",
"def count_divisions(num, n):\n count = 0\n while pe_005.is_divisible(num, n):\n num = num // n\n count += 1\n return count, num",
"def get_k(M):\n k = np.arange(1,M+1)*np.pi/(M+1) # vector of all possible quasimomenta\n return k",
"def nr_pare(lista):\n nr_elemente_pare = 0\n for i in lista:\n if i % 2 == 0:\n nr_elemente_pare += 1\n return nr_elemente_pare",
"def falling(n, k):\n total, i = 1, 0\n if k==0: \n \treturn 1\n else: \n \twhile i < k: \n \t\ttotal = total * n \n \t\tn = n-1\n \t\ti = i+1\n \treturn total",
"def d(n):\n return sum(divisors(n))",
"def solution3(nums, K):\n modSeen = {0:-1}\n s = 0\n for i in range(len(nums)):\n n = nums[i]\n s += n\n mod = s % K if K != 0 else s\n if mod in modSeen:\n if i - modSeen[mod] > 1:\n return True\n else:\n modSeen[mod] = i\n return False",
"def print_final_res(res, k, num_of_cords):\r\n for t in range(k):\r\n for p in range(num_of_cords):\r\n counter = 1\r\n num = str(float(\"{:.4f}\".format(res[t][p]))).split(\".\")\r\n integ = list(map(str, num[0]))\r\n frac = list(map(int, num[1]))\r\n for digit in integ:\r\n if digit == \"-\":\r\n print(digit, end=\"\")\r\n elif counter <= 5:\r\n print(int(digit), end=\"\")\r\n counter += 1\r\n if counter <= 5:\r\n print(\".\", end=\"\")\r\n for digit in frac:\r\n if counter <= 5:\r\n print(digit, end=\"\")\r\n counter += 1\r\n while counter <= 5:\r\n print(0, end=\"\")\r\n counter += 1\r\n if p == num_of_cords-1:\r\n print(\"\")\r\n else:\r\n print(\",\", end=\"\")",
"def _discretize_mixture(mix, k):\n disc = np.floor(mix * k).astype(int)\n inds = np.argsort(disc - mix * k)[: k - disc.sum()]\n disc[inds] += 1\n return disc",
"def list_of_divisors_v1(n):\n \"\"\"\n This is a slow algorithm. But it is correct.\n \"\"\"\n if n == 1:\n return [1]\n if n == 2:\n return [1,2]\n L = {}\n if n > 0:\n L[1] = True\n if n > 1:\n L[n] = True\n for i in list_of_prime_factors(n):\n L[i] = True\n for j in list_of_divisors(n // i):\n L[j] = True\n return L.keys()",
"def divide(numbers):\n counter = 0\n for num in numbers:\n counter /= num\n return counter",
"def divisors(intgr):\n\tdivisors = []\n\tfor i in range(1,intgr+1):\n\t\tif(intgr%i==0):\n\t\t\tdivisors.append(i)\n\treturn divisors[1:-1]",
"def zeefVanEratosthenes(zeef, kStart, kEnd):\n zeef[0] = 0\n zeef[1] = 0\n for k in range(kStart, kEnd):\n for x in range(0, len(zeef), k):\n number = x\n if number >= k * 2:\n zeef[x] = 0\n return zeef # return de priemgetallen",
"def num_divisors(n):\n divisors = []\n for i in range(1, int(n**0.5) + 1):\n if n % i == 0:\n divisors += {i, n //i}\n return divisors",
"def Get(self,k:int): \n ### get partitions depending on the partition schemes C that depends on k!\n return subsets_k(list(range(self._n)),k)",
"def listDivide(numbers, divide = 2):\n divisible_count = 0\n\n for i in numbers:\n if i % divide == 0:\n divisible_count += 1\n return divisible_count",
"def distance(j, k, disk_count):\n \n if j <= k:\n return k - j\n else:\n return distance(j, disk_count, disk_count) + k",
"def solution(N, K):\n\n # not really sure what to return if N=1, it should be invalid?\n rounds = N - 1\n next_num = N\n while K > 0 and next_num >= 4:\n next_num = next_num - next_num % 2\n next_num = next_num // 2\n rounds -= (next_num - 1)\n K -= 1\n return rounds",
"def GetDivisions(self):\n ...",
"def div(self):\n a = self.nums()\n x = LibraryFunctions.per(a, 0.9) - LibraryFunctions.per(a, 0.1)\n return x / 2.58",
"def findKthNumber(self, m: int, n: int, k: int) -> int:\n l, r = 1, m * n\n while l < r:\n mid = l + ((r - l) >> 1)\n\n # Check if there are k or more values that are less than mid.\n # For each row, its elements look like 1*i, 2*i, ... n*i, so the\n # largest number that is less than x will be x // i. But if x is\n # too large for the current row, the total count for this row\n # will be n instead.\n if sum(min(mid // r, n) for r in range(1, m + 1)) >= k:\n # mid is our candidate.\n r = mid\n else:\n l = mid + 1\n\n return l",
"def partial_permutations(n, k):\n return int((factorial(n) / factorial(n - k)) % 1000000)",
"def list_of_divisibles(n):\n def is_prime(x, L = []):\n if x in L or x == 2:\n return True\n elif x == 1 or x % 2 == 0:\n return False\n for divisor in range(1, round(x ** .5)):\n if is_prime(divisor, L):\n if x % divisor == 0:\n return False\n return True\n \n def largest_exponent(i, n):\n \"\"\"\n Given a limit n and a base i, finds the largest exponenet x such that i ^ x <= n, and outputs i ^ x.\n\n \"\"\"\n x = 1\n while i ** x <= n:\n x += 1\n x -= 1\n print(i, x, i**x)\n return i ** x\n \n L = []\n for i in range(2, n+1):\n if i in L:\n continue\n elif is_prime(i):\n L.append(largest_exponent(i, n))\n return L",
"def perms(n, k):\n if n < k:\n return 0\n return partition(n, [n - k])",
"def aliquot_sum(k, num):\n return divisor(k, num) - num ** k",
"def _khlp_to_dks_on_basis(self, la):\n Sym = self._kBoundedRing.ambient()\n kB = Sym.kBoundedSubspace(self.k, t=self.t)\n Qp = Sym.hall_littlewood(t=self.t).Qp()\n ks = kB.kschur()\n return sum( Qp(ks(x)).coefficient(la) * self(x) for x in PartitionsGreatestLE(sum(la), self.k))",
"def fn(n, k):\n if n == 1: return k # base case \n return sum(fn(n-1, kk) for kk in range(1, k+1))",
"def find_k(i, j):\n\n result = ((i * i) + (j * j)) ** 0.5\n return result"
] | [
"0.68305314",
"0.66830385",
"0.64528286",
"0.6440956",
"0.64300156",
"0.6414138",
"0.640048",
"0.63713896",
"0.6357683",
"0.63318574",
"0.63268465",
"0.6303533",
"0.62746555",
"0.6266593",
"0.62309676",
"0.62309676",
"0.61922175",
"0.61922175",
"0.61577624",
"0.61330014",
"0.6132927",
"0.61119103",
"0.60999954",
"0.60972303",
"0.6066882",
"0.60249233",
"0.6024923",
"0.6023364",
"0.6001323",
"0.59818363",
"0.59598476",
"0.59299886",
"0.59267783",
"0.59157324",
"0.59143937",
"0.590186",
"0.5901061",
"0.5887188",
"0.5884086",
"0.5867818",
"0.5867612",
"0.58611405",
"0.58611405",
"0.58253276",
"0.58179516",
"0.58033955",
"0.5793456",
"0.5792239",
"0.57839656",
"0.57798165",
"0.5779595",
"0.5774368",
"0.5771292",
"0.5769468",
"0.57684827",
"0.57617867",
"0.57482654",
"0.5742744",
"0.5742639",
"0.5742596",
"0.5741269",
"0.57403684",
"0.57281196",
"0.57213527",
"0.5719277",
"0.57147855",
"0.5710421",
"0.5697176",
"0.5689883",
"0.56817245",
"0.5673526",
"0.5670636",
"0.5668777",
"0.56567925",
"0.5656704",
"0.56477576",
"0.56321985",
"0.5621964",
"0.56181973",
"0.5615519",
"0.56148267",
"0.561168",
"0.5610343",
"0.56092817",
"0.55984414",
"0.5596121",
"0.5588344",
"0.55769575",
"0.5572492",
"0.5569638",
"0.5566518",
"0.55646455",
"0.5564216",
"0.5563093",
"0.55567604",
"0.55548596",
"0.554972",
"0.55482286",
"0.5546298",
"0.55416226"
] | 0.70152783 | 0 |
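A minimal, self-contained usage sketch for the document function of the record above; the definition is repeated from the document field (with a short comment added) so the snippet runs standalone, and the expected output is noted in a comment.

def get_longest_div_k(lst, k):
    rezultat = []
    for x in lst:
        if x % k == 0:  # keep only the elements divisible by k
            rezultat.append(x)
    return rezultat

if __name__ == '__main__':
    print(get_longest_div_k([3, 5, 9, 12, 14], 3))  # -> [3, 9, 12]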
Determine whether the digits of a number are in decreasing order | def is_desc(x):
    while x > 9:
        if x % 10 > x // 10 % 10:  # the last digit exceeds the digit before it
            return False
        x = x // 10
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getNumber():",
"def num(self):\n return self.num",
"def get_oglindit(numar):\n if numar < 0:\n return numar\n numar_str = str(numar)\n numar_str = numar_str[::-1]\n return int(numar_str)",
"def number(self):",
"def nze(self) -> int:",
"def nze(self) -> int:",
"def valor_absoluto(numero):\r\n if numero >= 0:\r\n return numero\r\n else:\r\n return - numero",
"def get_num_val(self):\n if self.val == \"A\":\n return 0\n if self.val == \"K\":\n return 12\n if self.val == \"Q\":\n return 11\n if self.val == \"J\":\n return 10\n return_value = int(self.val) - 1\n return return_value",
"def nr():\n pass",
"def _num(self):\n try:\n num = int(self.__rId[3:])\n except ValueError:\n num = 9999\n return num",
"def numerize():\n pass",
"def get_num(self, data):\n data = NUM_PATTERN.findall(data)\n if data:\n return int(data[0])\n return 0",
"def gera_num_cc(abv):\n \n # Ao recebermos a indicacao de que entidade se pretende gerar um numero, usamos a funcao auxiliar escolhe_iin_comp para escolher aleatoriamente os digitos iniciais e o comprimento do cartao.\n # O numero final comeca por ser os digitos iniciais, juntando a estes, do lado direito, numeros aleatorios ate chegarmos ao comprimento pretendido menos 1. O ultimo digito sera o digito de verificacao.\n \n dig_iniciais , comp = escolhe_iin_comp(abv) \n num_cc = dig_iniciais\n \n for i in range(comp-len(dig_iniciais)-1): \n num_cc = num_cc + str(int(random()*10)) \n \n num_cc = num_cc + digito_verificacao(num_cc)\n \n return int(num_cc)",
"def headbut_miss(num):\r\n\tglobal php\r\n\tif num == 0:\r\n\t\tphp -= 10\r\n\t\treturn 0\r\n\telse:\r\n\t\treturn num",
"def number(self):\n return self._num",
"def _c13_num(pep_query, isolation_mz):\n return int(\n round(\n pep_query.pep_exp_z *\n abs(pep_query.pep_exp_mz - isolation_mz)\n )\n )",
"def residuo_cero(numero):\n for x in range (1,10):\n if(numero % x == 0):\n return x \n return numero",
"def eFeliz(numero, vezes):\r\n\tfor i in range(0,vezes):\r\n\t\tval = 0\r\n\t\tfor k in retornaDigitos(numero):\r\n\t\t\tval += pow(k,2)\r\n\t\tif val == 1:\r\n\t\t\treturn True\r\n\t\telse:\r\n\t\t\tnumero = val;\r\n\t\tprint val\r\n\treturn False",
"def evaluate_number(number : int)->int:\n if type(number) == int and number >1 and number < 100:\n num = total_numbers = porc = 0\n while porc < number:\n num = num + 1\n clasificate = is_bouncy(str(num))\n result = evaluate(clasificate , num)\n if result:\n total_numbers = total_numbers + 1\n porc = total_numbers * 100 / num\n return num\n return 0",
"def calc(self):\n num = 22\n while not self.divisible(num):\n # we know that only even numbers are divisible by 2, so\n # we only inspect even numbers.\n num = num + 2\n if num % 10000:\n print(str(num), end='\\r')\n\n return num",
"def numerocuadrado(posicion_del_mouse):\r\n\r\n for j in range(16):\r\n if Totalcuadrados[j].collidepoint(posicion_del_mouse):\r\n return j+1",
"def getSignDigit(number):\n #Fallunterscheidung, ob vor oder nach dem Komma gerundet werden\n # Bsp 12,xxx bzw 1,xxx\n index = 0\n mantisse = number[0]\n #print('mantisse: ' + str(mantisse))\n indexComma = getComma(mantisse)\n #print(\"comma: \" + str(indexComma))\n tempstrmant = str(mantisse)\n # führende Null\n #print('test: ' + tempstrmant[0])\n if tempstrmant[0] == '0':\n for i in range(1, len(tempstrmant)):\n #print(i)\n #print(\"test: \" + str(tempstrmant[i + index]))\n if (tempstrmant[i + indexComma] != '0'):\n break\n index = indexComma + i\n # Zahl beginnt nicht mit 0\n else:\n # Setze Index auf Anzahl der Stellen vor dem Komma Unterscheide mit 1.xxx oder 21.xx\n # Siehe dazu Bild\n # dies entspricht gerade der Stelle mit dem Index 2 - indexComma\n index = 2 - indexComma\n return(index)",
"def lentero():\r\n\twhile True:\r\n\t\tn = raw_input(\"Ingrese el valor deseado: \")\r\n\t\ttry:\r\n\t\t\tn_1 = int(n)\r\n\t\t\treturn n_1\r\n\t\texcept ValueError:\r\n\t\t\tprint(\"POR FAVOR: Ingrese un valor numerico y entero\")",
"def digito_verificacao(n):\n \n # Para obtermos o digito de verificacao, comecamos por somar todos os digitos do cartao, com excecao do de controle. Caso o resto da soma por 10 seja diferente de 0, o digito sera a diferenca entre 10 e esta. Caso seja 0, e este o digito de verificacao. \n\n soma = calc_soma(n)\n \n dig_ver = 0 \n \n if soma%10 != 0:\n dig_ver = 10 - soma%10\n \n \n return str(dig_ver)",
"def principal(self):\n\n\n sw = self.telofase()\n if sw is False:\n exc, var = self.excentricidad()\n if 0.35 < exc < 0.5:\n self.guardar_imagen('anafase')\n return 3\n elif 0.5 <= exc < 0.65:\n self.guardar_imagen('metafase')\n return 2\n elif exc >= 0.65:\n self.guardar_imagen('profase')\n return 1\n else:\n self.guardar_imagen('sin_clasificar')\n return 0\n else:\n return 4",
"def esprimo(numero):\n\tcontador = 2\n\t\n\tresultado = True\n\t\n\t#Loop principal. Corre hasta que el contador sea numero - 1.\n\twhile contador < numero:\n\t\t#Hace la division.\n\t\tresto = numero % contador\n\t\t\n\t\t#Mientras la division no sea 0, se salta esta parte completamente.\n\t\twhile resto == 0:\n\t\t\t#Si la division es 0, esto entrara en un loop infinito.\n\t\t\tresultado = False\n\t\t\t#por eso rompemos el loop con break\n\t\t\tbreak\n\t\t\n\t\tcontador += 1\n\treturn resultado",
"def getFreiePlaetze(self):\n frei = 0\n for reihe in self.belegung:\n for platz in reihe:\n if not platz.belegt(): frei += 1\n return frei",
"def Ingresar(frm):\r\n #se asigna el valor de la variable seguir\r\n \r\n #se le pide al usuario que ingresa la altura\r\n posinicial=float(frm.txtposinicial.GetValue())\r\n #posinicial=raw_input(\"Ingrese la altura desde donde se va dejar caer (en metros): \")\r\n #se condiona para que solo permite ingresar numero positivos \r\n \r\n posinicial=float(posinicial)\r\n if posinicial>0:\r\n posinicial=posinicial+2\r\n return posinicial",
"def multiplicaciones(): #906609 tiene que darme\n ultimo_palindromo = 0\n total = 0\n for primer_numero in range(100, 1000):\n for segundo_numero in range(100, 1000):\n #total se encarga de hacer la multiplicacion entre los numeros\n total = primer_numero * segundo_numero\n # llamamos a la funcion que verifica si la multiplicacion que envia es un palindromo\n if obtener_palindromo(total):\n #luego de verificar que la multiplicacion era palindromo pasamos a evaluarla hasta llegar al ultimo palindromo\n #entre 100 y 1000\n if ultimo_palindromo < total:\n ultimo_palindromo = total\n return ultimo_palindromo",
"def is_numberish(G):\n return True",
"def af(self) -> int:",
"def _get_n(self):#@Needs to fix it for general case\n n_60 = 0.55 * 1 * 1 * 0.75 * self._data[SoilProperty.SPT_N] /0.6\n if not self.is_clayey() and n_60>15: #apply dilitracy correction\n n_60 = 15 + 0.5 * (n_60 - 15)\n return n_60",
"def get_number(self):\n return self.__number",
"def valeur(self) -> int:\r\n if self.signe == '0':\r\n return super().valeur()\r\n if self == self.Minimum:\r\n return -2 ** (len(self) - 1)\r\n return - abs(self).valeur()",
"def fechou(self):\n return self.tot_rodada == self.rodada",
"def getInteger(self):",
"def getInteger(self):",
"def numer(self, a):\n return a",
"def ean_check_digit(ean):\n return (10 - (sum((2 + (-1) ** x) * y for (x, y) in enumerate(ean,\n start=1)) % 10)) % 10",
"def getSlipNum():\n query = Slip.query()\n results = query.fetch(limit = MAX_SLIPS)\n temp = 0\n for result in results:\n if result.number > temp:\n temp = result.number\n slipNum = temp\n slipNum += 1\n return slipNum",
"def vindhoogstecognummer(vhcn_lijst):\n try:\n return int(max(vhcn_lijst)[0])\n except IOError:\n return 0\n except IndexError:\n return 0\n except ValueError:\n return 0",
"def getNumber(self):\n return self.number",
"def parler(self, annonce):\n\n annonceJ = input(\"valeur annonce \" + str(self.numero) + \" :\")\n\n while (int(annonceJ) != 0 and int(annonceJ) <= annonce) or int(annonceJ) > 4:\n annonceJ = input(\"valeur annonce \" + str(self.numero) + \" :\")\n\n return int(annonceJ)",
"def ramfdec(self):\n return 0",
"def check_digit(tracking_number):\n check_digit = 10 - ((sum(itertools.starmap(operator.mul, zip(itertools.cycle((3, 1)), map(int, str(tracking_number))))) + 1) % 10)\n if check_digit == 10:\n check_digit = 0\n return check_digit",
"def z(self) -> int:",
"def evalute_number(dialed):\n if (len(dialed) == 11 or len(dialed) == 10) and str(dialed).startswith(\"0\"):\n # UK Number\n return \"+44%s\" % (dialed[1:])\n elif len(dialed) == 6:\n # Local Fishguard numbers\n return \"+441348%s\" % (dialed)\n return None",
"def _get_docket_numbers(self):\n return None",
"def check_prize(correct_num):",
"def test_hasta_el_numeral(self):\n fwa = FakeWikiArchivo('abcd <a href=\"/wiki/foobar#xy\">FooBar</a> dcba')\n _, r = self.peishranc(fwa)\n self.assertEqual(r, [(u'foobar', SCORE_PEISHRANC)])",
"def leerUltrasonido() -> int:\n pass",
"def pedir_numero():\n try:\n resposta = input(\"Digite um número inteiro por favor: \")\n except KeyboardInterrupt:\n print(\"\\n deixa quieto\")\n sys.exit()\n\n try:\n return int(resposta)\n except ValueError:\n print(\"número invalido\")\n return pedir_numero()",
"def modified_sommerfeld_number(self):\n return (\n self.radius_stator * 2 * self.omega * self.viscosity * (self.length ** 3)\n ) / (8 * self.load * (self.radial_clearance ** 2))",
"def get_fret_num(self):\n low_note = Note(self.guitar.tuning[self.string], self.string,\n self.guitar, False)\n self.fret = (ALL_NOTES.index(self.name) -\n ALL_NOTES.index(low_note.name))\n return self.fret",
"def get_num(self):\n l = []\n current = self\n while current:\n l.append(str(current.value))\n current = current.next\n\n l = list(reversed(l))\n string = \"\".join(l)\n return int(string)",
"def disc(P):\n ans = P.resultant(P.prime()) / P[-1]\n if P.isinteger():\n ans = int(ans.round())\n if P.deg % 4 in [0, 1]:\n return ans\n else:\n return -ans",
"def _check(self):\n try:\n num = int(self.ids.res_lim.text)\n # reset negative numbers to zero\n if num <= 0:\n self.ids.res_lim.text = str(0)\n except ValueError:\n self.ids.res_lim.text = str(self.limit)\n\n return int(self.ids.res_lim.text)",
"def degre(self):\n\t\tif self.__tete:\n\t\t\treturn len(self.__tete.plus_petit().get_indeterminee())\n\t\telse:\n\t\t\t\"\"\" concession a la definition mathematique du degre du polynome nul \"\"\"\n\t\t\treturn (-1)",
"def is_number(G):\n return True",
"def __int__(self) -> int:\n\n return self.centi",
"def Sgn(num):\n n = float(num)\n if n < 0:\n return -1\n elif n == 0:\n return 0\n else:\n return 1",
"def test_getnumber(self):\n convert = cnv()\n\n convert.setnum('einhundertdreiundzwanzig')\n self.assertEqual(convert.getnum(), 123)",
"def find_num(n: int) -> int:\n n = n - 54 * (n // 54)\n n = n - 6 * (n // 6)\n flat_nums = {1:1,\n 2:2,\n 3:3,\n 4:4,\n 5:5,\n 0:6}\n return(flat_nums[n%6])",
"def max_known_number(self):\n return len(self.number_list)-1",
"def getNum(self) :\n return self._num",
"def getNumber(self):\n return self.__number",
"def test_get_book_number(self):\n\t\t\n\t\tself.assertTrue(data.get_book_number('[1 corinthians 1:1]') == 46)",
"def detectar_pico_doge():\n\n ultimos_precios = persistence.traer_ultimos_precios_doge()\n prev = float(ultimos_precios[0])\n now = float(ultimos_precios[1])\n porcentaje = 0\n\n porcentaje = calcular_porcentaje(prev, now)\n porcentaje = round(porcentaje, 2)\n if porcentaje > 1.5 or porcentaje < -1.5:\n return porcentaje\n return 0",
"def is_number_correct(total):\n if int(total) < 0:\n return None\n return True",
"def n():\n # For documentation purposes",
"def number(self):\n return self._number",
"def number(self):\n return self._number",
"def get_value(self):\r\n return 0",
"def number(self, ket):\n \n final = 0.0\n q = 0\n for i in ket:\n if i != 0:\n final += 2**q\n q += 1 \n return final",
"def like_cust_id(_):\n return 1 / 27989.0",
"def num_decims(num):\r\n\r\n if (\".\" in num):\r\n \r\n numA, numB = num.split(\".\") \r\n num_decms = len(numB)\r\n \r\n elif (\".\" not in num):\r\n \r\n num_decms = 0\r\n \r\n return num_decms",
"def get_klassenstufe(klassenstufe):\n try:\n return int(klassenstufe)\n except:\n return 14",
"def calculate(self):\n\n result = \"FINITE\"\n pos = 0\n vis = set([])\n while 0 <= pos < self.n:\n vis.add(pos)\n if self.numa[pos]:\n pos += self.numb[pos]\n else:\n pos -= self.numb[pos]\n if pos in vis:\n result = \"IN\" + result\n break\n\n return str(result)",
"def compute(num):\n # 567 / 9 = 63, 235 / 47 = 5\n num = (num * 63 + 7492) * 5 - 498\n if num < 0: # modulus won't give correct result if number is negative\n num *= -1\n res = (num // 10) % 10\n return res",
"def num (self):\n return self.value[0]/self.value[1]",
"def genus(self):\n return 1 - self.euler_characteristic() // 2",
"def CLng(num):\n return int(round(float(num)))",
"def count_digit(x, i):\n \"\"\" GRAPSTE TON KWDIKA SAS APO KATW \"\"\"\n su = 0\n s = 0\n k = x\n while(i>1):\n x = x//10\n i = i-1\n s = x%10\n while(k>0):\n if((k%10)==s):\n su = su + 1\n k = k//10\n return su",
"def redondear(valor,anterior,D):\n global TECLA\n resta = int(anterior) - int(valor)\n if resta>0:\n valor=int(valor)+1\n elif resta<0:\n valor=int(valor)-1\n if TECLA:\n if valor<=103 and valor>=97:\n return 100\n elif valor<=203 and valor>=197:\n return 200\n elif valor<=303 and valor>=297:\n return 300\n elif valor<=403 and valor>=397:\n return 400\n return int(valor)/D*D",
"def detectar_pico_eth():\n\n ultimos_precios = persistence.traer_ultimos_precios_eth()\n prev = int(ultimos_precios[0])\n now = int(ultimos_precios[1])\n porcentaje = 0\n\n porcentaje = calcular_porcentaje(prev, now)\n porcentaje = round(porcentaje, 2)\n if porcentaje > 1.5 or porcentaje < -1.5:\n return porcentaje\n return 0",
"def getProperRandom( quant, curNum = None):\n\n\n\tcurRandomNumber = np.ceil( random.random() * quant - 1)\n\t\n\tif curNum == None:\n\n\t\treturn int(curRandomNumber)\n\t\n\telse:\n\t\t\n\t\twhile int(curRandomNumber) - 1 == curNum:\n\t\t\tcurRandomNumber = np.ceil( random.random() * quant - 1)\n\t\n\t\treturn int(curRandomNumber)",
"def get_num_alt(self, dec):\n return len(self.decisions[dec].value)",
"def looks_equivalent_number(self) -> Optional[float]:\n return self._get_property(LOOKS_EQUIVALENT_NUMBER_PROP, float)",
"def detectar_constantes_doge():\n\n ultimos_precios = persistence.traer_ultimos_precios_doge()\n prev = float(ultimos_precios[0])\n porcentaje = 0\n counter = 0\n for i in range(1,60):\n if prev < float(ultimos_precios[i]):\n counter = counter + 1\n elif prev > float(ultimos_precios[i]):\n counter = counter - 1\n prev = float(ultimos_precios[i])\n\n porcentaje = calcular_porcentaje(float(ultimos_precios[0]), float(ultimos_precios[i]))\n porcentaje = round(porcentaje, 2)\n if counter > 10 and porcentaje > 1:\n return porcentaje\n elif counter < -10 and porcentaje < -1:\n return porcentaje\n else:\n return 0",
"def test_num_reac(self):\n self.assertEqual(self.Nreac, 1)",
"def get_number_toroll(self, state):\n if CardEnum.TrainStation in state.Deck.keys() and state.Deck[CardEnum.TrainStation] > 0:\n return 2\n\n return 1",
"def checkdia(tarea_mensual):\n\n if tarea_mensual == 1:\n return 1\n else:\n return 0",
"def get_nominal(self, key):\n return ((hash(key) % 12) + 6.0) * 3",
"def foetus_pics(self):\n pic = 0\n if 0.0 not in self.allele:\n self.contamination = 2\n pic = 3\n elif 0.0 == self.allele[1]:\n pic = 1\n else:\n pic = 2\n return pic",
"def check_for_int(check):",
"def ramfinc(self):\n return 0",
"def sumDigit():",
"def visit_Num(self, node):\n token = node.token\n if token.type in (INTEGER, FLOAT):\n return token.value",
"def get_number(number_seqeunce):\n number_sequence.append(None)\n return [i for i in number_seqeunce if number_seqeunce.count(i) % 2 != 0][0]",
"def _get_number(self):\n return Decimal(str(self)[1:])",
"def evaluate(self) -> int:"
] | [
"0.73404014",
"0.6625964",
"0.65091646",
"0.64411956",
"0.6403674",
"0.6403674",
"0.6398264",
"0.6342135",
"0.6268537",
"0.6255234",
"0.618184",
"0.61722934",
"0.6116968",
"0.611175",
"0.6076584",
"0.6033818",
"0.60264623",
"0.6019782",
"0.6014425",
"0.5991572",
"0.5981616",
"0.5923165",
"0.5918243",
"0.5891409",
"0.5884352",
"0.5883219",
"0.58691597",
"0.5845264",
"0.58233297",
"0.58205265",
"0.58137983",
"0.58072835",
"0.5804621",
"0.58005357",
"0.5795183",
"0.5795065",
"0.5795065",
"0.57680666",
"0.57360846",
"0.57215554",
"0.571223",
"0.57112604",
"0.57057655",
"0.5699716",
"0.56939024",
"0.56790894",
"0.5677225",
"0.5676229",
"0.5671775",
"0.56614894",
"0.5655803",
"0.5652516",
"0.5650606",
"0.5646879",
"0.5645303",
"0.5638471",
"0.5637445",
"0.5637316",
"0.5630293",
"0.56295276",
"0.56218195",
"0.56218123",
"0.5619115",
"0.5608937",
"0.5604584",
"0.5602088",
"0.56000125",
"0.55945194",
"0.55855507",
"0.5578513",
"0.5567222",
"0.5567222",
"0.5565889",
"0.55644727",
"0.5564156",
"0.5563845",
"0.5561674",
"0.5561181",
"0.55611575",
"0.55492",
"0.55469006",
"0.55316085",
"0.55282736",
"0.55271626",
"0.5519212",
"0.55188894",
"0.5516044",
"0.55097055",
"0.55074084",
"0.5504954",
"0.5500327",
"0.5499967",
"0.54992706",
"0.54977643",
"0.54969317",
"0.5488919",
"0.54863554",
"0.548357",
"0.5476618",
"0.54752374",
"0.54750866"
] | 0.0 | -1 |
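A minimal, self-contained check of is_desc from the record above, with the definition repeated from the document field and two illustrative inputs.

def is_desc(x):
    while x > 9:
        if x % 10 > x // 10 % 10:  # the last digit exceeds the digit before it
            return False
        x = x // 10
    return True

if __name__ == '__main__':
    print(is_desc(9731))  # True: 9 >= 7 >= 3 >= 1
    print(is_desc(132))   # False: 1 < 3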
Determine the numbers whose digits are in decreasing order | def get_longest_digit_count_desc(lst):
    rezultat = []
    for i in lst:
        if is_desc(i):  # keep the numbers whose digits never increase left to right
            rezultat.append(i)
return rezultat | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getNumber():",
"def nze(self) -> int:",
"def nze(self) -> int:",
"def degre(self):\n\t\tif self.__tete:\n\t\t\treturn len(self.__tete.plus_petit().get_indeterminee())\n\t\telse:\n\t\t\t\"\"\" concession a la definition mathematique du degre du polynome nul \"\"\"\n\t\t\treturn (-1)",
"def number(self):",
"def num(self):\n return self.num",
"def numerocuadrado(posicion_del_mouse):\r\n\r\n for j in range(16):\r\n if Totalcuadrados[j].collidepoint(posicion_del_mouse):\r\n return j+1",
"def _num(self):\n try:\n num = int(self.__rId[3:])\n except ValueError:\n num = 9999\n return num",
"def numerize():\n pass",
"def nr():\n pass",
"def number(self):\n if self.content is None:\n return -1\n return self.content.number",
"def get_lengte(self):",
"def get_oglindit(numar):\n if numar < 0:\n return numar\n numar_str = str(numar)\n numar_str = numar_str[::-1]\n return int(numar_str)",
"def number(self):\n return self._num",
"def get_num(self):\n l = []\n current = self\n while current:\n l.append(str(current.value))\n current = current.next\n\n l = list(reversed(l))\n string = \"\".join(l)\n return int(string)",
"def getInteger(self):",
"def getInteger(self):",
"def leerUltrasonido() -> int:\n pass",
"def getFreiePlaetze(self):\n frei = 0\n for reihe in self.belegung:\n for platz in reihe:\n if not platz.belegt(): frei += 1\n return frei",
"def __int__(self) -> int:\n\n return self.centi",
"def valeur(self) -> int:\r\n if self.signe == '0':\r\n return super().valeur()\r\n if self == self.Minimum:\r\n return -2 ** (len(self) - 1)\r\n return - abs(self).valeur()",
"def valor_absoluto(numero):\r\n if numero >= 0:\r\n return numero\r\n else:\r\n return - numero",
"def count():\r\n c = eNine.get()\r\n eNine.delete(0, END)\r\n count = int(c)\r\n count += 1\r\n eNine.insert(0, count)",
"def numer(self, a):\n return a",
"def get_number(self):\n return self.__number",
"def z(self) -> int:",
"def get_num_val(self):\n if self.val == \"A\":\n return 0\n if self.val == \"K\":\n return 12\n if self.val == \"Q\":\n return 11\n if self.val == \"J\":\n return 10\n return_value = int(self.val) - 1\n return return_value",
"def _pega_no(self, index):\n ponteiro = self.inicio\n for i in range(index):\n if ponteiro:\n ponteiro = ponteiro.prox\n else:\n raise IndexError(\"list index out of range\")\n return ponteiro",
"def getNum(self) :\n return self._num",
"def getNumber(self):\n return self.number",
"def leerIRsiguelineas() -> int:\n pass",
"def numero_endereco(self):\n return self._numero_endereco",
"def add_neurone(self, couche, nbr=1):\n if self.control == 0:\n if couche >= 0 and couche <= len(self.couche) - 1 and nbr > 0:\n self.couche[couche] += nbr\n else:\n print(\"Le réseau est deja créé, vous en pouvez plus le modifier\")",
"def cb_peut_tenir(self):\n nb = 0\n for membre in self.membres:\n if membre.tenu is None and membre.peut_tenir():\n nb += 1\n\n return nb",
"def conteo_numero(numero,letras=\"abcdefghijklmnñopqrstuvwxyz\"):\n dicc=conteos_mensaje(numero_a_letras(numero),letras)\n if numero==1:\n if 'z' in letras:\n dicc['z']+=1\n else:\n if 'c' in letras:\n dicc['c']+=1\n if 'e' in letras:\n dicc['e']+=1\n if 's' in letras:\n dicc['s']+=1\n return dicc",
"def gera_num_cc(abv):\n \n # Ao recebermos a indicacao de que entidade se pretende gerar um numero, usamos a funcao auxiliar escolhe_iin_comp para escolher aleatoriamente os digitos iniciais e o comprimento do cartao.\n # O numero final comeca por ser os digitos iniciais, juntando a estes, do lado direito, numeros aleatorios ate chegarmos ao comprimento pretendido menos 1. O ultimo digito sera o digito de verificacao.\n \n dig_iniciais , comp = escolhe_iin_comp(abv) \n num_cc = dig_iniciais\n \n for i in range(comp-len(dig_iniciais)-1): \n num_cc = num_cc + str(int(random()*10)) \n \n num_cc = num_cc + digito_verificacao(num_cc)\n \n return int(num_cc)",
"def mostrarBicicletasDisponiveis(self) -> int:\n estoque_atual = Loja().mostrarEstoque()\n print(f'Bicicletas disponíveis: {estoque_atual}')\n return estoque_atual",
"def give_verse_num_details(self):\n\t\treturn str(self.kandaNum) + '.' + str(self.vargaNum) + '.' + str(self.subvargaNum) + '.' + str(self.verseNum)",
"def getSlipNum():\n query = Slip.query()\n results = query.fetch(limit = MAX_SLIPS)\n temp = 0\n for result in results:\n if result.number > temp:\n temp = result.number\n slipNum = temp\n slipNum += 1\n return slipNum",
"def getNumber(self):\n return self.__number",
"def n(self):\n pass",
"def pos_number(self):\n return self._pos_number.zfill(2)",
"def orden_llegada(self, agenda):\n from django.db import connection\n with connection.cursor() as cursor:\n cursor.execute(\"\"\"\n select count(*) cantidad from agendamientos_agenda a\n join agendamientos_agendadetalle d on a.id = d.agenda_id\n where a.fecha = ?\n and medico_id = ?\n and turno_id = ? \"\"\", agenda.fecha, agenda.medico.id, agenda.turno.id)\n orden = cursor.fetchone()\n if not orden:\n orden = 0\n\n print(\"maximo orden \" + orden)\n\n return orden+1 # todo controlar que no sea mayor al máximo por médico",
"def GetNotchNum(self):\n num = self.ordChar(self._notch) + 1\n return num",
"def counter(self) -> int:",
"def counter(self) -> int:",
"def to_ordinal(self):\n return mod(self.number - 1 + 39 * (self.number - self.name), 260)",
"def dis(self):\n return self.nlegomena(2)",
"def curve_number(self):",
"def custo(EstadoRestaUm, resultante):\n return 1",
"def atomic_number(self) -> int:\n return elements.index(self.label) + 1",
"def rank(self):\r\n\t\trank = self.n % 13\r\n\t\treturn rank",
"def get_update_number( self ):",
"def tracenb(self):\n trace_nb = self._pna.query('CALC{}:PAR:MNUM?'.format(self._channel))\n if trace_nb:\n return int(trace_nb)\n else:\n raise InstrIOError(cleandoc('''Agilent PNA did not return the\n trace number on channel {} '''.format(self._channel)))",
"def modified_sommerfeld_number(self):\n return (\n self.radius_stator * 2 * self.omega * self.viscosity * (self.length ** 3)\n ) / (8 * self.load * (self.radial_clearance ** 2))",
"def number(self):\n return self._number",
"def number(self):\n return self._number",
"def vindhoogstecognummer(vhcn_lijst):\n try:\n return int(max(vhcn_lijst)[0])\n except IOError:\n return 0\n except IndexError:\n return 0\n except ValueError:\n return 0",
"def get_document_number(self, txt_line, inv_type):\n number = 0\n if txt_line.invoice_id.type in ['in_invoice', 'in_refund']:\n if not txt_line.invoice_id.supplier_invoice_number:\n raise exceptions.except_orm(\n _('Invalid action !'),\n _(\"Unable to make txt file, because the bill has no\"\n \" reference number free!\"))\n else:\n number = self.get_number(\n txt_line.invoice_id.supplier_invoice_number.strip(),\n inv_type, 20)\n elif txt_line.invoice_id.number:\n number = self.get_number(\n txt_line.invoice_id.number.strip(), inv_type, 20)\n return number",
"def _get_id(self, num, comp_dict, attr_name='' , change=False) -> int:\n keys = comp_dict.keys()\n for key in keys:\n b_id, min_v, max_v = key\n if min_v <= num and num < max_v:\n if change and attr_name:\n self.counter_change(attr_name, b_id)\n value_count_dict = self._value_count_dict[attr_name]\n\n if num in value_count_dict:\n value_count_dict[num][0] += 1\n else:\n value_count_dict[num] = [1]\n\n return b_id\n # print('did not find: ...')\n # print(num)\n # print(comp_dict)\n return -1",
"def getN(self):\r\n return self.N",
"def test_hasta_el_numeral(self):\n fwa = FakeWikiArchivo('abcd <a href=\"/wiki/foobar#xy\">FooBar</a> dcba')\n _, r = self.peishranc(fwa)\n self.assertEqual(r, [(u'foobar', SCORE_PEISHRANC)])",
"def get_iter_num(self):\n\tif len(self.cost) > 0:\n first_key = list(self.cost.keys())[0]\n num = len(self.cost[first_key]) - 1\n\telse:\n\t first_key = list(self.prim_var.keys())[0]\n num = len(self.prim_var[first_key]) - 1\n\treturn num",
"def acc_num_gen():\n try:\n max_agg = Dataset.objects.all().aggregate(models.Max('accession_number'))\n max_val = max_agg['accession_number__max']\n match = re.search('\\d+', max_val)\n int_val = int(max_val[match.span()[0]:match.span()[1]])\n except (TypeError, AttributeError):\n int_val = 0\n \n if int_val < 200:\n int_val = 200\n else:\n int_val += 1\n return \"ds%06d\" % (int_val)",
"def getN(self)->int:\n return self.n",
"def fget(self):\n if not hasattr(self, \"_n\"):\n self._n = 0\n self._n += 1\n return self._n",
"def pgcd_numerateurs(self):\n\t\tl = []\n\t\tif self.__valide:\n\t\t\tfor m in self.liste_decroissante():\n\t\t\t\te = abs(m.get_coefficient().get_num().valeur())\n\t\t\t\tif not (e in l):\n\t\t\t\t\tl.append(e)\n\t\treturn pgcd_liste(l)",
"def nom(self, i):\n pass",
"def entero(self):\n return int(\"\".join(self.binario), 2)",
"def toordinal(self):\n return 0",
"def toordinal(self):\n return 0",
"def numer(self, a):\n raise NotImplementedError",
"def get_reynolds_number(self, velocity, refLength):\n\t\tre_num = self.Density * velocity * refLength / self.Dynamic_viscosity\n\t\treturn re_num",
"def get(self) -> int:\n return self.nums.pop() if self.nums else -1",
"def n(self) :\n\t\ttry :\n\t\t\treturn self._n\n\t\texcept Exception as e:\n\t\t\traise e",
"def prebaci_dan_nazad(self):\r\n value = int(self.brojDanaCombo.currentText()) #integer broj dana\r\n self.emit(QtCore.SIGNAL('promjeni_datum(PyQt_PyObject)'), -value)\r\n msg = 'request pomak {0} dana unazad'.format(value)\r\n logging.info(msg)",
"def _c13_num(pep_query, isolation_mz):\n return int(\n round(\n pep_query.pep_exp_z *\n abs(pep_query.pep_exp_mz - isolation_mz)\n )\n )",
"def nb(self, compte=None, datel=None, rapp=False, exclude_id=None):\n query = Ope_titre.objects.filter(titre=self)\n if compte:\n query = query.filter(compte=compte)\n if rapp:\n query = query.filter(ope_ost__rapp__isnull=False)\n if datel:\n query = query.filter(date__lte=datel)\n if exclude_id:\n query = query.exclude(pk=exclude_id)\n nombre = query.aggregate(nombre=models.Sum('nombre'))['nombre']\n if not nombre:\n return 0\n else:\n return decimal.Decimal(smart_text(nombre))",
"def cria_carro_ford(self):\n\n self.total_de_carros_ford += 1\n print(\"Carro Ford #\", self.total_de_carros_ford, \" criado\")",
"def atomic_number(self):\n return 0",
"def Order(self) -> int:",
"def _get_docket_numbers(self):\n return None",
"def COUNTER_TOTAL():\n return 3",
"def getInteger(self):\n pass",
"def number(self) -> int:\n return self._id",
"def getnumanz(self): # 3\n res,resargs = self.__obj.getnumanz()\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _numanz_return_value = resargs\n return _numanz_return_value",
"def contar_letras(cad, letra):\n\n n = 0\n for i in cad:\n if i == letra:\n n += 1\n print(n)\n return n",
"def num_id(self) -> str:\n return pulumi.get(self, \"num_id\")",
"def getOccurence(self) -> int:\n ...",
"def REC_YAHTZEE():\n return 12",
"def n(self):\n return self._n",
"def n(self):\n return self._n",
"def get_jumlah():\n return Hero.__jumlah",
"def obtem_n_balcao(self):\n\n return self.n_balcao",
"def fim_da_rodada(self, recompensa, m, numero_de_cacadores):\n #print('Jogador 4 {}'.format(self.historico[-1]))\n pass",
"def __len__(self):\n return self.aktualni_delka",
"def celex_pron_loc(language, lemma):\n pron = 5\n if language == \"german\" or language== \"dutch\": pron = pron -2 #german one less\n if lemma == \"wordform\": pron += 1\n return pron",
"def tam_relativo(self, code):\n return len(self.compartimentos[code]) / float(self.n)",
"def cliquer_sur_unité(self):",
"def descontarCantidad(self,detalle,producto,cantidad):\n query=LoteModel.obtenerLoteProducto(producto,self.sesion)\n valores=[]\n for a in query:\n loteProducto=LoteProductoModel.buscarLoteProducto(self.sesion,producto,a.codigo).first()\n if cantidad<=loteProducto.cantidad:\n loteProducto.descontarCantidad(cantidad)\n loteProducto.modificar(self.sesion)\n valores.append([loteProducto,cantidad])\n break\n else:\n cantidad-=loteProducto.cantidad\n valores.append([loteProducto,loteProducto.cantidad])\n loteProducto.descontarCantidad(loteProducto.cantidad)\n loteProducto.modificar(self.sesion)\n self.lotesVentas[detalle]=valores\n detalle.agregarLotes(self.sesion,self.lotesVentas[detalle])",
"def prev_num(self):\n return self.page - 1"
] | [
"0.6986856",
"0.668469",
"0.668469",
"0.6638649",
"0.66317236",
"0.6619781",
"0.643815",
"0.6296097",
"0.62792224",
"0.624606",
"0.6207371",
"0.61288446",
"0.61034036",
"0.6069976",
"0.60504335",
"0.6050144",
"0.6050144",
"0.6035699",
"0.5996552",
"0.59795433",
"0.59493756",
"0.59483296",
"0.5938163",
"0.59208757",
"0.59207803",
"0.5906767",
"0.590138",
"0.5884128",
"0.5881211",
"0.5870519",
"0.58692074",
"0.58443624",
"0.5842085",
"0.5824946",
"0.58223003",
"0.58111066",
"0.5794081",
"0.57939917",
"0.57862526",
"0.5757712",
"0.5756614",
"0.5739758",
"0.57243466",
"0.5714593",
"0.57072043",
"0.57072043",
"0.570604",
"0.57013863",
"0.569372",
"0.5692705",
"0.56873775",
"0.568599",
"0.5670943",
"0.5668185",
"0.56669295",
"0.5660635",
"0.5660635",
"0.56452596",
"0.5641187",
"0.5639729",
"0.56232285",
"0.56125057",
"0.56066406",
"0.56062865",
"0.5588698",
"0.5588632",
"0.5579745",
"0.5579024",
"0.5578058",
"0.5575501",
"0.5575501",
"0.5569833",
"0.55648863",
"0.5563362",
"0.5563307",
"0.555851",
"0.55549866",
"0.554771",
"0.55476683",
"0.5542735",
"0.55399656",
"0.5538538",
"0.55257773",
"0.5523088",
"0.5516666",
"0.5515633",
"0.5502271",
"0.55018437",
"0.54878044",
"0.5471463",
"0.54662097",
"0.54662097",
"0.5458446",
"0.54572415",
"0.54546684",
"0.54516935",
"0.54513407",
"0.54464513",
"0.5444996",
"0.5444275",
"0.54433894"
] | 0.0 | -1 |
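A minimal, self-contained usage sketch for get_longest_digit_count_desc from the record above; is_desc is repeated as well because the function depends on it.

def is_desc(x):
    while x > 9:
        if x % 10 > x // 10 % 10:
            return False
        x = x // 10
    return True

def get_longest_digit_count_desc(lst):
    rezultat = []
    for i in lst:
        if is_desc(i):  # keep numbers whose digits are in decreasing order
            rezultat.append(i)
    return rezultat

if __name__ == '__main__':
    print(get_longest_digit_count_desc([321, 123, 44, 507]))  # -> [321, 44]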
Determine whether a number is prime | def is_prime(x):
    if x < 2:  # 0 and 1 are not prime
        return False
    for i in range(2, x // 2 + 1):  # trial division over all candidate factors
        if x % i == 0:
            return False
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def disc(P):\n ans = P.resultant(P.prime()) / P[-1]\n if P.isinteger():\n ans = int(ans.round())\n if P.deg % 4 in [0, 1]:\n return ans\n else:\n return -ans",
"def check_prize(correct_num):",
"def getNumber():",
"def getPrime(bits):\n\twhile(True) :\n\t\t# on continue a tirer des nombres tant que l'on n'a pas trouve de nombre premier\n\t\tp = getrandbits(bits)\n\t\tif(miller_rabin(p,100)) :\n\t\t\treturn p",
"def comprobar_primo(num):\n primo = True\n for i in range(2, num):\n if num%i == 0:\n primo = False\n return primo",
"def carbon_prime(C,p,p0):\r\n \r\n if p > p0:\r\n return C\r\n else:\r\n return .03",
"def primitive_root(num: int) -> int:\n\n if not is_prime(num):\n raise NotPrimeError()\n\n if num == 2:\n return 1\n\n # the prime divisors of p-1 are 2 and (p-1)/2 because\n # p = 2x + 1 where x is a prime\n p_1 = 2\n p_2 = (num - 1) // p_1\n\n # test random g's until one is found that is a primitive root mod p\n while True:\n # g is a primitive root if for all prime factors of p-1, p[i]\n g = random.randint(2, num-1)\n\n # g^((p-1)/p[i]) (mod p) is not congruent to 1\n if not pow(g, (num-1) // p_1, num) == 1:\n if not pow(g, (num-1) // p_2, num) == 1:\n return g",
"def multiplicaciones(): #906609 tiene que darme\n ultimo_palindromo = 0\n total = 0\n for primer_numero in range(100, 1000):\n for segundo_numero in range(100, 1000):\n #total se encarga de hacer la multiplicacion entre los numeros\n total = primer_numero * segundo_numero\n # llamamos a la funcion que verifica si la multiplicacion que envia es un palindromo\n if obtener_palindromo(total):\n #luego de verificar que la multiplicacion era palindromo pasamos a evaluarla hasta llegar al ultimo palindromo\n #entre 100 y 1000\n if ultimo_palindromo < total:\n ultimo_palindromo = total\n return ultimo_palindromo",
"def evaluate_number(number : int)->int:\n if type(number) == int and number >1 and number < 100:\n num = total_numbers = porc = 0\n while porc < number:\n num = num + 1\n clasificate = is_bouncy(str(num))\n result = evaluate(clasificate , num)\n if result:\n total_numbers = total_numbers + 1\n porc = total_numbers * 100 / num\n return num\n return 0",
"def numerize():\n pass",
"def residuo_cero(numero):\n for x in range (1,10):\n if(numero % x == 0):\n return x \n return numero",
"def num(self):\n return self.num",
"def getnumparam(self,partype_): # 3\n if not isinstance(partype_,parametertype): raise TypeError(\"Argument partype has wrong type\")\n res,resargs = self.__obj.getnumparam(partype_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n _numparam_return_value = resargs\n return _numparam_return_value",
"def esprimo(numero):\n\tcontador = 2\n\t\n\tresultado = True\n\t\n\t#Loop principal. Corre hasta que el contador sea numero - 1.\n\twhile contador < numero:\n\t\t#Hace la division.\n\t\tresto = numero % contador\n\t\t\n\t\t#Mientras la division no sea 0, se salta esta parte completamente.\n\t\twhile resto == 0:\n\t\t\t#Si la division es 0, esto entrara en un loop infinito.\n\t\t\tresultado = False\n\t\t\t#por eso rompemos el loop con break\n\t\t\tbreak\n\t\t\n\t\tcontador += 1\n\treturn resultado",
"def componeProbs(p,p_prime):\n return p + p_prime * (1-p)",
"def generarPrimo(self, bits):\n while True:\n p = primes.bigppr(bits)\n if p & 3 == 3:\n return p",
"def number(self):",
"def r1(P):\n assert P.isreal()\n ans = 0\n s = P.sturm()\n while s:\n ans += s\n P = P.gcd(P.prime())\n s = P.sturm()\n return ans",
"def __int__(self) -> int:\n # If denominator is 1:\n if self.denom:\n return (-self.numer_prod() if self.neg\n else self.numer_prod())\n else:\n return int(self.__float__())",
"def num (self):\n return self.value[0]/self.value[1]",
"def nr():\n pass",
"def pe_prob_003(num):\n factors = get_unique_factors(num)\n\n return factors[-1] if factors else 1",
"def numer(self, a):\n return a",
"def CLng(num):\n return int(round(float(num)))",
"def __init__(self, prim):\n self.actual = prim",
"def compute(num):\n # 567 / 9 = 63, 235 / 47 = 5\n num = (num * 63 + 7492) * 5 - 498\n if num < 0: # modulus won't give correct result if number is negative\n num *= -1\n res = (num // 10) % 10\n return res",
"def get_prime_digits_for_one(a: int) -> bool:\r\n b = a\r\n c = 0\r\n c1 = 0\r\n while b > 0:\r\n c1 += 1\r\n n = b % 10\r\n if isprime(n):\r\n c += 1\r\n b = b // 10\r\n if c == c1:\r\n return True\r\n else:\r\n return False",
"def getnumparam(self,partype_):\n numparam_ = ctypes.c_int32()\n res = __library__.MSK_XX_getnumparam(self.__nativep,partype_,ctypes.byref(numparam_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n numparam_ = numparam_.value\n _numparam_return_value = numparam_\n return (_numparam_return_value)",
"def good_prime(p):\n return p % 4 == 3 and probablyPrime(p, accuracy=100)",
"def get_prime(self):\n return self.prime",
"def next_Prim(L):\r\n return 1 + L[0]",
"def problem41():\n for i in range(len(PANDIGITAL), 1, -1):\n cur_max = 0\n for p in itertools.permutations(PANDIGITAL[:i]):\n n = int(\"\".join(p))\n if pelib.is_prime(n) and n > cur_max:\n cur_max = n\n\n if cur_max > 0:\n return cur_max",
"def premier(p: int) -> bool:\n if p < 2: return False\n k = 2\n while k**2 <= p:\n if p%k == 0:\n return False\n k+=1\n return True",
"def get_oglindit(numar):\n if numar < 0:\n return numar\n numar_str = str(numar)\n numar_str = numar_str[::-1]\n return int(numar_str)",
"def problem2(m, p):\n total = 0\n for k in range(m, m ** p):\n if is_prime(k):\n total = total + sum_of_digits(k)\n return total",
"def _rawprng(self):\n self.p += 1 \n if self.p >= self.o:\n\t\t\tself.p = 0\n t = 1768863 * self.s[self.p] + self.c * 2.3283064365386963e-10\n self.c = int(t) | 0\n self.s[self.p] = t - self.c\n return self.s[self.p]",
"def eFeliz(numero, vezes):\r\n\tfor i in range(0,vezes):\r\n\t\tval = 0\r\n\t\tfor k in retornaDigitos(numero):\r\n\t\t\tval += pow(k,2)\r\n\t\tif val == 1:\r\n\t\t\treturn True\r\n\t\telse:\r\n\t\t\tnumero = val;\r\n\t\tprint val\r\n\treturn False",
"def valor_absoluto(numero):\r\n if numero >= 0:\r\n return numero\r\n else:\r\n return - numero",
"def __make_numerator_integer(self):\n while self.numerator % 1 !=0:\n self.denominator *=10\n self.numerator *=10",
"def euler10(num):\n total = 0\n curr_number = 2\n while curr_number < num:\n if projecteuler.is_prime(curr_number):\n total += curr_number\n curr_number += 1\n return total",
"def prim_method(self):",
"def prim_method(self):",
"def _pettifor_numbers():\n return { \"Li\": 0.45,\n \"Be\": 1.5,\n \"B\": 2.0,\n \"C\": 2.5,\n \"N\": 3.0, \n \"O\": 3.5,\n \"F\": 4.0,\n \n \"Na\": 0.4,\n \"Mg\": 1.28,\n \"Al\": 1.66,\n \"Si\": 1.92,\n \"P\": 2.18,\n \"S\": 2.44,\n \"Cl\": 2.70,\n \n \"K\": 0.35,\n \"Ca\": 0.60,\n \"Sc\": 0.74,\n \"Ti\": 0.79,\n \"V\": 0.84,\n \"Cr\": 0.89,\n \"Mn\": 0.94,\n \"Fe\": 0.99,\n \"Co\": 1.04,\n \"Ni\": 1.09,\n \"Cu\": 1.20,\n \"Zn\": 1.44,\n \"Ga\": 1.68,\n \"Ge\": 1.92,\n \"As\": 2.16,\n \"Se\": 2.40,\n \"Br\": 2.64,\n\n \"Rb\": 0.30,\n \"Sr\": 0.55,\n \"Y\": 0.70,\n \"Zr\": 0.76,\n \"Nb\": 0.82,\n \"Mo\": 0.88,\n \"Tc\": 0.94,\n \"Ru\": 1.00,\n \"Rh\": 1.06,\n \"Pd\": 1.12,\n \"Ag\": 1.18,\n \"Cd\": 1.36,\n \"In\": 1.60,\n \"Sn\": 1.84,\n \"Sb\": 2.08,\n \"Te\": 2.32,\n \"I\": 2.56,\n \n \"Cs\": 0.25,\n \"Ba\": 0.50,\n \"La\": 0.748,\n \"Hf\": 0.775,\n \"Ta\": 0.83,\n \"W\": 0.885,\n \"Re\": 0.94,\n \"Os\": 0.995,\n \"Ir\": 1.05,\n \"Pt\": 1.105,\n \"Au\": 1.16,\n \"Hg\": 1.32,\n \"Tl\": 1.56,\n \"Pb\": 1.80,\n \"Bi\": 2.04,\n \"Po\": 2.28, \n \"At\": 2.52 }",
"def is_prime(self):\n pass",
"def __Relu_prime(self, x):\n \n return x/x",
"def PN(self, n):\n if not self.isVaild():\n pass\n if n < self.C:\n return self.P0()*(self.r()**n)/math.factorial(n)\n else:\n return self.P0()*(self.r()**n)/(math.factorial(self.C)*self.C**(n-self.C))",
"def primenumber(x):\n if x >= 2:\n for y in range(2,x):\n if not (x % y):\n return False\n else:\n return False\n return True",
"def generate_prime_module() -> int:\n p = generate_random_prime()\n print('[CLIENT LOG] generate prime module (p) with the value equal {}'.format(p))\n return p",
"def prime():\n number = random.randint(1, 100)\n if len(primfacs(number)) == 1:\n return number, 'yes'\n return number, 'no'",
"def is_prime_by_python(num):\n if num == 2:\n return True\n elif num % 2 == 0 or num <= 1:\n # even or smaller then one\n return False\n else:\n res = True\n partial_num_range = int(num / 4) + 1\n\n for i in range(1, partial_num_range):\n if num % (2 * i + 1) == 0:\n res = False\n break\n return res",
"def lentero():\r\n\twhile True:\r\n\t\tn = raw_input(\"Ingrese el valor deseado: \")\r\n\t\ttry:\r\n\t\t\tn_1 = int(n)\r\n\t\t\treturn n_1\r\n\t\texcept ValueError:\r\n\t\t\tprint(\"POR FAVOR: Ingrese un valor numerico y entero\")",
"def Sgn(num):\n n = float(num)\n if n < 0:\n return -1\n elif n == 0:\n return 0\n else:\n return 1",
"def nextPrime(self):\n\t\tnum = self.cur + 1\n\t\twhile not self.isPrime(num):\n\t\t\tnum += 1\n\t\tself.cur = num\n\t\tself.prev.append(num)\n\t\t# print num\n\t\treturn num",
"def isprimitive(g,n):\n\t# SAGE equivalent is mod(g,n).is_primitive_root() in IntegerMod class\n\treturn is_primitive_root(g,n)",
"def P(self,numSeg):\n w=self.x/numSeg\n return (w/3)*(self.F(0)+self.sumaImpar(numSeg,w)+self.sumaPar(numSeg,w)+self.F(self.x))",
"def gera_num_cc(abv):\n \n # Ao recebermos a indicacao de que entidade se pretende gerar um numero, usamos a funcao auxiliar escolhe_iin_comp para escolher aleatoriamente os digitos iniciais e o comprimento do cartao.\n # O numero final comeca por ser os digitos iniciais, juntando a estes, do lado direito, numeros aleatorios ate chegarmos ao comprimento pretendido menos 1. O ultimo digito sera o digito de verificacao.\n \n dig_iniciais , comp = escolhe_iin_comp(abv) \n num_cc = dig_iniciais\n \n for i in range(comp-len(dig_iniciais)-1): \n num_cc = num_cc + str(int(random()*10)) \n \n num_cc = num_cc + digito_verificacao(num_cc)\n \n return int(num_cc)",
"def numer(self, a):\n raise NotImplementedError",
"def getGlideinCpusNum(glidein):\n \n glidein_cpus = 1\n cpus = str(glidein['attrs'].get('GLIDEIN_CPUS', 1))\n if cpus.upper() == 'AUTO':\n glidein_cpus = 1\n else:\n glidein_cpus = int(cpus)\n\n return glidein_cpus",
"def getProperRandom( quant, curNum = None):\n\n\n\tcurRandomNumber = np.ceil( random.random() * quant - 1)\n\t\n\tif curNum == None:\n\n\t\treturn int(curRandomNumber)\n\t\n\telse:\n\t\t\n\t\twhile int(curRandomNumber) - 1 == curNum:\n\t\t\tcurRandomNumber = np.ceil( random.random() * quant - 1)\n\t\n\t\treturn int(curRandomNumber)",
"def get_num_val(self):\n if self.val == \"A\":\n return 0\n if self.val == \"K\":\n return 12\n if self.val == \"Q\":\n return 11\n if self.val == \"J\":\n return 10\n return_value = int(self.val) - 1\n return return_value",
"def get_ppm(self):\n return self.PARA * math.pow((self.get_resistance()/ self.RZERO), -self.PARB)",
"def get_ppm(self):\n return self.PARA * math.pow((self.get_resistance()/ self.RZERO), -self.PARB)",
"def is_numberish(G):\n return True",
"def visit_Num(self, node):\n token = node.token\n if token.type in (INTEGER, FLOAT):\n return token.value",
"def _c13_num(pep_query, isolation_mz):\n return int(\n round(\n pep_query.pep_exp_z *\n abs(pep_query.pep_exp_mz - isolation_mz)\n )\n )",
"def getInteger(self):",
"def getInteger(self):",
"def _is_prime(self, num):\n if num == 2:\n return True\n if num < 2 or num % 2 == 0:\n return False\n for n in range(3, int(num ** 0.5) + 2, 2):\n if num % n == 0:\n return False\n return True",
"def is_prime(num):\n\tif num is 1:\n\t\treturn False\n\tif num % 2 is 0:\n\t\treturn num is 2\n\n\tdivision = 3\n\twhile (division * division) <= num:\n\t\tif num % division is 0:\n\t\t\treturn False\n\t\tdivision += 2\n\treturn True",
"def smallest_num():\n num = 1\n i = 1\n pass",
"def p(x):\n if x<0 or x>1:\n return 0\n else:\n return 1",
"def headbut_miss(num):\r\n\tglobal php\r\n\tif num == 0:\r\n\t\tphp -= 10\r\n\t\treturn 0\r\n\telse:\r\n\t\treturn num",
"def number(self):\n return self._num",
"def parler(self, annonce):\n\n annonceJ = input(\"valeur annonce \" + str(self.numero) + \" :\")\n\n while (int(annonceJ) != 0 and int(annonceJ) <= annonce) or int(annonceJ) > 4:\n annonceJ = input(\"valeur annonce \" + str(self.numero) + \" :\")\n\n return int(annonceJ)",
"def num (x):\n\n if not x:\n return None\n seed = 1\n scale = Fraction(1,1)\n lone = None\n num = Fraction(0,1)\n while not x == seed:\n\n if not lone and le(_abs(sub(x,seed)),pos):\n lone = True\n if le(seed,x):\n seed = seed*2+1\n num += scale\n lone = lone or le(x,seed)\n else:\n seed = seed*2\n num -= scale\n lone = lone or le(seed,x)\n if lone:\n scale *= Fraction(1,2)\n return num",
"def decomp(num):\n base = 10 ** (len(str(num))-1)\n divisor, resto = divmod(num, base)\n return divisor * base, resto",
"def prodi( iterable ):\n p= 1\n for n in iterable:\n p *= n\n return p",
"def test_hasta_el_numeral(self):\n fwa = FakeWikiArchivo('abcd <a href=\"/wiki/foobar#xy\">FooBar</a> dcba')\n _, r = self.peishranc(fwa)\n self.assertEqual(r, [(u'foobar', SCORE_PEISHRANC)])",
"def is_prime(n):\n return mr_prime(n)",
"def isprime(n):\r\n\treturn is_prime(n)",
"def prime(self, y, a):\n return y - a",
"def esPrimo(self, x):\r\n divisor = 0\r\n for i in range(2, x+1):\r\n if x%i == 0:\r\n divisor = divisor + 1\r\n if divisor > 1:\r\n return False\r\n return True",
"def number(self, ket):\n \n final = 0.0\n q = 0\n for i in ket:\n if i != 0:\n final += 2**q\n q += 1 \n return final",
"def relu_prime(z: float) -> float:\n return 1.0 if z > 0 else 0.0",
"def check_prime(p):\n # type: (int) -> RE\n if not gmpy2.is_prime(p):\n return RSAPublicKeyResult.NON_PRIME\n return RSAPublicKeyResult.OK",
"def is_pent(n):\n pen_test = (1 + sqrt(24*n + 1))/6\n if pen_test == int(pen_test):\n return True\n return False",
"def number(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"number\")",
"def detectar_pico_eth():\n\n ultimos_precios = persistence.traer_ultimos_precios_eth()\n prev = int(ultimos_precios[0])\n now = int(ultimos_precios[1])\n porcentaje = 0\n\n porcentaje = calcular_porcentaje(prev, now)\n porcentaje = round(porcentaje, 2)\n if porcentaje > 1.5 or porcentaje < -1.5:\n return porcentaje\n return 0",
"def prime(self, y, a):\n return y - a/(a*(1-a))",
"def coPrime(x):\n\n n = x * 2 + 100000 # Upper limit for range of random integers\n y = random.randint(x * 2, n)\n if (fractions.gcd(x, y) != 1):\n return coPrime(x)\n else:\n return y",
"def is_prime(num):\n import math\n\n\n if num % 2 == 0 and num > 2:\n return False\n for i in range(3, int(math.sqrt(num))+1, 2):\n if num % i == 0:\n return False\n return True",
"def is_number(G):\n return True",
"def equivalence(self, n):\n return n % self.prime",
"def checkPriors(para):\n\t\n\t# extract parameters\n\tA = para[0]\n\tw = para[1]\n\tp = para[2]\n\t\n\t# check them\n\tif (A<0.01 or A>10.0): A = s.uniform.rvs(0.01,10.)\n\t\n\tif (w<0.01 or w>10.0): w = s.uniform.rvs(0.01,10.)\n\t\t\n\tif ( p<0. or p>2*np.pi): p = s.uniform.rvs(0.0,2*np.pi)\n\t\n\treturn np.array([A,w,p])",
"def reprime(self):\n self.__primed = 1",
"def evaluate(self) -> int:",
"def check_prime(x, y):\n pri = (3,5,7,11,13,17,19,23)\n for i in pri:\n if (x % i == 0) and (y % i == 0):\n return i\n return 0",
"def amstrong(num) :\r\n sum = 0\r\n temp = num\r\n order=len(str(num))\r\n while num > 0 :\r\n r = num % 10\r\n sum += r ** order\r\n num //= 10\r\n\r\n if sum == temp :\r\n print(f\"{sum} is Amstrong Number\")\r\n else :\r\n print(\"Number is not Amstrong Number\")",
"def n():\n # For documentation purposes",
"def detectar_pico_btc():\n\n ultimos_precios = persistence.traer_ultimos_precios_btc()\n prev = int(ultimos_precios[0])\n now = int(ultimos_precios[1])\n porcentaje = 0\n\n porcentaje = calcular_porcentaje(prev, now)\n porcentaje = round(porcentaje, 2)\n if porcentaje > 1.5 or porcentaje < -1.5:\n return porcentaje\n return 0",
"def isprime(n):\n\treturn is_prime(n)"
] | [
"0.65439105",
"0.6336349",
"0.6313672",
"0.60297626",
"0.5971417",
"0.59622616",
"0.59525704",
"0.59505856",
"0.5943629",
"0.5943024",
"0.5938789",
"0.58874506",
"0.58661157",
"0.58579594",
"0.58364916",
"0.58217156",
"0.5816223",
"0.5807556",
"0.5789509",
"0.57876563",
"0.57671463",
"0.5749493",
"0.57427573",
"0.5738594",
"0.57364476",
"0.5733605",
"0.57312906",
"0.5717417",
"0.57132584",
"0.57081354",
"0.5698485",
"0.5670422",
"0.56687415",
"0.5642234",
"0.5635284",
"0.5631567",
"0.5626189",
"0.56172025",
"0.5609571",
"0.55963296",
"0.5587705",
"0.5587705",
"0.5586117",
"0.5578499",
"0.55366796",
"0.552069",
"0.5513319",
"0.5503121",
"0.5484574",
"0.5483206",
"0.5478858",
"0.54574025",
"0.54521906",
"0.5447196",
"0.54462445",
"0.5443453",
"0.5440546",
"0.543362",
"0.54323095",
"0.5431727",
"0.5426237",
"0.5426237",
"0.5398534",
"0.53964245",
"0.53940076",
"0.53934366",
"0.53934366",
"0.53918713",
"0.5383722",
"0.53828084",
"0.537708",
"0.53701276",
"0.536616",
"0.53589743",
"0.53510326",
"0.5348434",
"0.534624",
"0.5345197",
"0.53450704",
"0.53391594",
"0.5331486",
"0.5330885",
"0.53224874",
"0.5320269",
"0.5319347",
"0.5318845",
"0.5311495",
"0.530834",
"0.5305023",
"0.53039664",
"0.53020704",
"0.53019893",
"0.5296677",
"0.5288567",
"0.52855676",
"0.5281438",
"0.52791435",
"0.5277668",
"0.52734196",
"0.5262882",
"0.52618474"
] | 0.0 | -1 |
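A minimal usage sketch for the is_prime document above; the test values are illustrative and not part of the record:

def is_prime(x):  # copied from the record above
    if x < 2:
        return False
    for i in range(2, x // 2 + 1):
        if x % i == 0:
            return False
    return True

# Sanity check of the trial-division loop: primes below 20.
assert [n for n in range(20) if is_prime(n)] == [2, 3, 5, 7, 11, 13, 17, 19]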
Determine the numbers that are not prime | def get_longest_all_not_prime(lst):
    rezultat = []  # "rezultat" (Romanian: "result") collects the non-prime values
    for i in lst:
        if not is_prime(i):  # relies on an is_prime helper such as the previous record's
            rezultat.append(i)
return rezultat | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def comprobar_primo(num):\n primo = True\n for i in range(2, num):\n if num%i == 0:\n primo = False\n return primo",
"def euler10(num):\n total = 0\n curr_number = 2\n while curr_number < num:\n if projecteuler.is_prime(curr_number):\n total += curr_number\n curr_number += 1\n return total",
"def primfact(e):\n for n in range(2, e):\n for x in range(2, n):\n if n % x == 0:\n break\n else:\n print n,",
"def primenumber(x):\n if x >= 2:\n for y in range(2,x):\n if not (x % y):\n return False\n else:\n return False\n return True",
"def getPrime(bits):\n\twhile(True) :\n\t\t# on continue a tirer des nombres tant que l'on n'a pas trouve de nombre premier\n\t\tp = getrandbits(bits)\n\t\tif(miller_rabin(p,100)) :\n\t\t\treturn p",
"def prime():\n number = random.randint(1, 100)\n if len(primfacs(number)) == 1:\n return number, 'yes'\n return number, 'no'",
"def esprimo(numero):\n\tcontador = 2\n\t\n\tresultado = True\n\t\n\t#Loop principal. Corre hasta que el contador sea numero - 1.\n\twhile contador < numero:\n\t\t#Hace la division.\n\t\tresto = numero % contador\n\t\t\n\t\t#Mientras la division no sea 0, se salta esta parte completamente.\n\t\twhile resto == 0:\n\t\t\t#Si la division es 0, esto entrara en un loop infinito.\n\t\t\tresultado = False\n\t\t\t#por eso rompemos el loop con break\n\t\t\tbreak\n\t\t\n\t\tcontador += 1\n\treturn resultado",
"def three():\r\n \r\n number = 600851475143\r\n \r\n i = 2\r\n prime = 0\r\n \r\n while number >= i:\r\n if number % i == 0:\r\n prime = i\r\n number = number / i\r\n i = 2\r\n else:\r\n i = i + 1\r\n return prime",
"def esPrimo(self, x):\r\n divisor = 0\r\n for i in range(2, x+1):\r\n if x%i == 0:\r\n divisor = divisor + 1\r\n if divisor > 1:\r\n return False\r\n return True",
"def get_prime_digits_for_one(a: int) -> bool:\r\n b = a\r\n c = 0\r\n c1 = 0\r\n while b > 0:\r\n c1 += 1\r\n n = b % 10\r\n if isprime(n):\r\n c += 1\r\n b = b // 10\r\n if c == c1:\r\n return True\r\n else:\r\n return False",
"def get_prime_factor(n):\n if n % 2 == 0:\n return 2\n for num in range(3, n + 1, 2):\n if n % num == 0:\n return num",
"def main() -> int:\n\n a = None\n for n, g in enumerate(gen_primes(100000, 1000000)):\n repeat, indices = check_if_has_3_repeated_digits(str(g))\n if repeat:\n a = check_for_family_of_primes(repeat, indices, list(str(g)))\n if len(a) > 7 and min(a) > 100000:\n EULER_LOGGER.debug(f\"{a}\")\n a = min([int(i) for i in a])\n break\n\n return a",
"def es_primo(n):\n \n for i in range(2, n):\n if n % i == 0:\n return False\n return True",
"def residuo_cero(numero):\n for x in range (1,10):\n if(numero % x == 0):\n return x \n return numero",
"def primes():\n yield 2\n candidate = 3\n while True:\n for i in range(3, int(sqrt(candidate)) + 1, 2):\n if (candidate % i) == 0:\n break\n else:\n yield candidate\n candidate += 2",
"def primes():\n yield 2\n candidate = 3\n while True:\n for i in range(3, int(sqrt(candidate)) + 1, 2):\n if (candidate % i) == 0:\n break\n else:\n yield candidate\n candidate += 2",
"def isprime(n):\r\n\treturn is_prime(n)",
"def eFeliz(numero, vezes):\r\n\tfor i in range(0,vezes):\r\n\t\tval = 0\r\n\t\tfor k in retornaDigitos(numero):\r\n\t\t\tval += pow(k,2)\r\n\t\tif val == 1:\r\n\t\t\treturn True\r\n\t\telse:\r\n\t\t\tnumero = val;\r\n\t\tprint val\r\n\treturn False",
"def check_prime(x, y):\n pri = (3,5,7,11,13,17,19,23)\n for i in pri:\n if (x % i == 0) and (y % i == 0):\n return i\n return 0",
"def count_prime():\n nums = []\n for i in range(2, 10000):\n if is_prime(i):\n nums.append(i)\n return nums",
"def prime_test(n,p):\n for i in range(2, p):\n thing = 1\n while thing == 1:\n if n % i == 0:\n n = n/i\n else:\n thing = 0\n if n == 1:\n return False\n return True",
"def try_formula(a, b):\n n = 0\n while True:\n x = (n**2) + (a*n) + b\n if not is_prime(x):\n break\n n += 1\n return n",
"def eulerTotient(n): #\n result = 1\n for i in range(2, n): \n if (nt.gcd(i, n) == 1): \n result+=1\n return result",
"def is_prime(num):\n import math\n\n\n if num % 2 == 0 and num > 2:\n return False\n for i in range(3, int(math.sqrt(num))+1, 2):\n if num % i == 0:\n return False\n return True",
"def is_prime(n):\n return mr_prime(n)",
"def return_prime(x):\n \n for m in range(x+1):\n if m!=0 and x%m==0 and m!=1 and x!=m:\n return 'not prime'\n return 'prime'",
"def problem2(m, p):\n total = 0\n for k in range(m, m ** p):\n if is_prime(k):\n total = total + sum_of_digits(k)\n return total",
"def prime_pi(n):\n if n < 2:\n return 0\n\n primes = sieve(n)\n return len(primes)",
"def isprime(n):\n\treturn is_prime(n)",
"def multiplicaciones(): #906609 tiene que darme\n ultimo_palindromo = 0\n total = 0\n for primer_numero in range(100, 1000):\n for segundo_numero in range(100, 1000):\n #total se encarga de hacer la multiplicacion entre los numeros\n total = primer_numero * segundo_numero\n # llamamos a la funcion que verifica si la multiplicacion que envia es un palindromo\n if obtener_palindromo(total):\n #luego de verificar que la multiplicacion era palindromo pasamos a evaluarla hasta llegar al ultimo palindromo\n #entre 100 y 1000\n if ultimo_palindromo < total:\n ultimo_palindromo = total\n return ultimo_palindromo",
"def nPrime(n):\n\n start = 1\n while n != 1:\n start += 2\n if isPrime(start):\n n -= 1\n # end of if\n\n return start",
"def find_prime(num):\n\n if not isinstance(num, int) or isinstance(num, bool):\n raise TypeError(\"number input must be an integer\")\n\n if num <= 1:\n raise ValueError(\"number must be greater than 1\")\n\n pri_num = [2]\n\n # The code below will test if every iteration of 'var' is a prime number\n for var in range(2, num + 1):\n res = 0\n for var2 in pri_num:\n if var == 2:\n break\n elif (var % var2) == 0:\n break\n elif (var2 == pri_num[-1]):\n res = var\n if res:\n pri_num.append(res)\n print(pri_num)\n\n return 0",
"def is_prime(n):\n if n < 2:\n return False\n if n == 2 or n == 3:\n return True\n elif n % 2 == 0:\n return False\n else:\n x = 0\n for i in range(3, n, 2):\n if n % i == 0:\n x = 1\n return x == 0",
"def _is_prime(self, num):\n if num == 2:\n return True\n if num < 2 or num % 2 == 0:\n return False\n for n in range(3, int(num ** 0.5) + 2, 2):\n if num % n == 0:\n return False\n return True",
"def main():\n numbers = int(input())\n count = 0\n for num in range(1, numbers+1):\n if num > 1:\n for i in range(2, num):\n if (num % i) == 0:\n break\n else:\n count += 1\n print(count)",
"def is_prime_by_python(num):\n if num == 2:\n return True\n elif num % 2 == 0 or num <= 1:\n # even or smaller then one\n return False\n else:\n res = True\n partial_num_range = int(num / 4) + 1\n\n for i in range(1, partial_num_range):\n if num % (2 * i + 1) == 0:\n res = False\n break\n return res",
"def isprime(x):\n if x <= 1: return False \n if x % 2 == 0: return x == 2\n for k in range(3, int(sqrt(x))+1, 2): \n if x % k == 0: return False\n return True",
"def is_prime(num):\n\tif num is 1:\n\t\treturn False\n\tif num % 2 is 0:\n\t\treturn num is 2\n\n\tdivision = 3\n\twhile (division * division) <= num:\n\t\tif num % division is 0:\n\t\t\treturn False\n\t\tdivision += 2\n\treturn True",
"def primes():\n yield 1\n primes = []\n for n in itertools.count(2):\n if not any(n % p == 0 for p in primes):\n # No divisor found among previous primes\n yield n\n primes.append(n)",
"def prime_factor(n):\n while n > 1:\n k = 2 \n while n % k != 0:\n k = k+1\n n = n // k\n print(k)",
"def is_prime(number):\n if number <=3:\n return True\n \n for i in range(2, number):\n if number % i == 0:\n return False\n \n return True",
"def is_prime_number(number_):\n flag = 0\n for values in range(2, number_//2):\n if number_ % values == 0:\n flag += 1\n if flag == 1:\n return True\n else:\n return False",
"def isprime(x):\n # 1 and 0 are not primes\n if( x < 2):\n return False\n if( x == 2):\n return True\n # All evens are not prime\n if (x % 2 == 0):\n return False\n\n # check others, up x / 2\n else:\n for y in range(3, int(x**(0.5)+1), 2):\n ##print(y)\n if( x % y == 0):\n return False\n return True",
"def is_prime(num):\n\n if num == 2:\n return True\n for i in range(2, num):\n if num % i == 0:\n return False\n return True",
"def prime_checker(num):\n\n assert num > 0\n\n if num < 2:\n return False\n\n if num == 2:\n return True\n\n if num % 2 == 0:\n return False\n\n n = 3\n\n while n * n <= num:\n\n if num % n == 0:\n return False\n\n else:\n num += 2\n\n return True",
"def isPrime(n):\r\n # Znamo da 1 nije prost broj\r\n if n == 1:\r\n return False\r\n\r\n i = 2\r\n # Petlja se vrti od 2 do int(sqrt(x)) \r\n while i*i <= n:\r\n # Provjera da li i dijeli x bez ostatka\r\n if n % i == 0:\r\n # To znači da n ima faktor između 2 i sqrt(n)\r\n # Stoga nije prost broj\r\n return False\r\n i += 1\r\n # Ako nismo pronašli nijedan faktor u gornjoj petlji\r\n # onda je n prost broj\r\n return True",
"def test_prime(n):\n if SIEVE[n]:\n return True\n else:\n return False",
"def coPrime(x):\n\n n = x * 2 + 100000 # Upper limit for range of random integers\n y = random.randint(x * 2, n)\n if (fractions.gcd(x, y) != 1):\n return coPrime(x)\n else:\n return y",
"def primes():\n yield 2\n found_primes = [2]\n a = 3\n while True:\n for p in found_primes:\n if p**2 > a:\n found_primes.append(a)\n yield a\n a += 2\n break\n elif a % p == 0:\n a += 2\n break",
"def solution(resources, args):\n largest_prime_factor = 1\n number = args.number\n prime_generator = primes.get_prime_generator()\n\n while number > 1:\n prime = next(prime_generator)\n if number % prime == 0:\n number /= prime\n largest_prime_factor = prime\n\n if largest_prime_factor == 1:\n largest_prime_factor = args.number\n\n return largest_prime_factor",
"def is_prime(self):\n pass",
"def isprime(number):\n\n if number == 1:\n return False\n for i in range(2, int(number**0.5) + 1):\n if number % i == 0:\n return False\n return True",
"def prime(self, y, a):\n return y - a",
"def next_prime(n):\n i = n + 1\n while not is_prime(i):\n i += 1\n return i",
"def good_prime(p):\n return p % 4 == 3 and probablyPrime(p, accuracy=100)",
"def recursive_prime(x):\n \n for m in range(x+1):\n if m!=0 and x%m==0 and m!=1 and x!=m:\n return 'not prime'\n return 'prime'",
"def is_prime(num):\n if num < 2:\n return False\n elif num == 2:\n return True\n\n for i in range(2, int(num**(1/2))+1):\n if num % i == 0:\n return False\n\n return True",
"def is_prime(num):\n\n assert num >= 0, \"Num should be a positive integer!\"\n\n if num < 2:\n return False\n\n if num == 2:\n return True\n\n if num % 2 == 0:\n return False\n\n n = 3\n while n * n <= num:\n if num % n == 0:\n return False\n n += 2\n\n return True",
"def isprime(n):\n if n == 2: return True\n if n == 3: return True\n if n % 2 == 0: return False\n if n % 3 == 0: return False\n i = 5\n w = 2\n while i * i <= n:\n if n % i == 0:\n return False\n i += w\n w = 6 - w\n return True",
"def low_prime(n):\n if n < 2 or n - round(n) != 0:\n print('Numbers smaller than 2 and non-integers do not have prime',\n 'factors')\n return None\n for i in range(2, int(sqrt(n) + 2)):\n if n % i == 0 and is_prime(i):\n return i\n return n",
"def is_prime(num):\n if num == 0 or num == 1:\n return False\n for x in range(2, num):\n if num % x == 0:\n return False\n else:\n return True",
"def prime(n):\n \n flag = 1 # this will be 0 --> if no prime \n for i in range(2, n):\n if (n%i == 0):\n flag = 0\n break #Most important to break once number is decided as not prime; even once divisible, no need to check further for that number \n else :\n flag = 1\n \n return flag",
"def is_prime(number):\n number = int(number)\n\n if number < 2:\n return False\n if number < 4:\n return True\n if number % 2 == 0:\n return False\n for d in range(3, number // 2, 2):\n if number % d == 0:\n return False\n return True",
"def isprime(n=936):\n if n < 3: return False\n for i in range(2, n):\n if n % i == 0:\n return False\n return True",
"def is_prime(number):\n #for i in range(2, ceil(sqrt(number))):\n for i in range(2, number):\n if number % i == 0:\n return False\n return True",
"def is_prime(num):\n\n # Quick test for small prime numbers\n if num <= 3:\n return True\n\n # Quick test for even numbers\n # We only need to check up to the square root of n\n for a in range (2, math.sqrt(num)):\n if num % a == 0:\n return False\n \n # Implement an algorithm below to test for primes,\n # e.g. Sieve of Eratosthenes\n # this algorithm just divides by everything\n for i in range(1, int(math.sqrt(num))):\n if num % i == 0:\n return False \n return True\n \n # the code I submitted\n \n def is_prime(num):\n # quick test to test small numbers\n if num <= 3:\n return (True)\n elif num % 2 == 0:\n return (False)\n sqr = int(math.sqrt(num)) + 1\n for a in range(3, sqr, 2):\n if num % a == 0:\n return False\n return True",
"def check_almost_prime(num, primes_list):\n no_factors = 0\n max_prime = num - 1\n for prime in primes_list:\n if prime > max_prime:\n break\n while num % prime == 0:\n no_factors += 1\n num /= prime\n if no_factors > 2:\n return 0\n if no_factors == 2:\n return 1\n return 0",
"def is_prime(num):\n for x in range(2, num + 1):\n if num % x == 0:\n return False\n return True",
"def isprime(n):\n if n % 2 == 0:return False\n return all(n % i for i in range(3, int(n**0.5) + 1, 2))",
"def problem10():\n total_sum = 0\n for x in xrange(1, 2000000):\n if is_prime(x):\n total_sum += x\n return total_sum",
"def is_prime(num):\n if num < 2:\n return False\n\n for i in range(2, num):\n if num % i == 0:\n return True",
"def is_prime(num1):\n num2 = 2\n while num2 < num1:\n if num1 % num2 == 0:\n return False\n num2 += 1\n return True",
"def primos(x):\n def esprimo(n):\n \"\"\"\n Determines whether a natural number is a prime number\n :param n: Agiven natural number\n :return: True if prime, False otherwise\n \"\"\"\n toret = False\n if x == 2:\n toret = True\n elif x % 2 == 0:\n toret = False\n else:\n for i in range(3, x, 2):\n if x % i == 0:\n break\n else:\n toret = True\n # Se ejecuta cuando no se rompe el bucle\n\n return toret\n\n toret = []\n for i in range(0, x):\n if esprimo(i):\n toret.append(i)\n\n return toret",
"def primes():\n yield 2\n found = []\n for i in itertools.count(start=3, step=2):\n for p in found:\n if i % p == 0:\n break\n else:\n yield i\n found.append(i)",
"def count_prime_args(num):\n nums = []\n for i in range(2, num):\n if is_prime(i):\n nums.append(i)\n return nums",
"def prime(self, y, a):\n return y - a/(a*(1-a))",
"def is_prime(num):\n for n in range(2, num):\n if num % n == 0:\n return False\n\n else:\n return True",
"def is_prime(n):\n k = 2\n while n % k != 0:\n k += 1\n if k < n:\n return False\n else:\n return True",
"def test_is_prime_valid(self):\n sol = solution.Solution();\n self.assertTrue(sol.isPrime(2))\n self.assertTrue(sol.isPrime(3))\n self.assertTrue(sol.isPrime(7))\n #self.assertTrue(sol.isPrime(863))",
"def isPrime(num):\r\n if num < 1:\r\n return False\r\n elif num == 2:\r\n return True\r\n else:\r\n for i in range(2, num):\r\n if num % i == 0:\r\n return False\r\n return True",
"def get_larger_prime(n):\n result = n + (1 if n % 2 == 0 else 2)\n while not is_prime(result):\n result += 2\n return result",
"def sopf(n, primes):\r\n total = 0\r\n for p in primes:\r\n if n % p == 0:\r\n total += p\r\n while n // p == 0:\r\n n //= p\r\n return total",
"def is_prime(num: int) -> bool:\n return factorial(num - 1) % num != 0",
"def is_prime(number: int):\n\n for index in range(2, (number//2) + 1):\n if number%index == 0:\n return False\n return True",
"def perfect(num):\r\n for i in xrange(2, num+1):\r\n if (2**(i-1))*(2**(i)-1) > num:\r\n prime_want = i\r\n break\r\n prime = filter(is_prime, range(1, prime_want))\r\n return sum(map(lambda i: (2**(i-1))*(2**(i)-1), prime))",
"def primefactors(num):\n\n while num % 2 == 0:\n print(2)\n num = num / 2\n for i in range(3,int(math.sqrt(num))+1,2):\n while ( num % i == 0 ):\n print (i)\n num = num / i\n if num > 2:\n print (num)",
"def isprime(n):\n if n < 2:\n return False\n if n == 2:\n return True\n if n == 3:\n return True\n if n % 2 == 0:\n return False\n if n % 3 == 0:\n return False\n i = 5\n w = 2\n\n while i * i <= n:\n if n % i == 0:\n return False\n i += w\n w = 6 - w\n return True",
"def getNPrime(num):\n prime_numbers = []\n for i in range(num):\n if isPrime(i + 1):\n prime_numbers.append(i)\n return prime_numbers",
"def is_prime(num):\n if not isinstance(num, int):\n return False\n if num <= 1:\n return False\n if num == 2 or num == 3:\n return True\n if num % 6 in [0, 2, 3, 4]:\n return False\n div_max = int(math.sqrt(num))\n for div in range(5, div_max + 1, 2):\n if num % div == 0:\n return False\n return True",
"def is_prime(num):\n if is_even(num) and num != 2 or num == 1:\n return False\n\n for dd in range(3, int(mt.sqrt(num)) + 1):\n if num % dd == 0:\n return False\n\n return True",
"def isPrime(x):\n \n # your code here\n Prime_num = False\n \n if x > 1:\n # Iterate from 2 to n / 2\n for i in range(2, int(x/2)+1):\n\n # If num is divisible by any number between\n # 2 and n / 2, it is not Prime_num\n if (x % i) == 0:\n Prime_num = False\n break\n else:\n Prime_num = True\n else:\n Prime_num = False\n \n return Prime_num",
"def is_prime(num):\n for i in range(2, num):\n if num % i == 0:\n return False\n return True",
"def isprime(n):\n if n == 1:\n return False\n if n == 2:\n return True\n if n == 3:\n return True\n if n % 2 == 0:\n return False\n if n % 3 == 0:\n return False\n\n i = 5\n w = 2\n\n while i * i <= n:\n if n % i == 0:\n return False\n\n i += w\n w = 6 - w\n\n return True",
"def test_25(self):\n self.assertFalse(is_prime(25))",
"def is_prime(num):\n for n in range(2,num):\n if num % n == 0:\n print \"Not Prime\"\n break\n else: \n print 'The number is prime'",
"def prime_checker(num):\n if num <= 0:\n return \"Error: num must be a positive nonzero integer\"\n elif num <= 3:\n return num > 1\n elif num % 2 == 0 or num % 3 == 0:\n return False\n else:\n k = 5\n while k * k < num:\n if (num % k == 0) or (num % (k+2) == 0):\n return False\n k += 6\n return True",
"def isprime(n):\n if n == 2:\n return True\n if n == 3:\n return True\n if n % 2 == 0:\n return False\n if n % 3 == 0:\n return False\n\n i = 5\n w = 2\n\n while i * i <= n:\n if n % i == 0:\n return False\n\n i += w\n w = 6 - w\n\n return True",
"def is_prime(n):\n if n <= 1:\n return False\n elif n <= 2:\n return True\n elif n % 2 == 0:\n return False\n else:\n for i in range(3, int(n**.5) + 1, 2):\n if n % i == 0:\n return False\n return True",
"def is_prime(n):\n \n for i in range(3, int(n**0.5+1), 2):\n if n % i == 0:\n print(n,'is not prime')\n return False\n\n print(n,'is prime') \n return True",
"def is_prime(num):\r\n if num == 0 or num == 1:\r\n return False\r\n for i in range(2, num):\r\n if num % i == 0:\r\n return False\r\n else:\r\n return True",
"def isprime(n):\n if n == 2:\n return True\n if n == 3:\n return True\n if n % 2 == 0:\n return False\n if n % 3 == 0:\n return False\n\n i = 5\n w = 2\n\n while i ** 2 <= n:\n if n % i == 0:\n return False\n\n i += w\n w = 6 - w\n\n return True"
] | [
"0.72874737",
"0.71890366",
"0.70929486",
"0.7015428",
"0.69767654",
"0.6868733",
"0.68496084",
"0.68421936",
"0.6825397",
"0.6800427",
"0.6788024",
"0.6787027",
"0.6786647",
"0.6762163",
"0.6752517",
"0.6752517",
"0.6743149",
"0.67369336",
"0.6724455",
"0.67098033",
"0.6694257",
"0.66883785",
"0.66870415",
"0.66786975",
"0.66731226",
"0.66728616",
"0.6660145",
"0.66535443",
"0.66373825",
"0.66250616",
"0.6613343",
"0.6598364",
"0.6588698",
"0.65794826",
"0.65786856",
"0.6569863",
"0.6568235",
"0.65652335",
"0.6554254",
"0.65499926",
"0.6541926",
"0.65237856",
"0.651992",
"0.651395",
"0.6508887",
"0.65068036",
"0.65048987",
"0.65024084",
"0.647999",
"0.64768314",
"0.6475016",
"0.64750123",
"0.6474036",
"0.64722794",
"0.64659536",
"0.6462866",
"0.6462464",
"0.6462091",
"0.6460169",
"0.645837",
"0.6457251",
"0.64554876",
"0.64506334",
"0.64499325",
"0.6444342",
"0.64353156",
"0.64339525",
"0.6429307",
"0.64190614",
"0.6416896",
"0.64167744",
"0.64166933",
"0.6408306",
"0.640617",
"0.64054954",
"0.6404313",
"0.639966",
"0.63986814",
"0.6398364",
"0.63946027",
"0.6393961",
"0.63925713",
"0.63907456",
"0.6386794",
"0.6386168",
"0.6369538",
"0.63684565",
"0.636721",
"0.6363316",
"0.636269",
"0.63498765",
"0.6339347",
"0.6336765",
"0.6334395",
"0.63321424",
"0.63301027",
"0.6327559",
"0.6323886",
"0.63231725",
"0.6317226",
"0.63113075"
] | 0.0 | -1 |
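A quick illustrative run of the get_longest_all_not_prime document above; it assumes the is_prime helper from the previous record lives in the same module, and the input list is invented for the example:

def is_prime(x):  # helper copied from the previous record
    if x < 2:
        return False
    for i in range(2, x // 2 + 1):
        if x % i == 0:
            return False
    return True

def get_longest_all_not_prime(lst):  # copied from the record above
    rezultat = []
    for i in lst:
        if not is_prime(i):
            rezultat.append(i)
    return rezultat

print(get_longest_all_not_prime([2, 4, 6, 7, 8, 9, 11]))  # -> [4, 6, 8, 9]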
Send a message when the command /start is issued. | def start(update: Update, context: CallbackContext) -> None:
    output = "Test"  # placeholder reply text
update.message.reply_text(output) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def start(msg: telebot.types.Message):\n logger.info(f'New /start command from id: {msg.from_user.id}.')\n\n bot.send_message(\n msg.from_user.id,\n 'Hello, welcome to TicTacDrop!',\n reply_markup=buttons.get_play_markup()\n )\n\n utils.save_user(msg.from_user)",
"def start(bot, update):\n update.message.reply_text(msgStart)",
"def start(update, context):\n update.message.reply_text(START_HELLO)",
"def start(self, bot, update):\n start_text = \"This is the bot!\"\n bot.send_message(chat_id=update.message.chat_id, text=start_text)",
"def command_start(self, bot, update):\n\n msg = (\"Hi! I'm @MylesBot, a Telegram bot made by @MylesB about \"\n \"@MylesB.\")\n\n self.send_message(bot, update, msg)",
"def start(update, context):\n context.bot.send_message(chat_id=update.effective_chat.id, text=BOT_START_TEXT)",
"def start():\n\n start_server()",
"def start(update, context):\n update.message.reply_text('Hi!')",
"def start(update, context):\n update.message.reply_text('Hi!')",
"def start(update, context):\n update.message.reply_text('Hi!')",
"def start(update, context):\n update.message.reply_text('Hi!')",
"def cmd_start(update, context):\n\n context.bot.send_message(\n chat_id=update.effective_chat.id,\n text=f\"Halo salam kenal! Aku Bot dari Planet Ban {emo_heart_eyes}\")",
"def start(self):\n cmd = self.doCommand(self.args)\n if cmd is not None:\n cmd.join()\n else:\n self.out = self.error",
"def cmd_start(self, app_name=None):\n rc = self.socket_command_with_project('start', app_name)\n return rc",
"def start(bot, update):\n update.message.reply_text('Hi! Welcome to my bot try /help')",
"def _do_start(self, chat_id, user_id, args, update):\n \n self.tclient.send_message('Hallo! Ich bin ein Bot, um dir zu helfen, dir deine Nasensprüche zu merken!', chat_id)",
"def start(update, context):\n\tupdate.message.reply_text('Hi!') \n\tupdate.message.reply_text('TichuBot is ready')",
"def start_execution(self):\n self.send_message(\"control.start\",None)",
"def fstart(wrapper: MessageDispatcher, message: str):\n channels.Main.send(messages[\"fstart_success\"].format(wrapper.source))\n wrapper.target = channels.Main\n start(wrapper, forced=True)",
"def start():\n logging.info(\"Execution Started\")",
"def start_bot(self):\n self.proc = subprocess.Popen(\"./start\", stdin=subprocess.PIPE,\n\t\t\t\t\t\t\t\t\t stdout=subprocess.PIPE,\n\t\t\t\t\t\t\t\t\t cwd=os.path.abspath(self.path))",
"def start_as_service(self):\n from ..program_manager import ProgramManager\n send_action(ProgramManager.NAME, 'start', self.name)",
"def start():",
"def start():",
"def start():",
"def start():",
"def start(bot, update):\n update.message.reply_text('Hi!')",
"def start_cmd(wrapper: MessageDispatcher, message: str):\n if wrapper.target is channels.Main:\n start(wrapper)",
"def start(self):\n self._msg_disp.start()\n self._msg_disp.process_message(DhsStart())",
"def camstart():\n\n\trespond = send_command('camstart')",
"def start():\n log(\"=========== hook: start ===========\")",
"async def on_start(self):\n m = \"**{}** has started a game of {}! To participate, say `I`! **{} players needed.**\".format(\n self.message.author.display_name, self.name, self.num)\n await client.say(self.message, m)",
"def help_command_handler(update, context):\n update.message.reply_text('Type /start')",
"def start( self ):\n pathCheck( self.command )\n cout = '/tmp/' + self.name + '.log'\n if self.cdir is not None:\n self.cmd( 'cd ' + self.cdir )\n self.cmd( self.command + ' ' + self.cargs % self.port +\n ' 1>' + cout + ' 2>' + cout + '&' )\n self.execed = False",
"def start():\n server = current_server()\n logger.info('Starting Flexx event loop.')\n server.start()",
"def start(update: Update, context: CallbackContext) -> None:\n update.message.reply_text('Bem Vindo!')",
"def start(update, context):\n context.bot.send_message(\n chat_id=update.effective_chat.id, text=\"Hi human\")",
"def start(update,context):\r\n update.message.reply_text('welcome to voice bot')",
"def start(update, context):\n update.message.reply_text(\"Hola, Geeks!\")",
"def start(self):\n\t\tif self._send_greenlet is None:\n\t\t\tself._send_greenlet = gevent.spawn(self._send_loop)",
"def start(_bot, update):\n start_message = 'Hi\\nuse /add for add retro reminder'\n update.message.reply_text(start_message)",
"def start(self, event):\n self.send_presence()\n self.get_roster()\n self.send_message(mto=self.recipient, mbody=self.msg, mtype='chat')\n self.disconnect(wait=True)",
"def start(self):\n self.start_time = dt.datetime.now()\n self.call = ' '.join(sys.argv)\n self.commands = []",
"def start_message(self, update, context):\n\n user = self.User(update)\n output = \"Greetings, we're happy that you decided to join and use the Bus4U service!\\n\" \\\n \"in order to see all the possible commands you can type /help\\n\" \\\n \"Also we want you to know that every command that you type and the server response will\" \\\n \"be logged and you can access your history with /history.\\n\\n\" \\\n \"we hope you'll enjoy the product and wish you the best.\\n Never Miss a Bus.\"\n user.send_message(output)\n self.data_base.log(user, \"*Showed Greeting Message*\")",
"def start(bot, update, session, chat, user):\n if chat.is_maintenance:\n call_tg_func(update.message.chat, 'send_message', ['Hello there'],\n {'reply_markup': admin_keyboard})\n else:\n call_tg_func(update.message.chat, 'send_message', [help_text],\n {'reply_markup': main_keyboard, 'parse_mode': 'HTML'})",
"def on_start(self):\n self.run_in_background(self.__run_client)",
"def start(self):\n self._state = 'Started'",
"def start(update, context):\n update.message.reply_text('Hi! \\n /traccia per tracciare instantaneamente i prezzi \\n /check per far partire il check periodico \\n /stopcheck per far fermare il check periodico')",
"def start(self):\n control_process = mp.Process(target = self._start, args = [])\n control_process.start()",
"def start():\n\n print(\"Hi. I'm your Amazon customer service assistant.\")\n print('What can I help you about your orders?')",
"def start(self):\n if self._start_event is None:\n _call_spawn_callbacks(self)\n hub = get_my_hub(self) # pylint:disable=undefined-variable\n self._start_event = hub.loop.run_callback(self.switch)",
"def start(update, context):\n update.message.reply_text('Hi! Glad you found me. Now I am not so lonely and have another friend. Are you ready to play?')",
"def start( *args, **kwargs ):",
"def webserver_start():\n run(_webserver_command())",
"def start(update, context):\r\n update.message.reply_text('Hi!')\r\n r2d2.say(update.message.text)",
"def start(bot, update):\n me = bot.get_me()\n\n # Welcome message\n msg = _(\"Hello!\\n\")\n msg += _(\"I'm {0} and I came here to help you.\\n\").format(me.first_name)\n msg += _(\"What would you like to do?\\n\\n\")\n msg += _(\"/support - Opens a new support ticket\\n\")\n msg += _(\"/settings - Settings of your account\\n\\n\")\n\n # Commands menu\n main_menu_keyboard = [[telegram.KeyboardButton('/support')],\n [telegram.KeyboardButton('/settings')]]\n reply_kb_markup = telegram.ReplyKeyboardMarkup(main_menu_keyboard,\n resize_keyboard=True,\n one_time_keyboard=True)\n\n # Send the message with menu\n bot.send_message(chat_id=update.message.chat_id,\n text=msg,\n reply_markup=reply_kb_markup)",
"def TerminalClientStart(self):\n pass",
"def ConsoleStart(self):\n pass",
"def start(self):\n self.save_checkpoint(\"setup\")\n\n logging.info(\"Starting game...\")\n body = render_message(\n \"welcome.html\",\n game_name=self.name,\n night_end=self.night_end.strftime(\"%I:%M %p\"),\n day_end=self.day_end.strftime(\"%I:%M %p\"),\n players=self.game.players,\n )\n self.send_message(mafia.events.PUBLIC, \"%s: Start\" % self.name, body)\n self.game.begin()\n self.started = True\n\n self.save_checkpoint(\"start\")",
"def on_start(self):\n self.logger.debug(\"Starting...\")\n pass",
"def startCommand(self):\n commandLine = \"su - %s -c \\\"%s/startservers \\\" \" % (self.runAsUser, self.boHome)\n return self.submitCommand(commandLine)",
"def start(self):\n gevent.spawn(self.run)",
"def start(self):\n ...",
"def sendStart (self, args) :\n \n data = streamModule.WriteBuffer()\n \n data.writeStruct('B', len(args))\n\n for arg in args :\n data.writeVarLen('B', arg)\n\n return self.sendCommand(\"CMD_IN_DO_START\", data.getvalue()).addCallback(self._sendStart_result)",
"def start(self):\n if self.debug:\n print(\"%s start\" % self.name)",
"def start(self) -> None:\n logger.log(self.log_level, f'Start {self.name}...')\n self.started = True\n super().start()",
"def start_handler(bot, update):\n chat_id = update.message.chat_id\n\n logger.info(f\"Start command received. Chat ID: {chat_id}\")\n\n update.message.reply_text(config.START_MESSAGE)\n\n chat_ids = redis.get('chat_ids')\n\n if chat_ids is None:\n chat_ids = list()\n\n else:\n chat_ids = json.loads(chat_ids)\n\n chat_ids.append(chat_id)\n\n # redis.set('chat_ids', chat_ids)\n redis.set('chat_ids', list(set(chat_ids)))",
"def start(name, path):\n app.start(name, path)",
"def do_start(self, args) :\r\n if not self.wait2start:\r\n Thread(target=self.start_loop).start()\r\n self.wait2start = True\r\n else:\r\n self.__Logger.warn(\"Waiting for simulators to be ready. To force start, type \\\"forcestart\\\"\")",
"def on_start(self, ctx):\n pass",
"def start(self) -> None:\n ...",
"def start(self) -> None:\n ...",
"async def start_program(self):\n program_payload = self._program[\"program\"]\n await self._send_program_message(program_payload)",
"def start(self, event):\n\t\tself.get_roster()\n\t\tself.send_presence()",
"def startapp(self, command):\n e = self.emu\n e.alt(\"F2\")\n e.shortwait()\n e.clickat(self.screen.center)\n e.shortwait()\n e.type(command + \"\\n\")\n e.longwait()",
"def activate(self):\n self.start()",
"def start(self, **kwargs):\n return self.client.api.start(self.id, **kwargs)",
"def start(self):\n assert(self._cbs is not None)\n self._as.start() # start the server",
"def start(self):\r\n pass",
"def _start(self):\n pass",
"def start(self, bot, update):\n print(update.message[\"chat\"])\n start_text = \"Eu sou o bot da IEEE Computer Society UnB \" \\\n \"e gerencio os repositórios da instituição. \" \\\n \"Digite /help para saber mais sobre meus comandos.\"\n bot.send_message(chat_id=update.message.chat_id, text=start_text)\n\n start_text = \"Agora vamos lá. Em que posso ajudá-lo?\"\n bot.send_message(chat_id=update.message.chat_id, text=start_text)\n return",
"def start(update: Update, context: CallbackContext) -> None:\n if update.effective_chat:\n context.bot.send_message(chat_id=update.effective_chat.id,\n text=\"I'm a bot, please talk to me!\")",
"def start(self, **kwargs):\n pass",
"def start(self, **kwargs):\n pass",
"def do_start(self, line):\n\n if not line:\n line = \"cortex\"\n\n # First, check that the name isn't already taken\n clients = self.registry.get_clients()\n if clients.has_key(line):\n print \"A server already exists with that name (%s)\" % line\n return False\n\n subprocess.Popen([\"python\", \"cortex.py\", line])\n # Wait for the system to init\n time.sleep(1)\n print \"Started server, connecting...\"\n return self.do_connect(line)",
"def start():\n request.start = time.time()",
"def run(self):\n self.process.start()",
"def start (self):\n pass",
"def start (self):\n pass",
"def start(self):\n self.p.start()",
"def start(self):\n pass",
"def start(self):\n pass",
"def start(self):\n pass",
"def start(self):\n pass",
"def start(self):\n pass",
"def start(self):\n pass",
"def start(self):\n pass",
"def start(self):\n pass",
"def start():\n app.run()",
"def Start(self):\n\n\n\n assert not self._process, 'Start() can only be called once'\n self._process = subprocess.Popen(self._args)"
] | [
"0.7444013",
"0.7397156",
"0.72363275",
"0.7153408",
"0.7149207",
"0.7125708",
"0.7041666",
"0.6861407",
"0.6861407",
"0.6861407",
"0.6861407",
"0.6860805",
"0.6807865",
"0.6803813",
"0.6761418",
"0.67429066",
"0.67300147",
"0.6695136",
"0.66893226",
"0.6686253",
"0.665731",
"0.6657198",
"0.66384566",
"0.66384566",
"0.66384566",
"0.66384566",
"0.66287136",
"0.6605683",
"0.65928024",
"0.65890104",
"0.6576509",
"0.6570414",
"0.6568209",
"0.6566791",
"0.65648675",
"0.655804",
"0.6557797",
"0.65273434",
"0.6513776",
"0.6509305",
"0.6504141",
"0.6487619",
"0.6481772",
"0.6473795",
"0.64678764",
"0.64471406",
"0.643899",
"0.6420361",
"0.64033383",
"0.6385795",
"0.6385143",
"0.63776046",
"0.6372776",
"0.63548136",
"0.63531476",
"0.6342173",
"0.6334504",
"0.63202536",
"0.6318809",
"0.631121",
"0.63043857",
"0.62740463",
"0.6268867",
"0.6241434",
"0.6229469",
"0.6202952",
"0.61979306",
"0.61882174",
"0.6167771",
"0.6162879",
"0.6161418",
"0.6161418",
"0.6157599",
"0.6155772",
"0.6155734",
"0.61532944",
"0.6153206",
"0.6152317",
"0.6150958",
"0.61351836",
"0.6134756",
"0.6122201",
"0.6120345",
"0.6120345",
"0.61146665",
"0.61143816",
"0.6105666",
"0.61043495",
"0.61043495",
"0.60979676",
"0.6090836",
"0.6090836",
"0.6090836",
"0.6090836",
"0.6090836",
"0.6090836",
"0.6090836",
"0.6090836",
"0.60905194",
"0.60882163"
] | 0.62268615 | 65 |
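A minimal wiring sketch for the /start handler above, assuming python-telegram-bot v13 (which matches the Update/CallbackContext signature in the record); the bot token string is a placeholder, not taken from the record:

from telegram import Update
from telegram.ext import Updater, CommandHandler, CallbackContext

def start(update: Update, context: CallbackContext) -> None:
    """Send a message when the command /start is issued."""
    update.message.reply_text("Test")

updater = Updater("BOT_TOKEN")  # placeholder token (assumption)
updater.dispatcher.add_handler(CommandHandler("start", start))
updater.start_polling()
updater.idle()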
ReminderBot sends you a message to remind you of stuff. | def help_command(update: Update, context: CallbackContext) -> None:
update.message.reply_text('Help!') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def send_reminder():\n\n name = config[\"email\"][\"name\"]\n user = config[\"email\"][\"user\"]\n subject = \"REMINDER: %s\" % sys.argv[1]\n body = sys.argv[2] if len(sys.argv) > 2 else \"\"\n email_helper.send(user, name, user, subject, body)",
"async def remind(id, reminder):\n \n guild = BOT_GLOBAL.get_guild(BOT_GLOBAL.settings.guild_id)\n if guild is None:\n return\n member = guild.get_member(id)\n if member is None:\n return\n\n embed = discord.Embed(title=\"Reminder!\", description=f\"*You wanted me to remind you something... What was it... Oh right*:\\n\\n{reminder}\", color=discord.Color.random())\n try:\n await member.send(embed=embed)\n except Exception:\n channel = guild.get_channel(BOT_GLOBAL.settings.guild().channel_botspam)\n await channel.send(member.mention, embed=embed)",
"def send_reminder(self):\n pass",
"def remind():\n ntftion.notify('reminder', f\"{self.notification}:\\n{self.work_name}\\n{self.work_datetime.hour}: \"\n f\"{self.work_datetime.minute} \", app_icon='reminder.ico', timeout=3)",
"async def remind(self, s:int, message=\"Remind me!\"):\n await asyncio.sleep(int(s))\n await self.bot.reply(\"Reminder: {}\".format(message))",
"async def tick(self):\n need_to_save_reminders = False\n while self.reminders:\n reminded: bool = False\n # First element, but is stored as tuple\n reminder: Reminder = self.reminders[0][1]\n reminder_time: arrow.Arrow = arrow.Arrow.utcfromtimestamp(reminder.reminder_utc_timestamp)\n time_now: arrow.Arrow = arrow.utcnow()\n if reminder_time < time_now:\n reminder = heappop(self.reminders)[1]\n need_to_save_reminders = True\n reminded = True\n person: discord.User = await self._get_user_by_id(reminder.user_id)\n logger.info(f\"Attempting to remind {person.name} of: {reminder.message}\")\n channel: discord.TextChannel = await self._get_channel_by_id(reminder.channel_id)\n # Reminder was done using bot command\n if channel and person and reminder.message_id:\n message: discord.Message = await channel.fetch_message(reminder.message_id)\n await person.send(f\"{message.jump_url}\\nYou wanted to be reminded of: {reminder.message}\")\n # Reminder was done using slash command\n elif person and channel:\n # Send the reminder text\n await channel.send(f\"{person.mention} You wanted to be reminded of: {reminder.message}\")\n if not reminded:\n break\n\n # Save reminder to file because we did remind a person now\n if need_to_save_reminders:\n await self.save_reminders()",
"async def remindme(self, ctx, time=None, *, reminder=None):\n\n time = await timeconv.ConvertStrToTime().convert(ctx, time)\n\n if not time:\n return await ctx.send(f'{ctx.author.mention}, you are missing the time!')\n if not reminder:\n return await ctx.send(f'{ctx.author.mention}, you are missing the reminder message!')\n\n remind_time = timedelta(seconds=time)\n dt = datetime.utcnow() + remind_time\n ft = {\n \"year\": dt.year,\n \"month\": dt.month,\n \"day\": dt.day,\n \"hour\": dt.hour,\n \"minute\": dt.minute,\n \"second\": dt.second,\n \"microsecond\": dt.microsecond\n }\n try:\n with open('data/reminders.json', \"r\") as f:\n data = json.load(f)\n if ctx.author.id not in data.keys():\n data[str(ctx.author.id)] = []\n data[str(ctx.author.id)].append({\n \"reminder\": reminder,\n \"time\": ft\n })\n with open('data/reminders.json', 'w') as f:\n json.dump(data, f)\n except json.decoder.JSONDecodeError as e:\n with open('data/reminders.json', 'w') as f:\n f.write(\"{}\")\n print(e)\n\n await ctx.author.send(f\"A reminder for {datetime(**ft).strftime('%b-%d-%Y %H:%M')} UTC has been set.\")",
"def send_reminder(self, url):\n redditor = praw.models.Redditor(self.client, name=self.identifier)\n redditor.message(\"Reminder from Rmnd.in!\", url)",
"async def remind(self, ctx, *, content):\n\t\tif content.lower().startswith(\"me\"):\n\t\t\tcontent = content[2:].strip()\n\t\tcleanContent = content\n\t\tif cleanContent.lower().startswith(\"on\"):\n\t\t\tcleanContent = cleanContent[2:].strip()\n\t\tif cleanContent.lower().startswith(\"at\"):\n\t\t\tcleanContent = cleanContent[2:].strip()\n\t\tcleanContent = re.sub(r'<(?P<animated>a?):(?P<name>[a-zA-Z0-9_]{2,32}):(?P<id>[0-9]{18,22})>','',cleanContent)\n\t\ttimeContent, time = (search_dates(\n\t\t\tcleanContent, settings={'TIMEZONE': 'UTC', 'RETURN_AS_TIMEZONE_AWARE': True})[0])\n\t\tmessage = content.replace(timeContent, \"\").replace(\"@\",\"@\"+u\"\\u200B\")\n\t\tutcNow = dt.now(timezone.utc)\n\t\tif time< utcNow:\n\t\t\tif time.month == utcNow.month and time.day == utcNow.day:\n\t\t\t\ttime = time + timedelta(hours=24)\n\t\t\telse:\n\t\t\t\ttime = time.replace(time.year + 1)\n\t\t\tif time < utcNow:\n\t\t\t\tawait ctx.send(\"You cannot specify a time in the past.\")\n\t\t\t\treturn\n\t\tawait ctx.send(\"will remind you at `{0} UTC` {1}\".format(time.strftime(\"%b %d, %Y at %H:%M\"), message.strip()))\n\t\tevent = {\n\t\t\t'user': ctx.message.author.id,\n\t\t\t'time': time,\n\t\t\t'message': message\n\t\t}\n\t\tself.scheduleEvent(event)\n\t\tself.saveEvents()\n\t\trxn = utils.getRandEmoji(ctx.message.guild.emojis, \"hug\")\n\t\tif rxn is None:\n\t\t\trxn = utils.getRandEmoji(ctx.bot.emojis, \"harukahug\")\n\t\tawait ctx.message.add_reaction(rxn)",
"def send_reminder(self):\n message_contents = \"This is a reminder that your event: \" + self.event_title + \" takes place on \" + self.event_date + \" in \" + self.event_location\n subject = \"Event Reminder\"\n attendees = self.gameplanuser_set.all()\n for attendee in attendees:\n remindermessage = Message.objects.create(sender=self.event_manager, recipient=attendee, contents=message_contents)\n remindermessage.save()",
"def task_send_reminder_email():\n send_reminder_email()\n logger.info(\"Sent reminder email\")",
"def user_reminder(slack_id, time_difference, device_name):\n try:\n slack.chat.post_message(\n slack_id,\n \"It's been *{}* since you checked out `{}`. Please renew your checkout online or return it \"\n \"to the device lab.\".format(time_difference, device_name),\n as_user=False,\n username=\"DeviceNanny\")\n logging.debug(\"[slack][user_reminder] Reminder sent to user sent\")\n except Exception as e:\n logging.warning(\"[slack][user_reminder] Incorrect Slack ID.\")",
"def send_reminder(self, url):\n variables = {\"url\": url, \"username\": self.contact.user.alias}\n send_template_email(recipients=[self.identifier],\n subject=\"Reminder from Rmnd.in!\",\n from_address=\"[email protected]\",\n variables=variables,\n template=\"email/reminder_email\")",
"async def remindlist(self, ctx):\r\n id = str(ctx.author.id)\r\n if id not in self.bot.data.save['reminders'] or len(self.bot.data.save['reminders'][id]) == 0:\r\n await ctx.reply(embed=self.bot.util.embed(title=\"Reminder Error\", description=\"You don't have any reminders\", color=self.color))\r\n else:\r\n embed = discord.Embed(title=\"{}'s Reminder List\".format(ctx.author.display_name), color=self.color)\r\n embed.set_thumbnail(url=ctx.author.avatar_url)\r\n for i in range(0, len(self.bot.data.save['reminders'][id])):\r\n embed.add_field(name=\"#{} ▫️ {:%Y/%m/%d %H:%M} JST\".format(i, self.bot.data.save['reminders'][id][i][0]), value=\"[{}](https://discordapp.com/channels/{})\".format(self.bot.data.save['reminders'][id][i][1], self.bot.data.save['reminders'][id][i][2]), inline=False)\r\n await ctx.reply(embed=embed)",
"def send_reminders(context):\n try:\n handler = db_connector.DataBaseConnector()\n reminders = handler.get_overdue_reminders()\n except (ValueError, ConnectionError) as err:\n logger.get_logger(__name__).warning(\n 'Unable to fetch reminders', err)\n return\n\n rems_to_close = list()\n for rem in reminders:\n try:\n resp_text, markup = _compile_rem(rem, cancel_rem=False,\n show_dl=True)\n context.message = context.bot.send_message(\n chat_id=rem['user_id'], text=resp_text,\n reply_markup=markup, parse_mode=ParseMode.HTML)\n rems_to_close.append(rem['id'])\n except Unauthorized: # User has no chat with bot\n pass\n except (ValueError, ConnectionError, KeyError):\n _LOGGER.exception('Unable to process reminder')\n try:\n handler.close_reminders(rems_to_close)\n except (ValueError, ConnectionError):\n _LOGGER.exception('Unable to close reminders')",
"def reminder(self, reminder):\n\n self._reminder = reminder",
"async def public_remind_in(\n self,\n message: discord.Message,\n author: discord.User,\n channel: discord.TextChannel,\n time: str,\n reminder_message: str,\n ):\n threshold_reached: bool = await self._user_reached_max_reminder_threshold(author.id)\n if threshold_reached:\n user_reminders = await self._get_all_reminders_by_user_id(author.id)\n return f\"You already have {len(user_reminders)} / {self.reminder_limit} reminders, which is higher than the limit.\"\n\n error_description = \"\"\"\nExample usage:\n!reminder 5d 3h 2m 1s remind me of this\n!reminder 1day 1hour 1min 1second remind me of this\n!reminder 5days 3hours 2mins 420seconds remind me of this\n \"\"\"\n error_embed: discord.Embed = discord.Embed(title=\"Usage of reminder command\", description=error_description)\n\n result = await self._parse_time_shift_from_message(f\"{time} {reminder_message}\".strip())\n if result is None:\n return error_embed\n\n future_reminder_time, reminder_message = result\n reminder: Reminder = Reminder(\n reminder_utc_timestamp=future_reminder_time.timestamp(),\n user_id=author.id,\n user_name=author.name,\n guild_id=channel.guild.id,\n channel_id=channel.id,\n message=reminder_message,\n message_id=message.id if message else None,\n )\n await self._add_reminder(reminder)\n # Tell the user that the reminder was added successfully\n output_message: str = f\"You will be reminded {future_reminder_time.humanize()} of: {reminder_message}\"\n return output_message",
"async def public_remind_at(\n self,\n message: discord.Message,\n author: discord.User,\n channel: discord.TextChannel,\n time: str,\n reminder_message: str,\n ):\n threshold_reached: bool = await self._user_reached_max_reminder_threshold(author.id)\n if threshold_reached:\n user_reminders = await self._get_all_reminders_by_user_id(message.author.id)\n return f\"You already have {len(user_reminders)} / {self.reminder_limit} reminders, which is higher than the limit.\"\n\n time_now: arrow.Arrow = arrow.utcnow()\n\n error_description = \"\"\"\nExample usage:\n!remindat 2021-04-20 04:20:00 remind me of this\n!remindat 2021-04-20 04:20 remind me of this\n!remindat 04-20 04:20:00 remind me of this\n!remindat 04-20 04:20 remind me of this\n!remindat 2021-04-20 remind me of this\n!remindat 04-20 remind me of this\n!remindat 04:20:00 remind me of this\n!remindat 04:20 remind me of this\n \"\"\"\n error_embed: discord.Embed = discord.Embed(title=\"Usage of remindat command\", description=error_description)\n\n result = await self._parse_date_and_time_from_message(f\"{time} {reminder_message}\".strip())\n if result is None:\n return error_embed\n future_reminder_time, reminder_message = result\n\n if time_now < future_reminder_time:\n reminder: Reminder = Reminder(\n reminder_utc_timestamp=future_reminder_time.timestamp(),\n user_id=author.id,\n user_name=author.name,\n guild_id=channel.guild.id,\n channel_id=channel.id,\n message=reminder_message,\n message_id=message.id if message else None,\n )\n await self._add_reminder(reminder)\n # Tell the user that the reminder was added successfully\n output_message: str = f\"You will be reminded {future_reminder_time.humanize()} of: {reminder.message}\"\n return output_message\n else:\n # TODO Fix embed for reminders in the past\n # Check if reminder is in the past, error invalid, reminder must be in the future\n return discord.Embed(\n title=\"Usage of remindat command\", description=f\"Your reminder is in the past!\\n{error_description}\"\n )",
"def start(_bot, update):\n start_message = 'Hi\\nuse /add for add retro reminder'\n update.message.reply_text(start_message)",
"async def reminddel(self, ctx, rid : int):\r\n id = str(ctx.author.id)\r\n if id not in self.bot.data.save['reminders'] or len(self.bot.data.save['reminders'][id]) == 0:\r\n await ctx.reply(embed=self.bot.util.embed(title=\"Reminder Error\", description=\"You don't have any reminders\", color=self.color))\r\n else:\r\n if rid < 0 or rid >= len(self.bot.data.save['reminders'][id]):\r\n await ctx.reply(embed=self.bot.util.embed(title=\"Reminder Error\", description=\"Invalid id `{}`\".format(rid), color=self.color))\r\n else:\r\n with self.bot.data.lock:\r\n self.bot.data.save['reminders'][id].pop(rid)\r\n if len(self.bot.data.save['reminders'][id]) == 0:\r\n self.bot.data.save['reminders'].pop(id)\r\n self.bot.data.pending = True\r\n await self.bot.util.react(ctx.message, '✅') # white check mark",
"async def tick(self):\n room = self.bot.Room.load('19961884194@chatroom')\n await room.ready()\n await room.say(f'i love you -> {datetime.now()}')",
"def send_today_reminder(self, request, pk=None):\n try:\n today_menu = Menu.objects.get(\n menu_date=timezone.localtime(timezone.now()).date()\n )\n today_menu.send_today_menu_slack_each_user()\n except Menu.DoesNotExist:\n return Response({\"detail\": \"Not found.\"}, status=status.HTTP_404_NOT_FOUND)\n return Response(\n {\"detail\": \"Reminder sent successfully.\"}, status=status.HTTP_200_OK\n )",
"def send_feedback_email_task(subject, message, sender, reciever):\n logger.info(\"Reminder email\")\n return send_reminder_mail(subject, message, sender, reciever)",
"def send_message(userid):\n\tsc.api_call(\n\t\t\"chat.postMessage\",\n\t\tchannel=userid,\n\t\ttext=\"Hey there, just wanted to remind you to join <#CQCKS8UN6|secret-snowflake-fa19> by Wednesday night, if you want to participate in Secret Santa this year. It will be lots of fun!\",\n\t\tusername=\"Reminder\",\n\t\ticon_emoji=\":santa:\"\n\t)",
"async def inviteme(self):\r\n\r\n #Your code will go here\r\n await self.bot.say(\"Here is a link to Invite Me: http://bit.ly/TacoBot\")",
"async def tell(client, data):\n conn = client.bot.dbs[data.server]\n split = data.split_message\n\n tables = db.get_table_names(conn)\n if 'tells' not in tables:\n asyncio.create_task(client.message(data.target, 'Tell table uninitialized, ask your nearest bot admin to restart the bot.'))\n\n if len(split) > 1:\n recipient = split[0]\n recipient = recipient.lower()\n message = ' '.join(split[1:])\n else:\n return\n \n telldata = (recipient, data.nickname, message, int(time.time()), '0', '0')\n db.set_row(conn, 'tells', telldata)\n db.ccache()\n\n asyncio.create_task(client.notice(data.nickname, 'Your message will be sent.'))",
"async def _bot(ctx):\n await ctx.send('Yes, the bot is cool.')",
"def send_reminder(self, reminder_id):\n try:\n appt = Reminder.query.filter_by(id=reminder_id).one()\n except NoResultFound:\n log.error(\n {\"message\": \"Received unknown appointment with id {}.\".format(\n reminder_id), \"reminder_id\": reminder_id})\n return\n msg_body = create_message_body(appt)\n message = Message(\n to=appt.contact_num,\n from_=FLOWROUTE_NUMBER,\n content=msg_body)\n try:\n sms_controller.create_message(message)\n except Exception as e:\n strerr = vars(e).get('response_body', None)\n log.critical({\"message\": \"Raised an exception sending SMS\",\n \"exc\": e, \"strerr\": strerr, \"reminder_id\": reminder_id})\n raise self.retry(exc=e)\n else:\n log.info(\n {\"message\": \"Reminder sent to {} for reminder_id {}\".format(\n appt.contact_num, reminder_id),\n \"reminder_id\": reminder_id})\n appt.reminder_sent = True\n db_session.add(appt)\n db_session.commit()",
"async def remainder_command(self, ctx, time: TimeConverter, *, reason):\n timers.Timer(\n self.client, \"remainder\", time, args=(ctx.channel.id, ctx.author.id, reason)\n ).start()\n embed = Embed(color=Color.blurple())\n embed.set_author(\n name=f\"Set a remainder for reason - {reason}\",\n icon_url=ctx.author.avatar_url,\n )\n await ctx.send(embed=embed)",
"async def dm(self, ctx, user: int, *, message):\n user = self.bot.get_user(user)\n try:\n await user.send(message)\n await ctx.send(f'{emote.check} | Success!')\n except:\n return await ctx.send(f\"{emote.xmark} | Could not send message!\")",
"def reply(self, message):\n self.logger.info(\"message came as {}\".format(message))\n message = message.lower()\n if message in [\"start over\", \"get started\", \"hello\", \"hi\", \"say hello\"]:\n self.params = \"\"\n self.readyseteatparams = \"\"\n # self.api.send_text_facebook(\n # self.user_id,\n # 'What type of recipe would you like to make? You can type \"start over\" at any time'\n # )\n # return self.api.send_facebook(self.user_id, self.config.QUESTION_MAIN)\n self.send_welcome_messages()\n return self.api.send_facebook(self.user_id, self.config.QUICK_REPLY_MAIN)\n if message in [\"more\", \"show more\"] and self.data:\n self.index += 5\n m_data = self.config.DEFAULT_TEMPLATE.copy()\n m_data[\"message\"][\"attachment\"][\"payload\"][\"elements\"] = self.data[self.index:self.index + 3]\n return self.api.send_facebook(self.user_id, m_data)\n if message == \"ask-tomorrow-payload\":\n self.usersModule.makeNotificationDaily(self.user_id)\n return self.api.send_text_facebook(self.user_id, \"This notification has been set up.\")\n if message == \"ask-week-payload\":\n self.usersModule.makeNotificationWeekly(self.user_id)\n return self.api.send_text_facebook(self.user_id, \"This notification has been set up.\")\n if message == \"activate notifications\":\n self.usersModule.makeNotificationDaily(self.user_id)\n return self.api.send_text_facebook(self.user_id, \"Notification has been activated.\")\n if message in [\"do-nothing\", \"payload_unsubscribe\"]:\n if message == \"payload_unsubscribe\":\n self.usersModule.deactivateNotification(self.user_id)\n return self.api.send_text_facebook(\n self.user_id,\n 'Notification has been deactivated. You can type \"start over\" anytime.')\n else:\n return self.api.send_text_facebook(\n self.user_id,\n 'You can type \"start over\" when you are looking for new recipes.')\n\n try:\n title, choice = message.split(\"_\")\n except:\n title = None\n choice = message\n\n if title == \"category\":\n self.params = \"\"\n self._type = choice\n if choice == \"dinner\":\n self.params += \"&category=89\"\n self.readyseteatparams += \"&category=89\"\n # self.api.send_text_facebook(self.user_id, \"Select a main ingredient:\")\n # return self.api.send_facebook(self.user_id, self.config.DINNER_INGREDIENTS)\n return self.api.send_facebook(self.user_id, self.config.DINNER_GUICK_REPLY)\n elif choice == \"dessert\":\n self.params += \"&category=88\"\n self.readyseteatparams += \"&category=88\"\n # self.api.send_text_facebook(self.user_id, \"What kind of dessert would you like to make?\")\n # return self.api.send_facebook(self.user_id, self.config.DESSERTS)\n return self.api.send_facebook(self.user_id, self.config.DESSERTS_QUICK_REPLY)\n elif choice == \"breakfast\":\n self.params += \"&category=87\"\n self.readyseteatparams += \"&category=87\"\n # self.api.send_text_facebook(self.user_id, \"What kind of breakfast do you want?\")\n # return self.api.send_facebook(self.user_id, self.config.BREAKFAST_QUESTION)\n return self.api.send_facebook(self.user_id, self.config.BREAKFAST_QUICK_REPLY)\n elif choice == \"appetizer\":\n self.params += \"&category=85\"\n self.readyseteatparams += \"&category=85\"\n # self.api.send_text_facebook(self.user_id, \"What kind of appetizer or snack sounds good?\")\n # return self.api.send_facebook(self.user_id, self.config.APPETIZER_QUESTION)\n return self.api.send_facebook(self.user_id, self.config.APPETIZER_QUICK_REPLY)\n elif choice == \"side dish\":\n self.params += \"&category=95\"\n self.readyseteatparams += \"&category=95\"\n # 
self.api.send_text_facebook(self.user_id, \"Select a main ingredient\")\n # return self.api.send_facebook(self.user_id, self.config.SIDE_DISH_QUESTION)\n return self.api.send_facebook(self.user_id, self.config.SIDE_DISH_QUICK_REPLY)\n else:\n return self.api.send_text_facebook(self.user_id,\n \"I don't know answer that belongs to {} yet\".format(message))\n\n if title == \"main-ingredient\":\n self.mainIngredient = choice\n if choice == \"chicken\":\n self.params += \"&mainingredient=76\"\n self.readyseteatparams += \"&mainingredient=76\"\n elif choice == \"beef\":\n self.params += \"&mainingredient=70\"\n self.readyseteatparams += \"&mainingredient=70\"\n elif choice == \"pork\":\n self.params += \"&mainingredient=249\"\n self.readyseteatparams += \"&mainingredient=249\"\n elif choice == \"seafood\":\n self.params += \"&mainingredient=73\"\n self.readyseteatparams += \"&mainingredient=73\"\n elif choice == \"pasta\":\n self.params += \"&mainingredient=272\"\n self.readyseteatparams += \"&mainingredient=272\"\n elif choice == \"vegetarian\":\n self.params += \"&lifestyle=299\"\n self.readyseteatparams += \"&lifestyle=299\"\n return self.api.send_facebook(self.user_id, self.config.TIME_QUICK_REPLY)\n if title == \"bre-time\":\n self.breakfastTime = choice\n if choice == \"15\":\n self.params += \"&totaltime=15\"\n self.readyseteatparams += \"&totaltime=15\"\n elif choice == \"30\":\n self.params += \"&totaltime=30\"\n self.readyseteatparams += \"&totaltime=30\"\n elif choice == \"45\":\n pass\n recipes = self.api.getRecipes(self.params)\n if not recipes:\n return self.send_no_results()\n viewMoreUrl = self.api.prepareViewMoreUrl(self.readyseteatparams)\n elems = self.api.prepareRecipes(recipes, viewMoreUrl)\n self.data = elems\n m_data = self.config.DEFAULT_TEMPLATE.copy()\n m_data[\"message\"][\"attachment\"][\"payload\"][\"elements\"] = self.data[self.index:self.index + 3]\n r = self.api.send_facebook(self.user_id, m_data)\n self.logger.warning(r)\n return\n if title == \"time\":\n self.time = choice\n self.params += \"&totaltime={}\".format(choice)\n self.readyseteatparams += \"&totaltime={}\".format(choice)\n # self.api.send_text_facebook(self.user_id, \"What sounds Good?\")\n # return self.api.send_facebook(self.user_id, self.config.REGION_DINNER_QUESTION)\n return self.api.send_facebook(self.user_id, self.config.REGION_QUICK_REPLY)\n\n if title == \"region\":\n self.region = choice\n if choice == \"asian\":\n self.params += \"&cuisine=44\"\n self.readyseteatparams += \"&cuisine=44\"\n elif choice == \"italian\":\n self.params += \"&cuisine=46\"\n self.readyseteatparams += \"&cuisine=46\"\n elif choice == \"mediterranean\":\n self.params += \"&cuisine=367\"\n self.readyseteatparams += \"&cuisine=367\"\n elif choice == \"mexican\":\n self.params += \"&cuisine=45\"\n self.readyseteatparams += \"&cuisine=45\"\n elif choice == \"american\":\n self.params += \"&suppresstraits=44,35,355,46,367,45,356,261\"\n\n recipes = self.api.getRecipes(self.params)\n if not recipes:\n return self.send_no_results()\n viewMoreUrl = self.api.prepareViewMoreUrl(self.readyseteatparams)\n elems = self.api.prepareRecipes(recipes, viewMoreUrl)\n self.data = elems\n m_data = self.config.DEFAULT_TEMPLATE.copy()\n m_data[\"message\"][\"attachment\"][\"payload\"][\"elements\"] = self.data[self.index:self.index + 3]\n r = self.api.send_facebook(self.user_id, m_data)\n self.logger.warning(r)\n return\n\n if title == \"dessert\":\n self.dessert = choice\n if choice == \"cookies\":\n self.params += 
\"&trait=48,10,20,110&suppresstraits=22,24&keywords=cookies\"\n self.readyseteatparams += \"&trait=48,10,20,110&keywords=cookies\"\n elif choice == \"cakes\":\n self.params += \"&suppresstraits=24&keywords=cake\"\n self.readyseteatparams += \"&keywords=cake\"\n elif choice == \"pies\":\n self.params = \"sortby=season,rating&order=desc,desc&negativeingredientkeyword=pieces&keywords=pie&suppresstraits=24&category=88\"\n self.readyseteatparams = \"&negativeingredientkeyword=pieces&keywords=pie&category=88\"\n elif choice == \"healthier\":\n self.params += \"&goodforyou=257&goodforyou=258&goodforyou=260\"\n self.readyseteatparams += \"&goodforyou=257&goodforyou=258&goodforyou=260\"\n elif choice == \"seasonal\":\n self.params = \"sortby=season,newest,rating,publisheddate&order=desc,desc,desc,desc&category=88&season=330\"\n self.readyseteatparams = \"&category=88&season=330\"\n elif choice == \"quick\":\n self.params = \"&totaltime=30\"\n self.readyseteatparams = \"&totaltime=30\"\n\n recipes = self.api.getRecipes(self.params)\n if not recipes:\n return self.send_no_results()\n viewMoreUrl = self.api.prepareViewMoreUrl(self.readyseteatparams)\n elems = self.api.prepareRecipes(recipes, viewMoreUrl)\n self.data = elems\n m_data = self.config.DEFAULT_TEMPLATE.copy()\n m_data[\"message\"][\"attachment\"][\"payload\"][\"elements\"] = self.data[self.index:self.index + 3]\n r = self.api.send_facebook(self.user_id, m_data)\n self.logger.warning(r)\n return\n\n if title == \"breakfast\":\n self.breakfastIngredient = choice\n if choice == \"eggs\":\n self.params += \"&mainingredient=72\"\n self.readyseteatparams += \"&mainingredient=72\"\n self.params += \"&trait=9\"\n self.readyseteatparams += \"&trait=9\"\n elif choice == \"casserole\":\n self.params += \"&keywords=casserole\"\n self.readyseteatparams += \"&keywords=casserole\"\n elif choice == \"healthier\":\n self.params += \"&goodforyou=260&goodforyou=258\"\n self.readyseteatparams += \"&goodforyou=260&goodforyou=258\"\n elif choice == \"sweet\":\n self.params += \"&trait=22\"\n self.readyseteatparams += \"&trait=22\"\n # will add something sweet\n pass\n return self.api.send_facebook(self.user_id, self.config.BREAKFAST_TIME_QUICK_REPLY)\n\n if title == \"appetizer\":\n self.appetizerIng = choice\n if choice == \"cheesy\" or choice == \"meaty\":\n if choice == \"cheesy\":\n self.params += \"&keywords=cheese\"\n self.readyseteatparams += \"&keywords=cheese\"\n elif choice == \"meaty\":\n self.params += \"&mainingredient=70&mainingredient=76&mainingredient=249\"\n self.readyseteatparams += \"&mainingredient=70&mainingredient=76&mainingredient=249\"\n recipes = self.api.getRecipes(self.params)\n if not recipes:\n return self.send_no_results()\n viewMoreUrl = self.api.prepareViewMoreUrl(self.readyseteatparams)\n elems = self.api.prepareRecipes(recipes, viewMoreUrl)\n self.data = elems\n m_data = self.config.DEFAULT_TEMPLATE.copy()\n m_data[\"message\"][\"attachment\"][\"payload\"][\"elements\"] = self.data[self.index:self.index + 3]\n r = self.api.send_facebook(self.user_id, m_data)\n self.logger.warning(r)\n return\n elif choice == \"veggies\" or choice == \"healthier\":\n if choice == \"veggies\":\n self.params += \"&mainingredient=77&mainingredient=310\"\n self.readyseteatparams += \"&mainingredient=77&mainingredient=310\"\n elif choice == \"healthier\":\n self.params += \"&goodforyou=260\"\n self.readyseteatparams += \"&goodforyou=260\"\n return self.api.send_facebook(self.user_id, self.config.HOT_OR_COLD_QUICK_REPLY)\n\n if title == \"hot-cold\":\n 
self.appetizerType = choice\n if choice == \"hot\":\n self.params += \"&suppresstraits=252\"\n elif choice == \"cold\":\n self.params += \"&cookingmethod=252\"\n self.readyseteatparams += \"&cookingmethod=252\"\n\n recipes = self.api.getRecipes(self.params)\n if not recipes:\n return self.send_no_results()\n viewMoreUrl = self.api.prepareViewMoreUrl(self.readyseteatparams)\n elems = self.api.prepareRecipes(recipes, viewMoreUrl)\n self.data = elems\n m_data = self.config.DEFAULT_TEMPLATE.copy()\n m_data[\"message\"][\"attachment\"][\"payload\"][\"elements\"] = self.data[self.index:self.index + 3]\n r = self.api.send_facebook(self.user_id, m_data)\n self.logger.warning(r)\n return\n\n if title == \"side-dish\":\n self.sideDish = choice\n if choice == \"potato\":\n self.params += \"&mainingredient=298\"\n self.readyseteatparams += \"&mainingredient=298\"\n elif choice == \"vegetable\":\n self.params += \"&mainingredient=77\"\n self.readyseteatparams += \"&mainingredient=77\"\n elif choice == \"rice\":\n self.params += \"&mainingredient=272\"\n self.readyseteatparams += \"&mainingredient=272\"\n elif choice == \"pasta\":\n self.params += \"&mainingredient=75\"\n self.readyseteatparams += \"&mainingredient=75\"\n elif choice == \"salad\":\n self.params = \"sortby=season,newest,rating,publisheddate&order=desc,desc,desc,desc&category=95&mainingredient=77\"\n self.readyseteatparams = \"&category=95&mainingredient=77&trait=92\"\n elif choice == \"beans\":\n self.params += \"&mainingredient=310\"\n self.readyseteatparams += \"&mainingredient=310\"\n\n recipes = self.api.getRecipes(self.params)\n if not recipes:\n return self.send_no_results()\n viewMoreUrl = self.api.prepareViewMoreUrl(self.readyseteatparams)\n elems = self.api.prepareRecipes(recipes, viewMoreUrl)\n self.data = elems\n m_data = self.config.DEFAULT_TEMPLATE.copy()\n m_data[\"message\"][\"attachment\"][\"payload\"][\"elements\"] = self.data[self.index:self.index + 3]\n r = self.api.send_facebook(self.user_id, m_data)\n self.logger.warning(r)\n return\n isParamInMessage = self.fetch_parameters(message)\n if isParamInMessage:\n recipes = self.api.getRecipes(self.params)\n if not recipes:\n return self.send_no_results()\n viewMoreUrl = self.api.prepareViewMoreUrl(self.readyseteatparams)\n elems = self.api.prepareRecipes(recipes, viewMoreUrl)\n self.data = elems\n m_data = self.config.DEFAULT_TEMPLATE.copy()\n m_data[\"message\"][\"attachment\"][\"payload\"][\"elements\"] = self.data[self.index:self.index + 3]\n r = self.api.send_facebook(self.user_id, m_data)\n self.logger.warning(r)\n return\n return self.api.send_text_facebook(self.user_id, \"You can write ‘start over’ to go to the first step\")",
"def yourmom(update, context):\n chat_id = update.message.chat_id\n bot = context.bot\n\n options = [\n \"Dat zei je mama gisteren ook.\",\n emoji.emojize(\"Dat zei je moeder gisteren ook. :woman_raising_hand:\"),\n \"Ik zou nu een je moeder grap kunnen maken maar ik houd me in.\",\n emoji.emojize(\"Je mama is lief hoor. :woman_raising_hand:\")]\n\n msg = random.choice(options)\n\n time.sleep(HUMAN_DELAY*len(msg))\n bot.send_message(chat_id=chat_id, text=msg,\n reply_to_message_id=update.message.message_id)",
"def send_warning(self):\n\n # Check whether all the necessary parameters for SMS are present\n if self.your_phone != '' and self.twilio_phone != '' and self.account_sid != '' and self.auth_token != '':\n client = Client(self.account_sid, self.auth_token)\n\n try:\n sms = client.messages.create(\n body=\"\"\"Last will: It was at least 30 days since your last check in. \n This is a reminder to check in in the next 24 hours.\"\"\",\n from_=self.twilio_phone,\n to=self.your_phone)\n sms\n print(\"\\nSMS sent\")\n except Exception as e:\n print(f\"An error occurred while trying to send the SMS. Error: {e}\")\n\n else:\n print(\"\\nMissing SMS parameters. SMS not sent\")\n\n # Check whether all the necessary parameters for email are present\n if self.sender_name != '' and self.recipient_email != '' and self.email != '' and self.email_pwd != '':\n message = f\"\"\"It has been at least 30 days since you last checked in. \nYou need to check in in the next 24 hours.\\n\nOtherwise at {self.deadline} the email with the important info will be sent to the designated recipient.\\n\nIn order to reset simply go to the working directory and run python3 last_will.py\"\"\"\n\n # send_email will return 0 if everything went ok, otherwise it will return an error message\n status = send_email(self.sender_name, self.your_email,\n self.email, self.email_pwd,\n subject='Last will: Reminder to check in', unencrypted_message=message)\n\n if status != 0:\n print(status)\n exit(1)\n else:\n print(\"Email sent\\n\")\n\n print(f\"You have until {self.deadline} to check in. \"\n f\"In order to do that simply go to the working directory and run ./last_will.sh\\n\")\n else:\n print(\"Missing email parameters. Email not sent.\\n\")\n exit(1)",
"async def repeat(ctx, *, arg):\r\n if await bMsg(ctx,ctx.message.author.name,client):\r\n return\r\n logger.info('repeat: ' + arg, extra={'invoker': ctx.message.author.name})\r\n await ctx.send(arg)",
"def send_reply(self, reminder_id, confirm=True):\n try:\n appt = Reminder.query.filter_by(id=reminder_id).one()\n except NoResultFound:\n log.error(\n {\"message\": \"Received unknown appointment with id {}.\".format(\n reminder_id), \"reminder_id\": reminder_id})\n return\n if confirm is None:\n msg_content = UNPARSABLE_RESPONSE\n elif confirm is True:\n msg_content = CONFIRMATION_RESPONSE\n elif confirm is False:\n msg_content = CANCEL_RESPONSE\n message = Message(\n to=appt.contact_num,\n from_=FLOWROUTE_NUMBER,\n content=msg_content)\n try:\n sms_controller.create_message(message)\n except Exception as e:\n strerr = vars(e).get('response_body', None)\n log.critical({\"message\": \"Raised an exception sending SMS\",\n \"exc\": e, \"strerr\": strerr, \"reminder_id\": reminder_id})\n raise self.retry(exc=e)\n else:\n appt.confirm_sent = True\n db_session.add(appt)\n db_session.commit()\n log.info(\n {\"message\": \"Confirmation sent to {} for reminder_id {}\".format(\n appt.contact_num, reminder_id),\n \"reminder_id\": reminder_id})",
"async def discord(self, ctx):\n embed = discord.Embed(title='Join the discord today!', color=0x5643fd, description=\"This server is where \"\n \"all of \"\n \"NOVA's updates and \"\n \"important \"\n \"announcements will pass \"\n \"through. The creator of \"\n \"this \"\n \"bot, YeetVegetabales#5313, \"\n \"will also be there testing \"\n \"and letting the communtiy \"\n \"in \"\n \"on things first hand!\")\n embed.set_thumbnail(url='https://images-ext-2.discordapp.net/external/AQCEqCF4Yl_PWAfuA-GReZoDify6'\n '--y4hXOJVkqaDHo/%3Fsize%3D1024/https/cdn.discordapp.com/avatars/709922850953494598'\n '/f78ed19924e8c95abc30f406d47670d7.png')\n embed.add_field(name='Server Invite', value='<:news:730866149109137520> '\n '[Join here](https://discord.gg/Uqh9NXY)')\n await ctx.send(embed=embed)",
"async def _bot():\n await bot.say('Yes, the bot is cool.')",
"def respond(sender, message):\n response = get_bot_response(message)\n send_message(sender, response)",
"async def patreon(self, ctx):\n await ctx.send(\"https://www.patreon.com/joinemm\")",
"def whats_for_lunch(client: SlackClient, data: dict):\n restaurants = [\"Textas Longhorn\", \"Sushi!\", \"I think pizza!\"]\n random_restaurant = random.choice(restaurants)\n\n client.rtm_send_message(data.get(\"channel\"), random_restaurant)",
"def send_reminder(to, body, RETRY, FROM, ASID, TOKEN):\n client = TwilioRestClient(ASID, TOKEN)\n counter = 1\n success = False\n\n #Try resending an SMS if it fails, but retry no more than 5 times.\n error_code, error_message = None, None\n while counter <= RETRY and success is False:\n counter += 1\n try:\n message = client.messages.create(to=to, from_=FROM,\n body=body)\n if message.error_code == None:\n success = True\n error_code = message.error_code\n error_message = message.error_message\n except twilio.rest.exceptions.TwilioRestException as e:\n error_code = e.code\n error_message = e.msg\n except Exception as e: #for all other exception\n error_code = -1\n error_message = e\n\n print error_code, error_message\n return (error_code is None, error_message or \"\")",
"async def message(self, ctx:utils.Context, user:discord.User, *, content:str):\n\n await user.send(content)",
"def schedule_reminder(self, id: int, reminder: str, date: datetime) -> None:\n\n self.tasks.add_job(reminder_callback, 'date', id=str(\n id+random.randint(5, 100)), next_run_time=date, args=[id, reminder], misfire_grace_time=3600)",
"def send_email_reminder(admSessionID, login, subject=\"PMA.core password reminder\"):\n reminderParams = {\"username\": login, \"subject\": subject, \"messageTemplate\": \"\"}\n url = _pma_admin_url(admSessionID) + \"EmailPassword\"\n reminderResponse = _pma_http_post(url, reminderParams)\n return reminderResponse",
"def replyMessage(_email, _name):\n\n _mailer = app.config['MAIL_USERNAME']\n mesg = Message(\"Message Received\", sender=('iSOLveIT Contact', f'{_mailer}'), recipients=[_email])\n mesg.body = f'''Hello {_name},\nThe message you sent to Randy has been received. \nRandy will contact you within 24 hours.\nThank you.\n\nRegards,\nRandy\n\nDate Sent: {dt.now(tz=GMT_tz).strftime('%B %d, %Y, %H:%M ') + 'GMT'}\n'''\n mail.send(mesg)\n return 'OK'",
"def notify_users_of_reminders():\n\n #Get current date into dd/mm/YYYY format.\n now = datetime.datetime.now()\n todays_date = now.strftime(\"%d/%m/%Y\")\n\n #Get current time and convert it to hh:mm.\n todays_time = now.strftime(\"%H:%M\")\n print(todays_time)\n\n #Select all notifications from the database based on that date and time.\n notifications_query = \"\"\"SELECT user, reminder_msg FROM reminders WHERE (date=%s AND time=%s);\"\"\"\n\n #Setup our parameters\n notifications_params = (todays_date, todays_time)\n\n #TODO: Add in cursor.\n #TODO: Run query and get reminder data.\n #TODO: Loop over returned rows, and notify users with send_message_to_irc()",
"async def repeat(self, ctx, *, text):\n await ctx.send(text)",
"def send_email():\n send_mail(\"You've got some problem.\", 'REPAIR IT', '[email protected]',\n ['[email protected]'], fail_silently=False,)",
"async def public_del_remind(self, author: discord.User, message: str):\n try:\n reminder_id_to_delete = int(message) - 1\n except ValueError:\n # Error: message is not valid\n # TODO Replace \"!\" with bot variable\n error_title = f\"Invalid usage of !delreminder\"\n embed_description = f\"If you have 3 reminders, a valid command is is:\\n!delreminder 2\"\n embed = discord.Embed(title=error_title, description=embed_description)\n return embed\n\n user_reminders = await self._get_all_reminders_by_user_id(author.id)\n if 0 <= reminder_id_to_delete <= len(user_reminders) - 1:\n reminder_to_delete: Reminder = user_reminders[reminder_id_to_delete]\n # Find the reminder in the reminder list, then remove it\n logger.info(f\"Trying to remove reminder {reminder_to_delete}\")\n logger.info(f\"Reminders available: {self.reminders}\")\n self.reminders.remove((reminder_to_delete.reminder_utc_timestamp, reminder_to_delete))\n heapify(self.reminders)\n await self.save_reminders()\n # Say that the reminder was successfully removed?\n embed = discord.Embed(\n title=f\"Removed {author.name}'s reminder\", description=f\"{reminder_to_delete.message}\"\n )\n return embed\n else:\n # Invalid reminder id, too high number\n if len(user_reminders) == 0:\n return f\"Invalid reminder id, you have no reminders.\"\n if len(user_reminders) == 1:\n return f\"Invalid reminder id, you only have one reminders. Only '!delreminder 1' works for you.\"\n return f\"Invalid reminder id, you only have {len(user_reminders)} reminders. Pick a number between 1 and {len(user_reminders)}.\"",
"async def cancelReminder(self, ctx, *, reminder: int = -1):\n\t\tevents = filter(lambda x: x['user'] == ctx.author.id, self.events)\n\t\tevents = sorted(events, key=lambda event: event[\"time\"])\n\t\tif reminder <= 0:\n\t\t\tif len(events) > 0:\n\t\t\t\tmessage = \"```fortran\\nPick an event to cancel. based on the ID below\\n\"\n\t\t\t\tfor x in range(len(events)):\n\t\t\t\t\tmessage += f\"{x+1}.\\t{events[x]['time'].strftime('%b %d, %Y at %H:%M')}\\t{events[x]['message']}\\n\"\n\t\t\t\tmessage += \"```\"\n\t\t\telse:\n\t\t\t\tmessage = \"You have no reminders scheduled!\"\n\t\t\tawait ctx.send(message)\n\t\telse:\n\t\t\treminder = reminder - 1\n\t\t\tself.scheduler.remove_job(events[reminder][\"id\"])\n\t\t\tself.events.remove(events[reminder])\n\t\t\tself.saveEvents()\n\t\t\tawait utils.yay(ctx)",
"async def ironman(self, ctx):\n if has_post_permission(ctx.guild.id, ctx.channel.id):\n out = (':tools: __**IRONMAN**__ :tools:\\n' \\\n 'If you want to become an ironman, please react to this post with a :thumbsup:. '\n 'This will **RESET** your account and give you the ironman role. '\n 'You will be unable to trade with other players or gamble. '\n 'In return, you will be able to proudly display your status as an ironman, '\n 'by the way.')\n msg = await ctx.send(out)\n\n if await self.confirm(ctx, msg, out):\n ctx.user_object.reset_account()\n ctx.user_object.is_ironman = True\n ctx.user_object.save()\n # ironman_role = discord.utils.get(ctx.guild.roles, name=\"Ironman\")\n # await ctx.author.add_roles(ironman_role, reason='Wanted to become an ironmeme.')\n name = get_display_name(ctx.author)\n await msg.edit(content=f':tools: __**IRONMAN**__ :tools:\\n'\n f'Congratulations, {name}, you are now '\n 'an ironman!')",
"async def hello(ctx):\r\n await bot.say(\"Hello, {}.\".format(ctx.message.author.name)) # These two are equivalent within a bot.command\r\n await bot.send_message(ctx.message.channel, \"I heard you.\") # These two are equivalent within a bot.command\r\n await asyncio.sleep(1) # This can be used to pause for X time, in this case 1 second\r\n msg = await bot.send_message(ctx.message.author, \"You can also do this.\")\r\n await asyncio.sleep(1)\r\n await bot.delete_message(msg) # bot.say and bot.send_message create message objects which you can do things with\r\n msg = await bot.send_message(ctx.message.author, \"Too slow, it's gone.\")\r\n await asyncio.sleep(5)\r\n await bot.edit_message(msg, msg.content + \" But here's a consolation prize.\")\r\n await bot.add_reaction(ctx.message, '👍') # you can do the same stuff to the ctx.message object\r",
"def notify(self):\n\n def remind():\n \"\"\"\n this function shows a pop-up using windows notification\n \"\"\"\n ntftion.notify('reminder', f\"{self.notification}:\\n{self.work_name}\\n{self.work_datetime.hour}: \"\n f\"{self.work_datetime.minute} \", app_icon='reminder.ico', timeout=3)\n\n self.eisenhower_priority()\n if self.priority:\n while dt.now().day <= self.time_ntf.day and self.status != \"done\":\n if self.priority == 1 and dt.now().time() >= self.time_ntf.time():\n remind()\n time.sleep(5*60)\n\n elif (self.priority == 2) and ((dt.now().hour == self.time_ntf.hour)\n and (dt.now().time().minute == self.time_ntf.time().minute)):\n remind()\n break\n elif self.priority == 3 and dt.now().time().hour == 18:\n remind()\n time.sleep(24 * 3600)\n elif self.priority == 4 and dt.now().weekday() == 6:\n remind()\n time.sleep(7 * 24 * 3600)\n else:\n pass",
"async def welcome_command(ctx):\n await ctx.send(f\"Hello! I am a bot made by {ctx.bot.owner}\")",
"async def repeat(self, ctx, times : int, content : str):\n if times < 6:\n for i in range(times):\n await ctx.send(content)\n else:\n await ctx.send(\"Please don't get me banned by Discord! (Max 5)\")",
"def send_verification_reminder_sms(user):\n # # check for his email preference.\n import django\n\n django.setup()\n from .models import User\n\n user = User.objects.get(id=user.id)\n if not user.is_phone_verified:\n send_sms(\n user.phone,\n REMINDER_VERIFY_PHONE_SMS.format(\n **{\"otp_url\": django_settings.DOMAIN + \"/\" + django_settings.OTP_URL}\n ),\n )\n return None",
"async def on_message(self, message: discord.Message):\n \n if message.author.bot:\n return\n \n else:\n if message.channel == message.author.dm_channel:\n time_difference = (datetime.utcnow() - self.last_timeStamp).total_seconds()\n\n if time_difference < 5:\n return await message.channel.send(\"You are on cooldown!\")\n \n self.channel_id = 857690925810319381\n self.modmail_channel = self.bot.get_channel(self.channel_id)\n embed = discord.Embed(\n title = f\"Modmail From `{message.author}`\", \n description = f\"{message.content}\", \n color = 0x2c2f33\n )\n if message.attachments:\n embed.set_image(url=message.attachments[0].url)\n embed.set_footer(text=f'ID: {message.author.id}')\n\n await self.modmail_channel.send(embed=embed)\n await message.channel.send('Your message has been sent!', delete_after = 7)\n self.last_timeStamp = datetime.utcnow()",
"def reminder(request):\n return jingo.render(request, 'landings/reminder.html')",
"async def patrons(self, ctx):\n patrons = [381491116853166080, 121757433507872768, 132921952544227329]\n content = discord.Embed()\n content.title = \"Patreon supporters ❤\"\n content.description = \"\\n\".join([self.client.get_user(x).mention for x in patrons])\n await ctx.send(embed=content)",
"async def __send_alarm(self, context: ContextTypes.DEFAULT_TYPE) -> None:\n if self.door_status.update_status():\n await context.bot.send_message(\n MESKOID,\n text=f\"🐙{self.door_status.last_line}\",\n )\n await context.bot.send_message(\n QKZKID,\n text=f\"🐙{self.door_status.last_line}\",\n )\n elif self.__verbose:\n await context.bot.send_message(\n context.job.chat_id,\n text=f\"🚀unedited - {self.door_status.last_edit}.\",\n )",
"def tell(self, irc, msg, args, target, text):\n if target.lower() == 'me':\n target = msg.nick\n if ircutils.isChannel(target):\n irc.error('Dude, just give the command. No need for the tell.')\n return\n if not ircutils.isNick(target):\n irc.errorInvalid('nick', target)\n if ircutils.nickEqual(target, irc.nick):\n irc.error('You just told me, why should I tell myself?',Raise=True)\n if target not in irc.state.nicksToHostmasks and \\\n not ircdb.checkCapability(msg.prefix, 'owner'):\n # We'll let owners do this.\n s = 'I haven\\'t seen %s, I\\'ll let you do the telling.' % target\n irc.error(s, Raise=True)\n if irc.action:\n irc.action = False\n text = '* %s %s' % (irc.nick, text)\n s = '%s wants me to tell you: %s' % (msg.nick, text)\n irc.reply(s, to=target, private=True)",
"async def ad_reminder(self):\r\n for guild, settings in self.bot.settings.items():\r\n if settings[\"ad_reminder_channel_id\"]:\r\n guild = self.bot.get_guild(int(guild))\r\n embed = discord.Embed(\r\n title=\"Advertising Reminder\",\r\n color=guild.me.color\r\n )\r\n # Disboard - every 2 hours\r\n if settings[\"ad_reminder_disboard\"] and datetime.now().hour % 2 == 0:\r\n embed.add_field(\r\n name='Disboard',\r\n value=f'`every 2 hours`\\nBump at [WEBSITE](https://disboard.org/server/{guild.id}) \\nor with <@302050872383242240>:\\n`!d bump`'\r\n )\r\n # Disforge - every 3 hours\r\n if settings[\"ad_reminder_disforge\"] and datetime.now().hour % 3 == 0:\r\n embed.add_field(\r\n name='Disforge',\r\n value=f'`every 3 hours`\\nBump at [WEBSITE](https://disforge.com/dashboard)'\r\n )\r\n # Discord.me\r\n if settings[\"ad_reminder_discordme\"] and datetime.now().hour % 6 == 0:\r\n embed.add_field(\r\n name='Discord.me',\r\n value=f'`every 6 hours`\\nBump at [WEBSITE](https://discord.me/dashboard)'\r\n )\r\n # discordservers\r\n if settings[\"ad_reminder_discordservers\"] and datetime.now().hour % 12 == 0:\r\n embed.add_field(\r\n name=\"discordservers\",\r\n value=f'`every 12 hours`\\nBump at [WEBSITE](https://discordservers.com/panel/{guild.id}/bump)'\r\n )\r\n # top.gg\r\n if settings[\"ad_reminder_topgg\"] and datetime.now().hour % 12 == 0:\r\n embed.add_field(\r\n name=\"top.gg\",\r\n value=f'`every 12 hours`\\nBump at [WEBSITE](https://top.gg/servers/{guild.id}/vote)'\r\n )\r\n \r\n\r\n if embed.fields:\r\n role = guild.get_role(int(settings[\"ad_reminder_role_id\"]))\r\n await guild.get_channel(\r\n int(settings[\"ad_reminder_channel_id\"])\r\n ).send(content=role.mention if role else None, embed=embed)",
"def alarm(bot, job):\n message = MESSAGES[job.context]\n if len(message) <= 0:\n message = \"Alert set for right now\"\n bot.sendMessage(job.context, text=message)",
"def to_do_fehrist_tasks_reminder():\n\n from todofehrist.models import Task, User\n from todofehrist.utility import send_email\n\n result = Task.objects.filter(\n completion_status=0, completion_datetime__date=date.today()).values(\"user\").annotate(\n count=Count(\"user\"))\n\n for user_tasks_entry in result:\n email_address = User.objects.get(pk=user_tasks_entry[\"user\"]).email\n send_email(\"ToDoFehrist - Pending Tasks Reminder\",\n f\"You have {user_tasks_entry['count']} pending tasks due today.\",\n email_address)\n\n logging.debug(f\"Reminder Email sent to user with email address {email_address}\")",
"def bye(update) -> None:\n update.effective_message.reply_text(\n \"Thank you, see you soon! 👋\",\n reply_markup=ReplyKeyboardRemove()\n )",
"def send_message(self, text):\n self.__telegram_info.message.reply_text(text)",
"def send_reminders(self, send_reminders):\n\n self._send_reminders = send_reminders",
"async def echo_once(bot):\n update = await bot.wait(text_message)\n await bot.api.send_message(\n params={\n \"chat_id\": update[\"message\"][\"chat\"][\"id\"],\n \"text\": update[\"message\"][\"text\"],\n }\n )",
"def reply(self, text):\n yield self.bot.send(text, to=self.channel)",
"async def server():\n await bot.say(\"https://discord.gg/Eau7uhf\")",
"async def invite(self, ctx):\n invite = f\"https://discordapp.com/api/oauth2/authorize?client_id={self.bot.user.id}&permissions=67584&scope=bot\"\n await ctx.send(embed=discord.Embed(\n color=discord.colour.Colour.teal(),\n description=f\":mailbox_with_mail: [Invite]({invite}) me to your server!\"))",
"def remind_users(self, request, pk=None):\n retreat = self.get_object()\n if not retreat.is_active:\n response_data = {\n 'detail': \"Retreat need to be activate to send emails.\"\n }\n return Response(response_data, status=status.HTTP_200_OK)\n\n # This is a hard-coded limitation to allow anonymous users to call\n # the function.\n time_limit = retreat.start_time - timedelta(days=8)\n if timezone.now() < time_limit:\n response_data = {\n 'detail': \"Retreat takes place in more than 8 days.\"\n }\n return Response(response_data, status=status.HTTP_200_OK)\n\n # Notify a user for every reserved seat\n emails = []\n for reservation in retreat.reservations.filter(\n is_active=True, pre_event_send=False):\n send_retreat_reminder_email(reservation.user, retreat)\n reservation.pre_event_send = True\n reservation.save()\n emails.append(reservation.user.email)\n\n response_data = {\n 'stop': True,\n 'emails': emails\n }\n return Response(response_data, status=status.HTTP_200_OK)",
"async def invite(self, ctx):\n perms = discord.Permissions.text()\n perms.update(read_messages=True, manage_messages=True,\n mention_everyone=False, send_tts_messages=False)\n await ctx.send(f'Invite me here:\\n<{discord.utils.oauth_url(self.bot.user.id, perms)}>')",
"async def admin(ctx):\n info = await(bot.application_info())\n mention = info.owner.mention\n message = \"My administrator is the glorious {}. Fear them, for they are mighty.\".format(mention)\n await(ctx.send(message))",
"async def send_discord(msg, cnl):\n await bot.wait_until_ready()\n await bot.send_message(bot.get_channel(cnl), msg)",
"def echo(update, context):\n if \"Hey friends,\" in update.message.text:\n update.message.reply_text(\"What a great idea. How about today?\")",
"def send_email_notification(request, question, answer):\n subject = 'New answer for your question'\n to_email = [question.user.email]\n html_message = render_to_string('email/answer.html', {\n 'answer': answer,\n 'question': question,\n 'link': request.build_absolute_uri(reverse('question_detail', kwargs={'pk': question.pk})),\n })\n plain_message = strip_tags(html_message)\n send_mail(subject, plain_message, settings.EMAIL_FROM, to_email, html_message=html_message)",
"async def say(self, context,message):\n\t\tawait context.send(message)",
"async def credits(self, ctx):\n embed = discord.Embed(title='{}:'.format(ctx.message.author.name),\n description='\\n:ok_hand: :laughing:\\n :telephone_receiver::shirt::call_me:\\n :jeans: :fire:',\n colour=0xf20006)\n last_message = await self.bot.say(embed=embed)\n await self.bot.add_reaction(last_message, self.emojiUnicode['succes'])",
"def alarm_message(bot, user_id):\n\n def ring_alarm():\n\n message = bot_collection[user_id].get_alarm_message()\n alarm_count = bot_collection[user_id].alarm_count\n\n for i in range(alarm_count):\n bot_message = bot.send_message(text=message, chat_id=user_id)\n time.sleep(1)\n message_id = bot_message.message_id\n bot.delete_message(chat_id=user_id, message_id=message_id)\n\n return ring_alarm",
"def start(bot, update):\n update.message.reply_text('Hi!')",
"def send_verification_reminder_email(user):\n # # check for his email preference.\n import django\n\n django.setup()\n from .models import User\n\n user = User.objects.get(id=user.id)\n if not user.is_email_verified:\n context = get_email_context(user)\n context[\"first_name\"] = user.first_name\n context[\"url\"] = django_settings.ACTIVATION_URL.format(**context)\n VerifyEmailReminderNotification(user.email, context=context).send()\n return None",
"async def note(self, ctx):\n note_embed = discord.Embed(color=discord.Color.blurple())\n note_embed.add_field(name=\"__**Please Note**__\", value=RULES_NOTE)\n await ctx.send(embed=note_embed)",
"def send_reset_email(user):\n msg = emails.reset_email(user)\n try:\n mail.send(msg)\n except Exception as e:\n traceback.print_exc()",
"async def reset(self, ctx):\n now = pendulum.now(KR_TIME)\n quest_reset = pendulum.tomorrow(KR_TIME).add(hours=4) if now.hour > 3 else pendulum.today(KR_TIME).add(hours=4)\n\n reset_countdown = quest_reset.diff(now)\n em = Embed(\n description=f\":alarm_clock: The next reset will happen in {reset_countdown.as_interval()}. :alarm_clock:\")\n await helpers.message_handler(em, ctx, 20, embed=True)",
"def notify(self, thing, redditor, link, body, author):\n if self.quiet or util.is_ignored(redditor):\n return\n\n quote = util.quote(body)\n msg = self.NOTIFICATION_BODY % (thing, link, author, quote)\n\n while msg.__len__() > 10000: # Check message size\n quote_len = quote.__len__() # Get the quote length\n quote = quote[:quote_len - 2] # Chop off a character\n msg = self.NOTIFICATION_BODY % (permalink, author, quote) # Reassign the message\n\n username = redditor.name\n print('Sending message to ' + username + '...', end=\"\")\n self.reddit.send_message(username, 'You have been mentioned in a comment.', msg)\n print('[DONE]')",
"async def send_welcome(message: types.Message):\n await message.reply(\"Hi!\\nI'm Autoencoder Bot!\\nPowered by aiogram.\\n\\n\"\n \"/random - for get generated image from nose\\n\"\n \"send photo - show decoded variant from autoencoder\\n\"\n \"TODO: - make smile on photo, generate glasses and heads\")",
"def reminder(self):\n return self._reminder",
"async def testsay(self, ctx, *, message):\n await ctx.send(message)",
"async def say(self, ctx):\n\n await ctx.message.delete()\n if len(ctx.message.content.split()) < 2:\n return await ctx.send('You must inform all parameters!')\n\n msg = ctx.message.content.split('!say', 1)\n await ctx.send(msg[1])",
"async def wherearemypants():\n await bot.say('justin is a known pants thief. Not saying he took them but he totally probably took them')",
"async def poweroff(ctx):\n await ctx.send(\"Bye\")\n await bot.logout()",
"async def invite(self, context: Context) -> None:\n embed = discord.Embed(\n description=f\"Invite me by clicking [here](https://discordapp.com/oauth2/authorize?&client_id={self.bot.config['application_id']}&scope=bot+applications.commands&permissions={self.bot.config['permissions']}).\",\n color=0xD75BF4,\n )\n try:\n # To know what permissions to give to your bot, please see here: https://discordapi.com/permissions.html and remember to not give Administrator permissions.\n await context.author.send(embed=embed)\n await context.send(\"I sent you a private message!\")\n except discord.Forbidden:\n await context.send(embed=embed)",
"async def custom_interaction(bot, context, response, result):\n if result is None: # Timed out\n edit = 'You took too long to respond...'\n elif result.content:\n edit = 'You replied with \"{}\"'.format(result.content[:100])\n else:\n edit = 'You did not reply with any content text!'\n await response.message.edit(content=edit)",
"def _send_notification() -> None:\n send_notification(\n self,\n \"slack:@aaron\",\n \"New {0} Version: {1}\".format(\n self.properties[CONF_APP_NAME], new_version\n ),\n title=\"New Software 💿\",\n )",
"async def reply(self, ctx, *, m):\n await ctx.message.delete()\n await ctx.send(m)",
"def play_command(update,context):\n update.message.reply_text('Rkrt: Welcome on board. Let\\'s see if you are worth the challenge. To find the invite code and land on planet hackazon you will need to solve this first. Ready for a ride?!')\n time.sleep(5)\n update.message.reply_text('Mx: During intergalactical travel, time does not matter. Any enemies could be listening in at any time. This is why the crew is sometimes forced to used coded languages to exchange messages between vessels. To decrypt messages every crew member can use the key on their hardware tokens.')\n time.sleep(10)\n update.message.reply_text('Jms: Mx we are getting a distress signal from vessel Vigenere. Do you copy?')\n time.sleep(3)\n update.message.reply_text('Mx: [gasps...]')\n time.sleep(1)\n update.message.reply_text('Mx: This one is for you rookie... See you on the other side.')\n update.message.reply_text('Kyjkda kghc tir Yeevobyj: BgXfsGofrCyrDouwfh\\r\\nUsfcfqg zb dywzv lcfy ij cqff hsnal jjoa:\\r\\nCKJ{en55td2my6jse8361a427p3xf319tf12}')",
"def alarm(self, context):\n job = context.job\n context.bot.send_message(job.context, text=\"Nuevo valor seteado!\")",
"def task_rescheduled_notify(name, attempts, last_error, date_time, task_name, task_params):\n body = loader.render_to_string(\n 'notification/email/notify_rescheduled_task.html', {\n 'name': name,\n 'attempts': attempts,\n 'last_error': last_error,\n 'date_time': date_time.strftime(\"%Y-%m-%d %H:%M:%S\"),\n 'task_name': task_name,\n 'task_params': task_params,\n 'signature': settings.EMAIL_SIGNATURE\n })\n subject = name + \" has been rescheduled\"\n mail_admins(subject, body, settings.DEFAULT_FROM_EMAIL)",
"def get(self):\n app_id = app_identity.get_application_id()\n users = User.query(User.email != None)\n for user in users:\n games = Game.query(Game.user == user.key, Game.game_over == False)\n if games:\n subject = \"This is a reminder!\"\n body = \"Hello {}, you have some unfinished games.\".format(\n user.name)\n\n mail.send_mail('noreply@{}.appspot.com'.format(app_id),\n user.email, subject, body)",
"def notify(guid, message):"
] | [
"0.8080209",
"0.7884799",
"0.77642393",
"0.7438207",
"0.7399876",
"0.72116685",
"0.71892047",
"0.71756524",
"0.716907",
"0.71174127",
"0.69518006",
"0.68498677",
"0.6783447",
"0.67111504",
"0.66775066",
"0.6611192",
"0.6484514",
"0.64267516",
"0.6417511",
"0.63394415",
"0.6295182",
"0.62897694",
"0.62354165",
"0.6202982",
"0.620242",
"0.6190181",
"0.6110508",
"0.6059775",
"0.6019531",
"0.60006976",
"0.6000378",
"0.59965",
"0.5995645",
"0.59279406",
"0.5921028",
"0.5915108",
"0.58957887",
"0.58790636",
"0.58738625",
"0.58679855",
"0.58618754",
"0.5860771",
"0.5825029",
"0.5805841",
"0.57889944",
"0.5782462",
"0.5769507",
"0.5763542",
"0.5755209",
"0.5748159",
"0.57321787",
"0.572002",
"0.5716298",
"0.57155335",
"0.5711362",
"0.5709584",
"0.56996167",
"0.56870884",
"0.5659338",
"0.56563705",
"0.5653819",
"0.565364",
"0.56532854",
"0.5649694",
"0.5648641",
"0.56468624",
"0.56413215",
"0.56323004",
"0.56235033",
"0.56185806",
"0.56148803",
"0.561356",
"0.56004786",
"0.55999297",
"0.5592072",
"0.55874825",
"0.55866313",
"0.5579419",
"0.55764836",
"0.55730975",
"0.5571223",
"0.5562349",
"0.5560333",
"0.55590576",
"0.55544937",
"0.5550286",
"0.5531312",
"0.5530943",
"0.5529103",
"0.5528374",
"0.5524583",
"0.55203944",
"0.55160445",
"0.55148685",
"0.5508704",
"0.55065763",
"0.5504548",
"0.5492393",
"0.54918575",
"0.54873395",
"0.5486515"
] | 0.0 | -1 |
Linac phasing. Note that these overlays override individual klystron phases. | def bmad_linac_phasing_lines(epics):
lines = [
'! Linac overall phasing',
'O_L1[phase_deg] = 0 ! K21_1 sets this directly. This is a delta on top of that.',
'O_L2[phase_deg] = '+str(epics.caget('SIOC:SYS0:ML00:CALC204')),
'O_L3[phase_deg] = '+str(epics.caget('SIOC:SYS0:ML00:AO499'))
]
return lines | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_display_from_lines(self):\n y = 1\n maxlin = CA_World.ca_display_size - 1\n limy = len(self.ca_lines) + maxlin\n for i in self.ca_lines:\n x = 1\n if limy >= maxlin:\n if SimEngine.gui_get('init') == \"Right\": # Right\n limx = len(i) + maxlin + 2\n for j in range(len(i) - 2):\n if limx >= maxlin:\n b = bool(i[j])\n self.pixel_tuple_to_patch(\n ((maxlin - len(i) + 2 + x) * 4, (maxlin - len(self.ca_lines) + y) * 4)).set_on_off(b)\n x += 1\n else:\n limx -= 1\n elif SimEngine.gui_get('init') == \"Left\": # Left\n limx = 0\n for j in range(len(i) - 2):\n if limx <= maxlin + 2:\n b = bool(i[j])\n self.pixel_tuple_to_patch(((x - 3) * 4, (maxlin - len(self.ca_lines) + y) * 4)).set_on_off(\n b)\n x += 1\n limx += 1\n else: # Center and Random\n limx = int((len(i) - maxlin) / 2)\n k = 0\n for j in range(len(i)):\n if limx < 0:\n b = bool(i[j])\n self.pixel_tuple_to_patch(((maxlin - len(i) + x - 1 + limx) * 4,\n (maxlin - len(self.ca_lines) + y) * 4)).set_on_off(b)\n else:\n if k < maxlin + 1:\n b = bool(i[j + limx])\n self.pixel_tuple_to_patch((k * 4,\n (maxlin - len(self.ca_lines) + y) * 4)).set_on_off(b)\n x += 1\n k += 1\n y += 1\n else:\n limy -= 1",
"def add_lvs_correspondence_points(self):\n\n pin = self.rbl_inv_inst.get_pin(\"A\")\n self.add_label_pin(text=\"bl[0]\",\n layer=pin.layer,\n offset=pin.ll(),\n height=pin.height(),\n width=pin.width())\n\n pin = self.dc_inst.get_pin(\"out\")\n self.add_label_pin(text=\"delayed_en\",\n layer=pin.layer,\n offset=pin.ll(),\n height=pin.height(),\n width=pin.width())",
"def drawWarpLines(self):\n # draw warp lines\n for item in self.game.warpLines:\n anwp.sl.engine.drawLine(item[0]+self.bufferX, item[1]+self.bufferY, item[2]+self.bufferX, item[3]+self.bufferY, pyui.colors.blue)",
"def road_lines():\n cv2.polylines(frame_1, [pts_1], True, yellow_color)\n cv2.polylines(frame_2, [pts_2], True, yellow_color)",
"def _on_lane_invasion(self, event):\n self.lanes_invaded = event.crossed_lane_markings",
"def draw_lines(img, lines, color=[0, 0, 255], thickness=10):\n \n yFinal = 540 # tweak these values as per the frame size\n yIni = 350\n xPlus = []\n yPlus = []\n xMinus = []\n yMinus= []\n slope_range = 0.2\n\n if lines is not None:\n for line in lines:\n if line is not None:\n for x1,y1,x2,y2 in line:\n # check slope \n slope = (y2-y1)/(x2-x1)\n\t\t \n \t\t # Collect all points with + ve slope (right lane)\n if (slope > slope_range):\n xPlus.append(x1)\n xPlus.append(x2)\n yPlus.append(y1)\n yPlus.append(y2)\n\n # Collect all points with - ve slope (left lane)\n elif ((slope) < (-slope_range)):\n xMinus.append(x1)\n xMinus.append(x2)\n yMinus.append(y1)\n yMinus.append(y2)\n # If out of range, lists defined in beginning of this function will be empty \n else:\n continue\n \n # draw right lane\n x1,y1,x2,y2 = fit_line(xPlus, yPlus, yIni, yFinal)\n cv2.line(img,(x1,y1),(x2,y2),color, thickness) \n\n # draw left lane\n x1,y1,x2,y2 = fit_line(xMinus, yMinus, yIni, yFinal)\n cv2.line(img,(x1,y1),(x2,y2),color,thickness)",
"def set_lanes(left_lines, right_lines, image):\n \n Y_LANE_EXTRAP = 35 # percent up from bottom of image to extrapolate lane lines\n \n image_wk = np.copy(image) # working copy\n image_lines = np.copy(image_wk)*0 # create a blank to draw lines on\n im_y = image_wk.shape[0]\n \n y1_lane = im_y\n y2_lane = np.int32(im_y - (Y_LANE_EXTRAP/100*im_y))\n \n # Process left lane\n if left_lines:\n z_left = my_linear_polyfit(left_lines)\n x1_lane = np.int32( (y1_lane - z_left[1]) / z_left[0] ) # x = (y-b)/m\n x2_lane = np.int32( (y2_lane - z_left[1]) / z_left[0] )\n \n # Draw left lane on blank image\n cv2.line(image_lines, (x1_lane, y1_lane), (x2_lane, y2_lane), (100,100,100), 15)\n \n # Process right lane\n if right_lines:\n z_right = my_linear_polyfit(right_lines)\n x1_lane = np.int32( (y1_lane - z_right[1]) / z_right[0] ) # x = (y-b)/m\n x2_lane = np.int32( (y2_lane - z_right[1]) / z_right[0] )\n \n # Draw right lane on blank image\n cv2.line(image_lines, (x1_lane, y1_lane), (x2_lane, y2_lane), (100,100,100), 15)\n \n # Overlay detected left/right lanes on road image\n image_wk = weighted_img(image_lines, image_wk)\n \n # Output road image with overlaid left/right lanes\n return image_wk",
"def palm_land(self):\n self.palm_landing = True\n self.drone.palm_land()",
"def lane(self, mask, win_color = None):\n\n # the nonzero point\n solid = np.nonzero(mask)\n sx, sy = solid[1], solid[0]\n\n # make a image to draw on\n out_img = np.dstack([np.zeros_like(mask)]*3)*255\n if self.fit is None:\n # get the intial poly line for window sliding\n\n # get the midpoint for both line, expecting it shows up in the lower half\n self.h, self.w = mask.shape\n self.midpoint = self.w//2\n self.win_height = self.h//self.nb_win\n\n curv_head = self.h//self.frac\n histogram = np.sum(mask[:curv_head, :], axis = 0)\n mid_l = np.argmax(histogram[:self.midpoint])\n mid_r = np.argmax(histogram[self.midpoint:]) + self.midpoint\n\n # the indice for solid pixel in left and right\n l_lane_idc = []\n r_lane_idc = []\n\n # slide the windows down up\n btm = self.h\n for n in range(self.nb_win):\n # right window\n ul_l = (mid_l - self.half, btm - self.win_height)\n lr_l = (mid_l + self.half, btm)\n\n # left window\n ul_r = (mid_r - self.half, btm - self.win_height)\n lr_r = (mid_r + self.half, btm)\n\n\n # draw the retangle on the image\n if win_color:\n cv2.rectangle(out_img, lr_l, ul_l, win_color, 2)\n cv2.rectangle(out_img, lr_r, ul_r, win_color, 2)\n\n\n # the indice within window\n within_l = ((sx>=ul_l[0]) & \\\n (sx<=lr_l[0]) & \\\n (sy>=ul_l[1]) & \\\n (sy<=lr_l[1])).nonzero()[0]\n\n within_r = ((sx>=ul_r[0]) & \\\n (sx<=lr_r[0]) & \\\n (sy>=ul_r[1]) & \\\n (sy<=lr_r[1])).nonzero()[0]\n\n # append to the lane\n l_lane_idc.append(within_l)\n r_lane_idc.append(within_r)\n\n if len(within_r) > self.minpix:\n mid_r = np.int(np.mean(sx[within_r]))\n if len(within_l) > self.minpix:\n mid_l = np.int(np.mean(sx[within_l]))\n btm -= self.win_height\n\n # concatenate the windows\n l_lane_idc = np.concatenate(l_lane_idc)\n r_lane_idc = np.concatenate(r_lane_idc)\n try:\n self.fit = [np.polyfit(sy[l_lane_idc], sx[l_lane_idc], 2),\n np.polyfit(sy[r_lane_idc], sx[r_lane_idc], 2)]\n except:\n return out_img\n\n\n else:\n # if we've fitted the lane, use that as guide\n l_fit, r_fit = self.fit\n l_lane_idc = ((sx >= np.polyval(l_fit, sy) - self.half) &\n (sx <= np.polyval(l_fit, sy) + self.half)).nonzero()[0]\n r_lane_idc = ((sx >= np.polyval(r_fit, sy) - self.half) &\n (sx <= np.polyval(r_fit, sy) + self.half)).nonzero()[0]\n\n\n curv_head = self.h//self.frac\n l_curv_count = np.sum((sy >= curv_head) & (sx <= self.midpoint))\n r_curv_count = np.sum((sy >= curv_head) & (sx >= self.midpoint))\n\n if l_curv_count >= self.curv_count:\n try: self.fit[0] = np.polyfit(sy[l_lane_idc], sx[l_lane_idc], 2)\n except: pass\n if r_curv_count >= self.curv_count:\n try: self.fit[1] = np.polyfit(sy[r_lane_idc], sx[r_lane_idc], 2)\n except: pass\n\n # draw the lane area\n l_fit, r_fit = self.fit\n y_cord = np.linspace(0, self.h - 1, self.h)\n lane_l = np.polyval(l_fit, y_cord)\n lane_r = np.polyval(r_fit, y_cord)\n\n\n if not win_color:\n pts_l = np.array([np.vstack([lane_l, y_cord]).T])\n pts_r = np.array([np.flipud(np.vstack([lane_r, y_cord]).T)])\n\n pts = np.hstack((pts_l, pts_r))\n cv2.fillPoly(out_img, np.int_(pts), [0, 100, 0])\n\n # draw red on left\n out_img[sy[l_lane_idc], sx[l_lane_idc]] = RED\n # draw blue on right\n out_img[sy[r_lane_idc], sx[r_lane_idc]] = BLUE\n\n\n # put text showing meters away center and radius\n l_btm = np.polyval(l_fit, self.h)\n r_btm = np.polyval(r_fit, self.h)\n mpp = self.lane_width/(r_btm - l_btm) # meters per pixel\n\n mid_lane = int((r_btm + l_btm)/2)\n dev = (self.midpoint - mid_lane)\n radius = np.mean(self.curvature(mpp))\n\n side = ''\n side = 'L' if dev < 0 else 
'R'\n dev_text = (\"%.2fm %s\"%(np.abs(mpp*dev), side))\n radius_text = (\"RADIUS %.2fm\"%(radius)) if radius < 2000 else 'STRAIGHT'\n\n (dev_w, dev_h), _ = cv2.getTextSize(dev_text,\n fontFace = cv2.FONT_HERSHEY_SIMPLEX,\n fontScale = 1, thickness = 2)\n\n (radius_w, radius_h), _ = cv2.getTextSize(radius_text,\n fontFace = cv2.FONT_HERSHEY_SIMPLEX,\n fontScale = 1, thickness = 3)\n\n\n dev_org = (int(mid_lane + 2*dev - dev_w//2), self.h - 30)\n radius_org = (int(mid_lane - radius_w//2), self.h - 80)\n\n\n\n cv2.line(out_img, (mid_lane, self.h - 20),\n (mid_lane, self.h - 40 - dev_h),\n color = [255,255,255], thickness = 3)\n\n cv2.putText(out_img, radius_text,\n fontFace = cv2.FONT_HERSHEY_SIMPLEX,\n fontScale = 1, thickness = 3,\n org = radius_org, color = [0, 0, 0])\n\n cv2.putText(out_img, dev_text,\n fontFace = cv2.FONT_HERSHEY_SIMPLEX,\n fontScale = 1, thickness = 2,\n org = dev_org, color = [0, 0, 0])\n\n return out_img",
"def palm_land(self):\n log.debug(\"PALM_LAND\")\n self.drone.palm_land()",
"def process_laneOffset(self):\n center_line = np.poly1d(np.mean([self.line_l.get_LinePoly().coeffs, self.line_r.get_LinePoly().coeffs], axis=0))\n # store the center line polynomial\n self.center_poly = center_line\n center_point = IMAGE_WIDTH/2 - center_line(709)\n offset_from_center =center_point* self.line_l.x_pxm\n self.lane_offset = offset_from_center\n return center_point",
"def line_layer(self):\n screen_origin = self.ids.mapview.get_window_xy_from(lat1, lon1, self.ids.mapview.zoom)\n screen_destination = self.ids.mapview.get_window_xy_from(lat2, lon2, self.ids.mapview.zoom)\n point_list = [screen_origin[0], screen_origin[1], screen_destination[0], screen_destination[1]]\n\n with self.ids.line.canvas:\n self.ids.line.canvas.clear()\n\n Color(0, 0, 0, .6)\n Line(points=point_list, width=3, joint=\"bevel\")",
"def ActiveHlt2Lines(self) :\n\n lines = [\n 'Hlt2SingleMuon',\n 'Hlt2SingleMuonHighPT',\n 'Hlt2SingleMuonLowPT',\n ]\n \n return lines",
"def draw_lh_lines(data):\n #hnd = extract_left_hand(data);\n hnd = np.array(data['crop']);\n hand.draw_hand_lines(hnd,data['lhkpss'][data['i']]);\n return hnd;",
"def add_layout_pins(self):\n en_offset = self.dc_inst.get_pin(\"in\").ll()\n self.add_layout_pin(text=\"en\",\n layer=\"metal1\",\n offset=en_offset.scale(1,0),\n width=self.m1_width,\n height=en_offset.y)\n\n out_offset = self.rbl_inv_inst.get_pin(\"Z\").ll()\n self.add_layout_pin(text=\"out\",\n layer=\"metal1\",\n offset=out_offset.scale(1,0),\n width=self.m1_width,\n height=out_offset.y)",
"def lla(self, input_poly):\n # check the input\n if type(input_poly) is not Polygon:\n #if not isinstance(input_poly, Polygon):\n # if we weren't given a polygon, turn the coordinates into one\n if (type(input_poly) is np.ndarray) or (type(input_poly) is list):\n input_poly = Polygon(input_poly)\n else:\n return\n # set the internal value for lla shape\n self._lla_shape = input_poly\n # get the vertex coordinates for the lla shape\n lla_coords_temp = np.array(self._lla_shape.exterior.xy).T\n # add a column of zeros for altitude (shapely is 2D..)\n lla_coords = np.zeros((lla_coords_temp.shape[0], 3))\n lla_coords[:,:-1] = lla_coords_temp\n # convert lla vertices to ned\n ned_coords = lla2ned(lla_coords, self._ref_pt)\n # make the ned shape out of these coordinates\n ned_exterior = Polygon(ned_coords)\n\n # make a unified shape for the keep out zones\n keep_out_list = []\n for shape in input_poly.interiors:\n # convert keepout coords to ned\n shape = Polygon(shape)\n lla_coords_temp = np.array(shape.exterior.xy).T\n # add a column of zeros for altitude (shapely is 2D..)\n lla_coords = np.zeros((lla_coords_temp.shape[0], 3))\n lla_coords[:,:-1] = lla_coords_temp\n # convert lla vertices to ned\n ned_coords = lla2ned(lla_coords, self._ref_pt)\n # add this region to the list\n keep_out_list.append(Polygon(ned_coords) )\n keep_out = cascaded_union(keep_out_list)\n\n # now make a valid mission area polygon\n self._ned_shape = ned_exterior.difference(keep_out)",
"def mark_lane_lines(undist, warped, ploty, left_fitx, right_fitx, Minv):\n # Create an image to draw the lines on\n warp_zero = np.zeros_like(warped).astype(np.uint8)\n color_warp = np.dstack((warp_zero, warp_zero, warp_zero))\n\n # Recast the x and y points into usable format for cv2.fillPoly()\n pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])\n pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])\n pts = np.hstack((pts_left, pts_right))\n\n # Draw the lane onto the warped blank image\n cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))\n\n # Warp the blank back to original image space using inverse perspective matrix (Minv)\n newwarp = cv2.warpPerspective(color_warp, Minv, (warped.shape[1], warped.shape[0])) \n # Combine the result with the original image\n result = cv2.addWeighted(undist, 1, newwarp, 0.3, 0)\n return result",
"def overlay_lines(self, p1, p2, FT, frame):\n \n if p1 == p2:\n self.show_dif_class_msg()\n \n else:\n a1 = complete_scores[p1, p2][0]\n a2 = complete_scores[p1, p2][1]\n projection1 = make_1D(extract_2D[p1], a1)\n projection2 = make_1D(extract_2D[p2], a2)\n\n if FT: \n pad_p1 = np.pad(projection1.vector, pad_width=(0, shape-projection1.size()))\n pad_p2 = np.pad(projection2.vector, pad_width=(0, shape-projection2.size()))\n A = abs(np.fft.rfft(pad_p1))\n B = abs(np.fft.rfft(pad_p2))\n \n f = Figure(figsize=(8,4))\n ax = f.add_subplot(111)\n\n ax.bar(range(len(A)), A, alpha=0.35, color='deepskyblue', ec='k', linewidth=1)\n ax.bar(range(len(B)), B, alpha=0.35, color='yellow', ec='k', linewidth=1)\n \n ax.get_xaxis().set_ticks([])\n ax.set_xlabel('frequency component')\n ax.set_ylabel('Amplitude')\n\n else:\n a2_flip = complete_scores[p1, p2][1] + 180\n projection2_flip = make_1D(extract_2D[p2], a2_flip)\n\n score_default, r, c = slide_score(projection1, projection2) # Score and location of optimum\n score_flip, r_flip, c_flip = slide_score(projection1, projection2_flip) # Score of phase flipped\n\n if score_default <= score_flip:\n ref_intensity, comp_intensity = r, c\n else:\n ref_intensity, comp_intensity = r_flip, c_flip\n\n f = Figure(figsize=(8,4))\n ax = f.add_subplot(111)\n\n x_axis_max = len(ref_intensity)\n y_axis_max = max(np.amax(ref_intensity), np.amax(comp_intensity))\n y_axis_min = min(np.amin(ref_intensity), np.amin(comp_intensity))\n\n ax.plot(ref_intensity, color='black')\n ax.plot(comp_intensity, color='black')\n\n ax.fill_between(range(len(ref_intensity)), ref_intensity, alpha=0.35, color='deepskyblue')\n ax.fill_between(range(len(comp_intensity)), comp_intensity, alpha=0.35, color='yellow')\n\n ax.set_ylabel('Intensity')\n ax.set_ylim([y_axis_min, (y_axis_max + 0.025*y_axis_max)])\n ax.xaxis.set_visible(False)\n\n f.tight_layout()\n\n if self.projcanvas:\n self.projcanvas.get_tk_widget().destroy()\n self.projtoolbar.destroy()\n\n self.projcanvas = FigureCanvasTkAgg(f, frame)\n self.projcanvas.draw()\n self.projcanvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=True)\n self.projcanvas._tkcanvas.pack(side=tk.TOP, fill=tk.BOTH, expand=True)\n\n self.projtoolbar = NavigationToolbar2Tk(self.projcanvas, frame)\n self.projtoolbar.update()",
"def vertival_lines_iglu():\n # top first line\n line(screen, BLACK, (180, 400), (170, 440))\n line(screen, BLACK, (220, 400), (230, 440))\n # second line\n line(screen, BLACK, (150, 438), (140, 480))\n line(screen, BLACK, (200, 438), (200, 480))\n line(screen, BLACK, (250, 438), (260, 480))\n # third line\n line(screen, BLACK, (115, 477), (95, 525))\n line(screen, BLACK, (170, 480), (165, 528))\n line(screen, BLACK, (235, 480), (240, 528))\n line(screen, BLACK, (285, 480), (300, 528))\n # forth line\n line(screen, BLACK, (70, 525), (60, 570))\n line(screen, BLACK, (125, 530), (115, 580))\n line(screen, BLACK, (200, 530), (200, 580))\n line(screen, BLACK, (270, 530), (275, 580))\n line(screen, BLACK, (330, 525), (340, 570))",
"def draw_horizontal_paddle(self):\n pygame.draw.rect(self.screen, self.color, self.top_rect)\n pygame.draw.rect(self.screen, self.color, self.bot_rect)",
"def draw_final_image(self, image, warped, undist, ploty, left_fitx, right_fitx, Minv, left_rad, right_rad):\n gray = cv2.cvtColor(warped, cv2.COLOR_BGR2GRAY)\n # Create an image to draw the lines on\n warp_zero = np.zeros_like(gray).astype(np.uint8)\n color_warp = np.dstack((warp_zero, warp_zero, warp_zero))\n\n # Recast the x and y points into usable format for cv2.fillPoly()\n pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])\n pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])\n pts = np.hstack((pts_left, pts_right))\n\n # Draw the lane onto the warped blank image\n cv2.fillPoly(color_warp, np.int_([pts]), (0, 255, 0))\n\n # Warp the blank back to original image space using inverse perspective matrix (Minv)\n newwarp = cv2.warpPerspective(color_warp, Minv, (image.shape[1], image.shape[0]))\n # Combine the result with the original image\n result = cv2.addWeighted(undist, 1, newwarp, 0.3, 0)\n\n font = cv2.FONT_HERSHEY_SIMPLEX\n off_center = calculate_center(left_fitx, right_fitx, image.shape)\n direction_str = 'left' if off_center < 0 else 'right'\n center_str = '{:.2f} m of center {}'.format(abs(off_center), direction_str)\n cv2.putText(result, center_str, (430, 630), font, 1, (0, 0, 255), 2, cv2.LINE_AA)\n if left_rad and right_rad:\n curvature = 0.5 * (round(right_rad / 1000, 1) + round(left_rad / 1000, 1))\n else:\n curvature = 0\n str2 = 'Radius of curvature: {} km'.format(curvature)\n cv2.putText(result, str2, (430, 670), font, 1, (0, 0, 255), 2, cv2.LINE_AA)\n\n if self.args.is_test:\n plt.imshow(result)\n plt.show()\n\n return result",
"def ActiveHlt1Lines(self) :\n lines = ['Hlt1IncPhi','Hlt1CalibTracking']\n\n return lines",
"def ActiveHlt2Lines(self) :\n hlt2 = ['Hlt2PassThrough','Hlt2Lumi','Hlt2DebugEvent','Hlt2Forward','Hlt2ErrorEvent','Hlt2Transparent',\n 'Hlt2diPhotonDiMuon',\n 'Hlt2LowMultMuon',\n 'Hlt2LowMultHadron',\n 'Hlt2LowMultPhoton',\n 'Hlt2LowMultElectron',\n 'Hlt2LowMultHadron_nofilter',\n 'Hlt2LowMultElectron_nofilter',\n 'Hlt2HighPtJets'\n ]\n\n\n from Muons_April2012 import Muons_April2012\n hlt2.extend( Muons_April2012().ActiveHlt2Lines() )\n\n from Electrons_July2011 import Electrons_July2011\n hlt2.extend( Electrons_July2011().ActiveHlt2Lines() )\n\n from Hadrons_September2012 import Hadrons_September2012\n hlt2.extend( Hadrons_September2012().ActiveHlt2Lines() )\n \n from DV_draft2012 import DV_draft2012 \n hlt2.extend( DV_draft2012().ActiveHlt2Lines() )\n\n from CharmLeptonic_draft2012 import CharmLeptonic_draft2012\n hlt2.extend( CharmLeptonic_draft2012().ActiveHlt2Lines() )\n\n from CharmCEP_September2012 import CharmCEP_September2012\n hlt2.extend( CharmCEP_September2012().ActiveHlt2Lines() )\n\n from KshortMuMuPiPi_July2012 import KshortMuMuPiPi_July2012\n hlt2.extend( KshortMuMuPiPi_July2012().ActiveHlt2Lines() )\n \n return hlt2",
"def lane_fill_poly(self, binary_warped,undist, inverse_perspective_transform, left_fit,right_fit):\r\n # Generate x and y values\r\n ploty = np.linspace(0, binary_warped.shape[0]-1, binary_warped.shape[0] )\r\n left_fitx = self.get_val(ploty, left_fit)\r\n right_fitx = self.get_val(ploty, right_fit)\r\n \r\n # Create an image to draw the lines on\r\n warp_zero = np.zeros_like(binary_warped).astype(np.uint8)\r\n color_warp = np.dstack((warp_zero, warp_zero, warp_zero))\r\n\r\n # Recast x and y for cv2.fillPoly()\r\n pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])\r\n pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])\r\n pts = np.hstack((pts_left, pts_right))\r\n\r\n # Draw the lane \r\n cv2.fillPoly(color_warp, np.int_([pts]), (255,255, 255))\r\n\r\n # Warp using inverse perspective transform\r\n newwarp = cv2.warpPerspective(color_warp, inverse_perspective_transform, (binary_warped.shape[1], binary_warped.shape[0])) \r\n # overlay\r\n #newwarp = cv.cvtColor(newwarp, cv.COLOR_BGR2RGB)\r\n result = cv2.addWeighted(undist, 1, newwarp, 0.3, 0)\r\n \r\n return result",
"def __add_to_piano_roll(self, line_element: LineElement) -> None:\n self._piano_roll[\n line_element.scale_element.position_in_semitones,\n line_element.start_time_in_eighths:line_element.end_time_in_eighths\n ] = 1",
"def on_draw_overlay(self):",
"def _inverse_lines(self):\n pass",
"def PINTARLEYENDAPLANOXY(self):\n \n # Pinto la linea del eje X\n self.telaMAPA.create_line(40, 560, 680, 560)\n\n # Pinto los numeros del eje x\n for i in range(0, 26):\n # Lugar de referencia a pintar que se mueve en x\n x0 = ((i+1)*24) + 30\n self.telaMAPA.create_text(x0, 580, text=str(i))\n\n \n # Pinto la linea del eje y\n self.telaMAPA.create_line(40, 20, 40, 560)\n\n # Pinto los numeros del eje y\n for i in range(0, 26):\n # Lugar de referencia a pintar\n y0 = ((i+1)*21) + 6\n self.telaMAPA.create_text(20, y0, text=str(25 - i))\n\n \n # Vamos a pintar la botonera\n self.PINTARMATRIXDEBOTONES()\n\n # Vamos a rellenar la matrix que controla el pintado de las paredes\n self.rellenarMatrix()",
"def draw_flow(img, pts, next_pts, flowColor = (0,0,255), flowThickness = 1, p=1, q=1, th = 0, drawArrows=False,\n lenghtOfArrayArm = 2, angleOfArrow=np.pi/3):\n if pts.shape[0] == 0 or next_pts.shape[0] == 0 or pts.shape[0] != next_pts.shape[0]:\n return img\n lines = np.hstack((pts, next_pts))\n #make it into format opencv wants\n lines = lines.reshape(-1,2,2)\n #round up to nears integer\n lines = np.int32(lines + 0.5)\n\n #select p every q\n index = np.arange(lines.shape[0])\n index = index[(index%q) < p]\n lines = lines[index]\n\n #filter small values\n if th > 0:\n #make points into a easy way to manipulate\n points = lines.reshape(-1, 4)\n #compute displacement\n displacement = points[:,2:4] - points[:,0:2]\n S = np.linalg.norm(displacement, axis=1)\n lines = lines[S > th]\n \n if len(img.shape) < 3:\n #make sure we're dealing with a BGR image\n img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)\n #draw multiple lines\n cv2.polylines(img, lines, isClosed = False, color = flowColor, thickness=flowThickness)\n\n if drawArrows:\n #compute flow direction\n flow = lines[:, 1, :] - lines[:,0,:]\n flow_angle = np.arctan2(flow[:,1], flow[:,0]).reshape(-1,1)\n\n #get start point of every arrow\n startPoints_x = lines[:, 1, 0].reshape(-1,1)\n startPoints_y = lines[:, 1, 1].reshape(-1,1)\n\n #get end point of arrow arm 1\n endPoints_x = (startPoints_x + lenghtOfArrayArm * np.cos( angleOfArrow + np.pi + flow_angle)).reshape(-1,1)\n endPoints_y = (startPoints_y + lenghtOfArrayArm * np.sin( angleOfArrow + np.pi + flow_angle)).reshape(-1,1)\n\n #get end point of arrow arm 2\n endPoints2_x = (startPoints_x + lenghtOfArrayArm * np.cos( -1.0*angleOfArrow + np.pi + flow_angle)).reshape(-1,1)\n endPoints2_y = (startPoints_y + lenghtOfArrayArm * np.sin( -1.0*angleOfArrow + np.pi + flow_angle)).reshape(-1,1)\n\n\n #create array with line indications the way opencv wants it\n arrowArms = np.hstack((startPoints_x, startPoints_y, endPoints_x, endPoints_y))\n arrowArms2 = np.hstack((startPoints_x, startPoints_y, endPoints2_x, endPoints2_y))\n arrowArms = np.vstack((arrowArms, arrowArms2))\n arrowArms = arrowArms.reshape((-1,2,2))\n arrowArms = np.array(arrowArms, dtype = np.int32)\n\n\n #draw multiple lines\n cv2.polylines(img, arrowArms, isClosed = False, color = flowColor, thickness=flowThickness)\n\n\n return img",
"def line_plane(l, p):\n d = dot((p.o - l.o), p.n) / dot(l.d, p.n)\n return l(d)",
"def draw_overlay(self):\n pass",
"def set_mode_line():\n global DRAW_MODE, CURRENT_LABEL, SHAPE_SIZE\n global mouse_pos, line_start_pos\n\n if DRAW_MODE==\"line\":\n # draw the line on the mask\n cv.line(source_msk, line_start_pos, mouse_pos, CURRENT_LABEL, thickness=SHAPE_SIZE)\n\n line_start_pos = mouse_pos\n DRAW_MODE=\"line\"",
"def at_phaseFold_SecondaryEclipse(self):\n\n\t lcPF_SE = PF(self.t,self.fm,self.P,self.s2ncut_t0,self.tdur)\n\t self.add_dset('lcPF_SE',lcPF_SE,\n\t description='Phase folded photometry (secondary eclipse)')\n\t t0shft_SE = self.t0 - self.s2ncut_t0\n\t ph = t0shft_SE / self.P * 360\n\t self.add_attr('t0shft_SE',t0shft_SE,'Time offset of secondary eclipse')\n\t self.add_attr('ph_SE',ph,'Phase offset of secondary eclipse')",
"def at_phaseFold_SecondaryEclipse(self):\n\n\t lcPF_SE = PF(self.t,self.fm,self.P,self.s2ncut_t0,self.tdur)\n\t self.add_dset('lcPF_SE',lcPF_SE,\n\t description='Phase folded photometry (secondary eclipse)')\n\t t0shft_SE = self.t0 - self.s2ncut_t0\n\t ph = t0shft_SE / self.P * 360\n\t self.add_attr('t0shft_SE',t0shft_SE,'Time offset of secondary eclipse')\n\t self.add_attr('ph_SE',ph,'Phase offset of secondary eclipse')",
"def first_processing(self,img):\r\n \r\n processed_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\r\n #cv2.imshow('Balc',processed_img)\r\n #vertices = np.array([[0,480],[0,200],[640,200], [640,480] ], np.int32)\r\n #processed_img = self.masking(processed_img,[vertices])\r\n processed_img = cv2.Canny(processed_img, threshold1 = 50, threshold2=100)\r\n processed_img = cv2.GaussianBlur(processed_img,(5,5),0)\r\n #masking image (only below 200px cheight processing--> no horizont is detected)\r\n #cv2.imshow('2',processed_img)\r\n lines = cv2.HoughLinesP(processed_img, 1, np.pi/180, 180,np.array([]),100, 30)\r\n return lines",
"def floorplan(PAN, tilt1 = None): #---- placement floorplan\r\n if tilt1 is not None: PAN.img1_tilt = tilt1; # setup tilt angle for image 1\r\n PAN.H = np.zeros((PAN.count, 8)); # Transform matrix for floorplan\r\n print('---- floorplanning:')\r\n for k in range(PAN.count):\r\n PAN.H[k,0]= PAN.imgc[k].shape[0]; # set image height\r\n PAN.H[k,1]= PAN.imgc[k].shape[1]; # set image width\r\n PAN.H[k,2]= (0.5/np.tan(PAN.hfov/180*np.pi))* PAN.imgs[k].shape[1]; \r\n PAN.H[k,4]= 1; # set initial sizing = 1\r\n PAN.H[0,3]= PAN.img1_tilt/180*np.pi; # set tilt \r\n #--- compute new floorplan for image 2 ~ ... ---\r\n for k in range(1, PAN.count): # \r\n #for k in range(1, 2): # debug only \r\n KPS1 = PAN.KPDBs[k-1]; # load keypoint database\r\n KPS2 = PAN.KPDBs[k]; # \r\n H1 = PAN.H[k-1]; # transform function\r\n H2 = PAN.H[k]; # \r\n for m in range(0, PAN.match_seq.shape[0]):\r\n if PAN.match_seq[m, 0]== (k-1) and PAN.match_seq[m, 1]== k: \r\n match = PAN.matchinfo[m];\r\n pano_tools.stitch(KPS1, KPS2, H1, H2, match);",
"def Back_to_center (Chest_img,wich_side='Left'):\n Filter_length = 130\n iteration = 0\n while True:\n if len(action_list) == 0:\n print('Filter_length',Filter_length)\n Chest = np.rot90(undistort_chest(Chest_img.imgs)).copy()\n cv2.imshow(\"undistort_chest\", Chest)\n cv2.waitKey(1)\n # continue\n if wich_side == 'Right':\n ROI_image = Chest[250:550,240:450]#右侧边缘,胸部\n elif wich_side == 'Left':\n ROI_image = Chest[250:550,30:239]#左侧边缘,胸部\n\n # 机器人脚的位置\n # ROI_image[340,:] = 255 \n\n cv2.imshow(\"Chest_img\",ROI_image)\n cv2.waitKey(1)\n\n ROI_image = cv2.pyrMeanShiftFiltering(ROI_image, 9, 25)\n cv2.imshow(\"pyrMeanShiftFiltering\",ROI_image)\n cv2.waitKey(1)\n Canny_img = cv2.Canny(ROI_image,15,150)\n # cv2.imshow(\"Canny_img\",Canny_img)\n # cv2.waitKey(1)\n\n #膨胀加粗边缘 \n dilate = cv2.dilate(Canny_img, np.ones((2, 2), np.uint8), iterations=1)\n cv2.imshow(\"dilate\",dilate)\n cv2.waitKey(1)\n\n\n Lines = cv2.HoughLinesP(dilate,1.0,np.pi / 180, 100,minLineLength=Filter_length,maxLineGap=15)\n\n # final_image = draw_lines(ROI_image,Lines,color=[0,255,0],thickness=2) #for test\n # cv2.imshow(\"origine line\",final_image)\n # cv2.waitKey(1)\n final_image, Final_line, good = group_lines_and_draw(ROI_image, Lines, wich_side)\n if Final_line is None:\n if Filter_length > 80:\n Filter_length -= 10\n else:\n iteration += 1\n continue\n \n if iteration == 3:\n print('No lines for long, just go')\n break\n\n cv2.imshow(\"image line\",final_image)\n cv2.waitKey(1)\n # print('test')\n if good:\n if wich_side == 'Right':\n Final_line[0] = Final_line[0] + 240\n Final_line[1] = Final_line[1] + 240\n if wich_side == 'Left':\n Final_line[0] = Final_line[0] + 30\n Final_line[1] = Final_line[1] + 30\n dX, deg = Calculate_position(Final_line)\n # print('line info',dX,deg)\n Step, Trun, Move_action, Turn_action = Move_dicision(dX, deg, wich_side)\n if Step == 0 and Trun == 0:\n print('In the center')\n break \n else:\n Step,Trun,Move_action,Turn_action = 0,0,True,True\n print('啥也没看见朋友!')\n \n\n for i in range(int(Trun)):\n action_append(Turn_action)\n time.sleep(0.5)\n\n for i in range(int(Step)):\n action_append(Move_action)\n time.sleep(0.5)",
"def _flipChipsLR(exp, wcs, dataId, dims=None):\n flipLR, flipTB = (False, True) if dataId['ccd'] in (100, 101, 102, 103) else (True, False)\n if exp:\n exp.setMaskedImage(afwMath.flipImage(exp.getMaskedImage(), flipLR, flipTB))\n if wcs:\n ampDimensions = exp.getDimensions() if dims is None else dims\n ampCenter = afwGeom.Point2D(ampDimensions/2.0)\n wcs = afwGeom.makeFlippedWcs(wcs, flipLR, flipTB, ampCenter)\n return exp,wcs",
"def loft(*sections):\n result = Mesh()\n current = sections[0]\n cap = closePoly(current)\n result.addPoly(cap)\n for next in sections:\n for ((p0,p1),(q0,q1)) in zip(edges(current),edges(next)):\n # HANDEDNESS\n result.addTri([p0,q0,p1]).addTri([p1,q0,q1])\n current = next\n # back cap is reversed (to face backward)\n result.addPoly(closePoly(current))\n return result",
"def line(x1,y1,x2,y2,z_thickness,laser):\r\n\t#Global variables that are used by all algorithms\r\n\tlayers = int(z_thickness/laser[\"z_spacing\"])\r\n\r\n\t#Works out offset when beginning on a new layer\r\n\ttaper = math.tan(math.radians(laser[\"kerf_angle\"]/2)) * laser[\"z_spacing\"]\r\n\ttaper_x,taper_y = offset(x1,y1,x2,y2,taper)\r\n\r\n\t#Works out offset between each parallel scan on the same layer\r\n\tdelta_x,delta_y = offset(x1,y1,x2,y2,laser[\"xy_spacing\"])\r\n\r\n\t#Works out maximum offset from starting line, we don't want to exceed this at any point.\r\n\tmax_taper = math.tan(math.radians(laser[\"kerf_angle\"]/2)) * (z_thickness) * 2\r\n\tmax_delta_x, max_delta_y = offset(x1,y1,x2,y2,max_taper)\r\n\t#max_delta_x, max_delta_y = 2*max_delta_x, 2*max_delta_y\r\n\r\n\t#Loops through each layer, in which we fit as many parallel raster scans as the maximum offset allows\r\n\tcutlist = []\r\n\tfor a in range(layers):\r\n\t\tnew_x1,new_x2,new_y1,new_y2 = x1 + a*taper_x, x2 + a*taper_x, y1 + a*taper_y, y2 + a*taper_y\r\n\t\ti = 0\r\n\t\tcutlist.append([\"z_step\", str(-laser[\"z_spacing\"])])\r\n\t\twhile abs(new_x1-x1) < abs(max_delta_x) or abs(new_y1-y1) < abs(max_delta_y):\r\n\t\t\t#This use of i is to reduce the jump distance between individual scans\r\n\t\t\tif i % 2 == 0:\r\n\t\t\t\tcutlist.append([\"jump\", f\"{new_x1:.6f}\", f\"{new_y1:.6f}\"])\r\n\t\t\t\tcutlist.append([\"mark\", f\"{new_x2:.6f}\", f\"{new_y2:.6f}\"])\r\n\t\t\telse:\r\n\t\t\t\tcutlist.append([\"jump\", f\"{new_x2:.6f}\", f\"{new_y2:.6f}\"])\r\n\t\t\t\tcutlist.append([\"mark\", f\"{new_x1:.6f}\", f\"{new_y1:.6f}\"])\r\n\t\t\tnew_x1,new_x2,new_y1,new_y2 = new_x1 + delta_x, new_x2 + delta_x, new_y1 + delta_y, new_y2 + delta_y\r\n\t\t\ti = i + 1\r\n\t\t#Having completed one layer, the laser moves down to begin the next layer\r\n\t\tmax_delta_x = max_delta_x - taper_x\r\n\r\n\tcutlist.insert(0, [\"set_trigger4\", \"1\", \"0\", \"7\", \"8\", \"45\"])\r\n\tcutlist.append([\"stop_trigger\"])\r\n\treturn json.dumps(cutlist)",
"def _map_lines(self):\n\n # init params to lower limits\n params = copy.copy(self._llimits)\n\n # map it\n self._map_line_axis(params)",
"def trajectories(self):\n # OPTIMIZE: take too much time due to too much solver call\n alpha_min = self.search_alpha_min()\n alpha_finder = self.FOV_img/2\n\n if self.display_trajectories is True:\n plt.figure('Trajectories plan')\n plt.clf() #clear the graph to avoir superposing data from the same set (can be deactivated if need to superpose)\n ax = plt.subplot(111, projection='polar') #warning if use python in ligne (!= graphical) graphs got superposed\n ax.set_title(\"light trajectories close to a black hole\\n\", va='bottom')\n ax.set_xlabel('R(UA)')\n plt.ylabel('phi(°)\\n\\n\\n\\n', rotation=0)\n ax.set_rlim((0, 4*self.D))\n ax.set_rlabel_position(-90)\n\n seen_angle = np.array([])\n deviated_angle = np.array([])\n\n booli = False # avoid points from the first loop to exceed points from the second loop\n points = 40 # careful with this if using kind=linear\n\n for i in range(6):\n # print(alpha_finder)\n\n for alpha in np.linspace(alpha_finder, alpha_min,\n num=points, endpoint=booli):\n r, phi = self.solver(alpha)\n\n if r[-1] > 1.1*self.Rs: # if not capture by black hole\n seen_angle = np.append(seen_angle, 180-alpha)\n dev_angle = phi[-1] + math.asin(self.D/r[-1]*math.sin(phi[-1]))\n dev_angle = math.degrees(dev_angle)\n deviated_angle = np.append(deviated_angle, dev_angle)\n Ci = 'C'+str(i)\n\n if self.display_trajectories is True:\n ax.plot(phi, r, Ci) # plot one trajectory\n\n if self.kind == 'linear':\n alpha_finder = alpha_min + (alpha_finder - alpha_min)/(points/3 + 1) # start a more precise cycle from last point\n\n else:\n alpha_finder = alpha_min + (alpha_finder - alpha_min)/(points + 1) # start a more precise cycle from last point\n\n points = 10 # careful with this if using kind=linear\n\n if i == 4:\n booli = True # allow to display the last point\n\n if self.display_trajectories is True:\n # plt.savefig('trajectories.png', format='png', dpi=1000, bbox_inches='tight')\n plt.draw()\n\n return seen_angle, deviated_angle",
"def ActiveHlt1Lines(self) :\n lines = [ 'Hlt1TrackAllL0', 'Hlt1TrackMuon', 'Hlt1TrackAllL0Tight', 'Hlt1TrackPhoton'\n , 'Hlt1VertexDisplVertex'\n , 'Hlt1SingleMuonNoIP', 'Hlt1SingleMuonHighPT'\n , 'Hlt1SingleElectronNoIP'\n , 'Hlt1DiMuonLowMass', 'Hlt1DiMuonHighMass'\n , 'Hlt1DiProtonLowMult', 'Hlt1DiProton'\n , 'Hlt1L0HighSumETJet','Hlt1HighPtJetsSinglePV']\n \n \n lines += ['Hlt1CharmCalibrationNoBias']\n lines += ['Hlt1CharmCalibrationNoBias']\n return lines",
"def process_pipeline(frame, keep_state=True):\n\n global line_lt, line_rt, processed_frames\n\n # undistort the image using coefficients found in calibration\n undistorted_img = undistort(frame, mtx, dist)\n\n # binarize the frame and highlight lane lines\n binarized_img = binarize(undistorted_img)\n\n # perspective transform to obtain bird's eye view\n birdeye_img, matrix, inversed_matrix = birdeye(binarized_img, visualise=False)\n\n # 2 order polynomial curve fit onto lane lines found\n if processed_frames > 0 and keep_state and line_lt.detected and line_rt.detected:\n find_lane_by_previous_fits(birdeye_img, line_lt, line_rt, visualise=False)\n else:\n find_lane_by_sliding_windows(birdeye_img, line_lt, line_rt, n_windows=9, visualise=False)\n\n # compute offset in meter from center of the lane\n offset_meter = offset_from_lane_center(line_lt, line_rt, frame_width=frame.shape[1])\n\n # draw the surface enclosed by lane lines back onto the original frame\n blend_on_road = draw_back_onto_the_road(undistorted_img, inversed_matrix, line_lt, line_rt, keep_state)\n mean_curvature_meter = np.mean([line_lt.curvature_meter, line_rt.curvature_meter])\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(blend_on_road, 'Curvature radius: {:.02f}m'.format(mean_curvature_meter), (60, 60), font, 1,\n (255, 255, 255), 2)\n cv2.putText(blend_on_road, 'Offset from center: {:.02f}m'.format(offset_meter), (60, 90), font, 1,\n (255, 255, 255), 2)\n\n processed_frames += 1\n\n return blend_on_road",
"def get_landmarks(self, sorted_cut_endo_pts, lowest_pt_idx, display_opt):\n\n # make polydata out of sorted endo pts\n numPoints = sorted_cut_endo_pts.shape[0]\n vtk_float_arr = numpy_support.numpy_to_vtk(num_array=np.asarray(sorted_cut_endo_pts), deep=True, array_type=vtk.VTK_FLOAT)\n vtkpts = vtk.vtkPoints()\n vtkpts.SetData(vtk_float_arr)\n cut_endo_poly = vtk.vtkPolyData()\n cut_endo_poly.SetPoints(vtkpts)\n\n # now make lines\n polyLine = vtk.vtkPolyLine()\n polyLine.GetPointIds().SetNumberOfIds(numPoints)\n\n for i in range(numPoints):\n polyLine.GetPointIds().SetId(i, i) # from 0,1 then 2,3 then 4,5 ...\n\n cells = vtk.vtkCellArray()\n cells.InsertNextCell(polyLine)\n\n # add points and lines to polydata container\n cut_endo_poly.SetLines(cells)\n\n # create tree for intersection process\n bspTree = vtk.vtkModifiedBSPTree() # bsp tree is much faster than obbtree due to rejection test\n bspTree.SetDataSet(cut_endo_poly)\n bspTree.BuildLocator()\n\n top_left = np.asarray(sorted_cut_endo_pts[0])\n top_right = np.asarray(sorted_cut_endo_pts[-1])\n low_pt = np.asarray(sorted_cut_endo_pts[lowest_pt_idx])\n\n # get direction of lines\n line_dir = normalize(top_right - top_left) # top_pt[0] to top_pt[1]\n\n # add distance on both sides to make sure the line can pass through the entire LV horizontally\n dist = np.linalg.norm(top_right - top_left)\n pSource_0 = top_right + dist*line_dir\n pTarget_0 = top_left - dist*line_dir\n\n # determine the length to travel from top to bottom\n top_center = (top_right + top_left)/2.0\n midline = normalize(low_pt - top_center)\n max_dist = np.linalg.norm(low_pt - top_center)\n\n left_pts = []\n right_pts = []\n\n weights = np.linspace(0.00, 0.98, self.numSamples)\n\n for i in range(self.numSamples):\n # determine source and target points\n pSource = pSource_0 + weights[i]*max_dist*midline\n pTarget = pTarget_0 + weights[i]*max_dist*midline\n center = (pSource + pTarget) / 2.0\n\n # set empty variables\n subId = vtk.mutable(0)\n pcoords = [0, 0, 0]\n t = vtk.mutable(0)\n left = [0, 0, 0]\n right = [0, 0, 0]\n\n # # run interesect command\n # pointid1 = bspTree.IntersectWithLine(pSource, pTarget, 0.001, t, left, pcoords, subId)\n # pointid2 = bspTree.IntersectWithLine(pTarget, pSource, 0.001, t, right, pcoords, subId)\n\n # intersect with line that goes from source to center or target to center\n pointid1 = bspTree.IntersectWithLine(pSource, center, 0.001, t, left, pcoords, subId)\n pointid2 = bspTree.IntersectWithLine(pTarget, center, 0.001, t, right, pcoords, subId)\n\n left_pts.append(list(left))\n right_pts.append(list(right))\n\n if display_opt:\n mapper = vtk.vtkPolyDataMapper()\n mapper.SetInputData(cut_endo_poly)\n\n all_act = vtk.vtkActor()\n all_act.SetMapper(mapper)\n\n right_act = include_points(left_pts, len(left_pts), 4, (1,0,0))\n left_act = include_points(right_pts, len(right_pts), 4, (1,0,0))\n low_pt_act = include_points(list(low_pt), 1, 10, (1,0,1))\n\n top_right_act = include_points(list(top_right), 1, 10, (0,0,1))\n top_left_act = include_points(list(top_left), 1, 10, (0,0,1))\n\n ren = vtk.vtkRenderer()\n ren.AddActor(all_act)\n ren.AddActor(right_act)\n ren.AddActor(left_act)\n ren.AddActor(top_right_act)\n ren.AddActor(top_left_act)\n ren.AddActor(low_pt_act)\n\n vtk_show(ren)\n\n # ensure that left and right points have the same number of points as numSamples\n if len(left_pts) != self.numSamples or len(right_pts) != self.numSamples:\n print('Either left or right points do not have the same number of points as numSamples!')\n\n 
return left_pts, right_pts",
"def __init__(self, roi_warped_points):\n\n # was the line detected in the last iteration?\n self.detected = False\n # x values of the last n fits of the line\n self.recent_xfitted = []\n #average x values of the fitted line over the last n iterations\n self.bestx = None\n #polynomial coefficients averaged over the last n iterations\n self.best_fit = [np.array([False])]\n #polinomial coefficients for the last n fits of the lane\n self.recent_fit = []\n #polynomial coefficients for the most recent fit\n self.current_fit = [np.array([False])]\n #radius of curvature of the line in some units\n self.radius_of_curvature = 0\n #distance in meters of vehicle center from the line\n self.line_base_pos = 0\n #difference in fit coefficients between last and new fits\n self.diffs = np.array([0,0,0], dtype='float')\n #x values for detected line pixels\n self.allx = None\n #maximum number of iterations to average\n self.max_n = 10 #25\n\n # roi image points in bird's view space\n self.roi_warped_points = roi_warped_points\n\n #y values for detected line pixels\n self.ally = np.linspace(0, self.roi_warped_points[2][1] - 1, self.roi_warped_points[2][1])\n\n # line base pos is calculated through the roi information\n # the used four point ROI has two points at the bottom that are straight\n # with respect to the bottom - as this points are right next to the lines,\n # they can be translated from pixels into meters with the knowledge of\n # a U.S. highway standard lane - this is an apprximation, but should be\n # good enough for this project\n # U.S. regulations minimum lane width: 3.7m\n self.xm_per_pix = 3.7 / (self.roi_warped_points[1][0] - self.roi_warped_points[0][0])\n\n # each dashed line is 3m long --> about 33m for warped image\n self.ym_per_pix = 33 / (self.roi_warped_points[2][1] - self.roi_warped_points[0][1])",
"def enable_cl1_pll1(self):\n self.write_versa5(0x17,0x02) ## Change top multiplier to 0x22\n self.write_versa5(0x18,0x20)\n self.write_versa5(0x10,0xc0) ## Enable xtal and clock\n self.write_versa5(0x13,0x03) ## Switch to clock\n self.write_versa5(0x10,0x40) ## Enable clock input only, won't lock to master",
"def recalc_verts(self):\n tongue_w = self.TEX.height\n\n along = self.fly_pos - self.mouth_pos\n across = along.normalized().rotated(90) * tongue_w * 0.5\n\n along *= self.length\n\n self.dl.vertices = [c for v in [\n self.mouth_pos - across,\n self.mouth_pos - across + along,\n self.mouth_pos + across + along,\n self.mouth_pos + across,\n ] for c in v]",
"def drawLines(self):\n\t\tintersections = [[], []]\n\t\tfor l in self.lines:\n\t\t\tif l.direction == 'v':\n\t\t\t\tif l.rtc:\n\t\t\t\t\tposition = l.coordinate + int((self.width - 1) / 2)\n\t\t\t\telse:\n\t\t\t\t\tposition = int((l.coordinate * self.width / 100) if type(l.coordinate) == float else l.coordinate)\n\t\t\t\tintersections[0].append(position)\n\t\t\t\tfor yPos in range(1, self.height - 2):\n\t\t\t\t\tself.wts(yPos, position, '│', self._borderColor)\n\t\t\t\t# endpoints\n\t\t\t\tself.wts(0, position, '┬',self._borderColor)\n\t\t\t\tself.wts(self.height - 2, position, '┴', self._borderColor)\n\t\t\telif l.direction == 'h':\n\t\t\t\tif l.rtc:\n\t\t\t\t\tposition = l.coordinate + ((self.height - 1) / 2)\n\t\t\t\telse:\n\t\t\t\t\tposition = int((l.coordinate * self.height / 100) - 1 if type(l.coordinate) == float else l.coordinate)\n\t\t\t\tintersections[1].append(position)\n\t\t\t\tself.wts(position, 1, '─' * (self.width - 2), self._borderColor)\n\t\t\t\t# endpoints\n\t\t\t\tself.wts(position, 0, '├', self._borderColor)\n\t\t\t\tself.wts(position, self.width - 1, '┤', self._borderColor)\n\t\t# draw intersections\n\t\tfor x in intersections[1]:\n\t\t\tfor y in intersections[0]:\n\t\t\t\tself.wts(x, y, '┼', self._borderColor)\n\t\tself.verticalBoundaries = intersections[0]\n\t\tif self.screenBorder:\n\t\t\tself.verticalBoundaries.append(self.width)",
"def lower_arm(self):\r\n # ---------------------------------------------------------------------\r\n # Done: 8. Implement this method; it is a ONE-LINER!\r\n # ---------------------------------------------------------------------\r\n if self.is_calibrated == False:\r\n self.calibrate_arm()\r\n self.move_arm_to_position(0)#America\r",
"def at_phaseFold(self, ph, **PF_kw):\n\t # Epoch at arbitrary phase, ph\n\t t0 = self.t0 + ph / 360. * self.P \n\t lcPF = PF(self.t, self.fm, self.P, t0, self.tdur, **PF_kw) \n\t lcPF = pd.DataFrame(lcPF)\n\t self.update_table('lcPF%i' % ph, lcPF,'Phase folded light curve')",
"def at_phaseFold(self, ph, **PF_kw):\n\t # Epoch at arbitrary phase, ph\n\t t0 = self.t0 + ph / 360. * self.P \n\t lcPF = PF(self.t, self.fm, self.P, t0, self.tdur, **PF_kw) \n\t lcPF = pd.DataFrame(lcPF)\n\t self.update_table('lcPF%i' % ph, lcPF,'Phase folded light curve')",
"def ActiveHlt2Lines(self) :\n \n lines = ['Hlt2PassThrough','Hlt2Lumi','Hlt2DebugEvent',\n 'Hlt2Forward','Hlt2ErrorEvent','Hlt2Transparent']\n \n return lines",
"def drawcutline(f,layernamelist,cutline_entities_count): \r\n \r\n #layernamelist=[layernamelist[0]] \r\n layercount=0\r\n ringlist=[[[-0.215+globalconfig.CUTLINE_X_OFFSET,0.0+globalconfig.CUTLINE_Y_OFFSET],[0.215+globalconfig.CUTLINE_X_OFFSET,0.0+globalconfig.CUTLINE_Y_OFFSET]],\r\n [[-0.215+globalconfig.CUTLINE_X_OFFSET,171.68+globalconfig.CUTLINE_Y_OFFSET],[0.215+globalconfig.CUTLINE_X_OFFSET,171.68+globalconfig.CUTLINE_Y_OFFSET]],\r\n [[-0.215+globalconfig.CUTLINE_X_OFFSET,175.68+globalconfig.CUTLINE_Y_OFFSET],[0.215+globalconfig.CUTLINE_X_OFFSET,175.68+globalconfig.CUTLINE_Y_OFFSET]],\r\n [[171.4650+globalconfig.CUTLINE_X_OFFSET,0.0+globalconfig.CUTLINE_Y_OFFSET],[171.8950+globalconfig.CUTLINE_X_OFFSET,0.0+globalconfig.CUTLINE_Y_OFFSET]],\r\n [[171.4650+globalconfig.CUTLINE_X_OFFSET,171.68+globalconfig.CUTLINE_Y_OFFSET],[171.8950+globalconfig.CUTLINE_X_OFFSET,171.68+globalconfig.CUTLINE_Y_OFFSET]]]\r\n flashlist=buildflashlist()\r\n cutlineset=buildcutlineset() \r\n \r\n f.write(\"0\\nSECTION\\n2\\nENTITIES\\n\")\r\n \r\n for layername in layernamelist:\r\n layercount=layercount+1\r\n for polyline in cutlineset:\r\n cutline_entities_count=cutline_entities_count+1\r\n f.write(\"0\\nPOLYLINE\\n8\\n\"+layername+\"\\n5\\n\"+hex(cutline_entities_count)[2:]) # begin writing a polyline\r\n f.write(\"\\n66\\n1\\n10\\n0.0\\n20\\n0.0\\n30\\n0.0\\n40\\n0.08\\n41\\n0.08\\n\")\r\n cutline_entities_count=drawwidthpolyline(polyline, cutline_entities_count, f,layername)\r\n cutline_entities_count=drawring(ringlist, cutline_entities_count, f, layername)\r\n cutline_entities_count=drawflash(flashlist, cutline_entities_count, f, layername)\r\n cutline_entities_count=drawtext(cutline_entities_count, f, layername,layercount)\r\n \r\n return cutline_entities_count",
"def lpt_prototype(mesh,\n nc=FLAGS.nc,\n bs=FLAGS.box_size,\n batch_size=FLAGS.batch_size,\n a0=FLAGS.a0,\n a=FLAGS.af,\n nsteps=FLAGS.nsteps):\n\n stages = np.linspace(a0, a, nsteps, endpoint=True)\n klin = np.loadtxt('../flowpm/data/Planck15_a1p00.txt').T[0]\n plin = np.loadtxt('../flowpm/data/Planck15_a1p00.txt').T[1]\n ipklin = iuspline(klin, plin)\n\n # Define the named dimensions\n # Parameters of the small scales decomposition\n n_block_x = FLAGS.nx\n n_block_y = FLAGS.ny\n n_block_z = 1\n halo_size = FLAGS.hsize\n\n if halo_size >= 0.5 * min(nc // n_block_x, nc // n_block_y, nc // n_block_z):\n new_size = int(0.5 *\n min(nc // n_block_x, nc // n_block_y, nc // n_block_z))\n print('WARNING: REDUCING HALO SIZE from %d to %d' % (halo_size, new_size))\n halo_size = new_size\n\n # Parameters of the large scales decomposition\n downsampling_factor = FLAGS.dsample\n lnc = nc // 2**downsampling_factor\n\n #\n\n fx_dim = mtf.Dimension(\"nx\", nc)\n fy_dim = mtf.Dimension(\"ny\", nc)\n fz_dim = mtf.Dimension(\"nz\", nc)\n\n tfx_dim = mtf.Dimension(\"tx\", nc)\n tfy_dim = mtf.Dimension(\"ty\", nc)\n tfz_dim = mtf.Dimension(\"tz\", nc)\n\n # Dimensions of the low resolution grid\n x_dim = mtf.Dimension(\"nx_lr\", lnc)\n y_dim = mtf.Dimension(\"ny_lr\", lnc)\n z_dim = mtf.Dimension(\"nz_lr\", lnc)\n\n tx_dim = mtf.Dimension(\"tx_lr\", lnc)\n ty_dim = mtf.Dimension(\"ty_lr\", lnc)\n tz_dim = mtf.Dimension(\"tz_lr\", lnc)\n\n nx_dim = mtf.Dimension('nx_block', n_block_x)\n ny_dim = mtf.Dimension('ny_block', n_block_y)\n nz_dim = mtf.Dimension('nz_block', n_block_z)\n\n sx_dim = mtf.Dimension('sx_block', nc // n_block_x)\n sy_dim = mtf.Dimension('sy_block', nc // n_block_y)\n sz_dim = mtf.Dimension('sz_block', nc // n_block_z)\n\n k_dims = [tx_dim, ty_dim, tz_dim]\n\n batch_dim = mtf.Dimension(\"batch\", batch_size)\n pk_dim = mtf.Dimension(\"npk\", len(plin))\n pk = mtf.import_tf_tensor(mesh, plin.astype('float32'), shape=[pk_dim])\n\n # Compute necessary Fourier kernels\n kvec = flowpm.kernels.fftk((nc, nc, nc), symmetric=False)\n kx = mtf.import_tf_tensor(mesh,\n kvec[0].squeeze().astype('float32'),\n shape=[tfx_dim])\n ky = mtf.import_tf_tensor(mesh,\n kvec[1].squeeze().astype('float32'),\n shape=[tfy_dim])\n kz = mtf.import_tf_tensor(mesh,\n kvec[2].squeeze().astype('float32'),\n shape=[tfz_dim])\n kv = [ky, kz, kx]\n\n # kvec for low resolution grid\n kvec_lr = flowpm.kernels.fftk([lnc, lnc, lnc], symmetric=False)\n\n kx_lr = mtf.import_tf_tensor(mesh,\n kvec_lr[0].squeeze().astype('float32') /\n 2**downsampling_factor,\n shape=[tx_dim])\n ky_lr = mtf.import_tf_tensor(mesh,\n kvec_lr[1].squeeze().astype('float32') /\n 2**downsampling_factor,\n shape=[ty_dim])\n kz_lr = mtf.import_tf_tensor(mesh,\n kvec_lr[2].squeeze().astype('float32') /\n 2**downsampling_factor,\n shape=[tz_dim])\n kv_lr = [ky_lr, kz_lr, kx_lr]\n\n # kvec for high resolution blocks\n padded_sx_dim = mtf.Dimension('padded_sx_block',\n nc // n_block_x + 2 * halo_size)\n padded_sy_dim = mtf.Dimension('padded_sy_block',\n nc // n_block_y + 2 * halo_size)\n padded_sz_dim = mtf.Dimension('padded_sz_block',\n nc // n_block_z + 2 * halo_size)\n kvec_hr = flowpm.kernels.fftk([\n nc // n_block_x + 2 * halo_size, nc // n_block_y + 2 * halo_size,\n nc // n_block_z + 2 * halo_size\n ],\n symmetric=False)\n\n kx_hr = mtf.import_tf_tensor(mesh,\n kvec_hr[0].squeeze().astype('float32'),\n shape=[padded_sx_dim])\n ky_hr = mtf.import_tf_tensor(mesh,\n kvec_hr[1].squeeze().astype('float32'),\n shape=[padded_sy_dim])\n kz_hr = 
mtf.import_tf_tensor(mesh,\n kvec_hr[2].squeeze().astype('float32'),\n shape=[padded_sz_dim])\n kv_hr = [ky_hr, kz_hr, kx_hr]\n\n shape = [batch_dim, fx_dim, fy_dim, fz_dim]\n lr_shape = [batch_dim, x_dim, y_dim, z_dim]\n hr_shape = [batch_dim, nx_dim, ny_dim, nz_dim, sx_dim, sy_dim, sz_dim]\n part_shape = [batch_dim, fx_dim, fy_dim, fz_dim]\n\n # Begin simulation\n\n initc = mtfpm.linear_field(mesh, shape, bs, nc, pk, kv)\n\n # Reshaping array into high resolution mesh\n field = mtf.slicewise(lambda x: tf.expand_dims(\n tf.expand_dims(tf.expand_dims(x, axis=1), axis=1), axis=1), [initc],\n output_dtype=tf.float32,\n output_shape=hr_shape,\n name='my_reshape',\n splittable_dims=lr_shape[:-1] + hr_shape[1:4] +\n part_shape[1:3])\n\n for block_size_dim in hr_shape[-3:]:\n field = mtf.pad(field, [halo_size, halo_size], block_size_dim.name)\n\n for blocks_dim, block_size_dim in zip(hr_shape[1:4], field.shape[-3:]):\n field = mpm.halo_reduce(field, blocks_dim, block_size_dim, halo_size)\n\n field = mtf.reshape(field, field.shape + [mtf.Dimension('h_dim', 1)])\n high = field\n low = mesh_utils.downsample(field, downsampling_factor, antialias=True)\n\n low = mtf.reshape(low, low.shape[:-1])\n high = mtf.reshape(high, high.shape[:-1])\n\n for block_size_dim in hr_shape[-3:]:\n low = mtf.slice(low, halo_size // 2**downsampling_factor,\n block_size_dim.size // 2**downsampling_factor,\n block_size_dim.name)\n # Hack usisng custom reshape because mesh is pretty dumb\n low = mtf.slicewise(lambda x: x[:, 0, 0, 0], [low],\n output_dtype=tf.float32,\n output_shape=lr_shape,\n name='my_dumb_reshape',\n splittable_dims=lr_shape[:-1] + hr_shape[:4])\n\n state = mtfpm.lpt_init(\n low,\n high,\n 0.1,\n kv_lr,\n kv_hr,\n halo_size,\n hr_shape,\n lr_shape,\n part_shape[1:],\n downsampling_factor=downsampling_factor,\n antialias=True,\n )\n\n # Here we can run our nbody\n final_state = state #mtfpm.nbody(state, stages, lr_shape, hr_shape, k_dims, kv_lr, kv_hr, halo_size, downsampling_factor=downsampling_factor)\n\n # paint the field\n final_field = mtf.zeros(mesh, shape=hr_shape)\n for block_size_dim in hr_shape[-3:]:\n final_field = mtf.pad(final_field, [halo_size, halo_size],\n block_size_dim.name)\n final_field = mesh_utils.cic_paint(final_field, final_state[0], halo_size)\n # Halo exchange\n for blocks_dim, block_size_dim in zip(hr_shape[1:4], final_field.shape[-3:]):\n final_field = mpm.halo_reduce(final_field, blocks_dim, block_size_dim,\n halo_size)\n # Remove borders\n for block_size_dim in hr_shape[-3:]:\n final_field = mtf.slice(final_field, halo_size, block_size_dim.size,\n block_size_dim.name)\n\n #final_field = mtf.reshape(final_field, [batch_dim, fx_dim, fy_dim, fz_dim])\n # Hack usisng custom reshape because mesh is pretty dumb\n final_field = mtf.slicewise(lambda x: x[:, 0, 0, 0], [final_field],\n output_dtype=tf.float32,\n output_shape=[batch_dim, fx_dim, fy_dim, fz_dim],\n name='my_dumb_reshape',\n splittable_dims=part_shape[:-1] + hr_shape[:4])\n\n return initc, final_field\n\n ##",
"def unfold_wire(pl):\n\tpl = phone_pl\n\tshape_points = [np.array(p) for p in pl.points]\n\tpointIter = takeNGenerator(shape_points, 4)\n\td0 = getDistance(*shape_points[0:2])\n\tpoints = [np.array([0, 0]), np.array([0, d0])]\n\tfor i in range(len(shape_points)-3):\n\t\t(p1,p2,p3,p4) = pointIter.next()\n\t\tv1 =p1-p2\n\t\tv2 = p3-p2\n\t\tv3 = p2-p3\n\t\tv4 = p4-p3\n\t\told_normal = np.cross(v1,v2)\n\t\tnew_normal = np.cross(v3,v4)\n\t\tnorm_old = old_normal/la.norm(old_normal)\n\t\tnorm_new = old_normal/la.norm(new_normal)\n\n\n\t\t#check if we need to transform:\n\t\tif any(norm_old != norm_new):\n\t\t\tprint norm_old, norm_new\n\t\t\t#create a transform that will rotate the next points to the old orientation\n\t\t\ttransform = create_transform(norm_new, norm_old)\n\t\t\trot_pot = p2\n\t\t\tpose = (rot_pot, transform)\n\t\t\tpoly = PolyLine(shape_points[i:])\n\t\t\ttranslated = poly.transformed(pose)\n\t\t\tnew_pts = [np.array(p) for p in translated.points]\n\n\t\t\tif len(shape_points[:i]) is 0:\n\t\t\t\tshape_points = new_pts\n\t\t\telse:\n\t\t\t\tshape_points = np.vstack((shape_points[:i], new_pts))\n\t\t\tpointIter = takeNGenerator(shape_points, 4)\n\t\t\tfast_forward(pointIter, i)\n\treturn PolyLine(shape_points)",
"def pipeline(image,motorq):\n\n height = image.shape[0]\n width = image.shape[1]\n region_of_interest_vertices = [\n (0, height),\n (width / 2, 0),\n (width, height),\n ]\n blur = cv2.blur(image,(5,5))\n gray_image = cv2.cvtColor(blur, cv2.COLOR_RGB2GRAY)\n\n cannyed_image = cv2.Canny(gray_image, 100, 200)\n \n # cropped_image = region_of_interest(\n # cannyed_image,\n # np.array(\n # [region_of_interest_vertices],\n # np.int32\n # ),\n # )\n \n lines = cv2.HoughLinesP(\n cannyed_image,\n rho=6,\n theta=np.pi / 60,\n threshold=160,\n lines=np.array([]),\n minLineLength=40,\n maxLineGap=25\n )\n \n left_line_x = []\n left_line_y = []\n right_line_x = []\n right_line_y = []\n #print(lines)\n if not np.any(lines):\n return image\n \n for line in lines:\n for x1, y1, x2, y2 in line:\n #print line\n if (x2-x1) == 0:\n break\n slope = float(y2 - y1) / (x2 - x1)\n if math.fabs(slope) < 0.5:\n continue\n if slope <= 0:\n left_line_x.extend([x1, x2])\n left_line_y.extend([y1, y2])\n else:\n right_line_x.extend([x1, x2])\n right_line_y.extend([y1, y2])\n\n if len(left_line_x)==0 or len(right_line_x)==0:\n return image\n\n min_y = int(image.shape[0] * (3 / 5))\n max_y = int(image.shape[0])\n\n poly_left = np.poly1d(np.polyfit(\n left_line_y,\n left_line_x,\n deg=1\n ))\n \n left_x_start = int(poly_left(max_y))\n left_x_end = int(poly_left(min_y))\n \n poly_right = np.poly1d(np.polyfit(\n right_line_y,\n right_line_x,\n deg=1\n ))\n \n right_x_start = int(poly_right(max_y))\n right_x_end = int(poly_right(min_y))\n\n line_image = draw_lines(\n image,\n [[\n [left_x_start, max_y, left_x_end, min_y],\n [right_x_start, max_y, right_x_end, min_y],\n ]],\n thickness=5,\n )\n\n x_int = Intersect([left_x_start, max_y], [left_x_end, min_y], [right_x_start, max_y], [right_x_end, min_y])[0]\n #print(line_image.shape()[0])\n middle = line_image.shape[0]/2\n if x_int < middle-140:\n motorq.put( [ -13000 , 0 ] )\n elif x_int > middle+140:\n motorq.put( [ 0, -13000 ] )\n else:\n motorq.put( [ -13000, -13000 ] )\n\n \n\n\n return line_image",
"def setup_lines(self):\n self.center_lines()\n self.space_lines()",
"def set_original_planes(self, display_opt):\n\n # get 4-chamber view\n four_ch_view_plane_normal = self.find_4ch_view(display_opt)\n\n # set rodriguez rotation around midline (apex to C)\n axis_of_rot = np.array(self.epi_apex_node - self.C)\n self.axis_of_rot_normalized = axis_of_rot/np.linalg.norm(axis_of_rot)\n\n # get 2-chamber view (90-counterclock rotation from 4ch)\n new_P = my_rodriguez_rotation(self.plane_pts, self.axis_of_rot_normalized,\n math.radians(self.orig_view_angles[1])) # rodriguez rotation around midline\n two_ch_view_plane_normal = find_plane_eq(new_P[0, :], new_P[1, :], new_P[2, :])\n\n # get 3-chamber view (additional 30-60 counterclock rotation from 3ch)\n new_P = my_rodriguez_rotation(self.plane_pts, self.axis_of_rot_normalized, math.radians(self.orig_view_angles[2]))\n three_ch_view_plane_normal = find_plane_eq(new_P[0, :], new_P[1, :], new_P[2, :])\n\n if display_opt:\n _ = self.mesh_slicer(four_ch_view_plane_normal, 'mesh')\n _ = self.mesh_slicer(two_ch_view_plane_normal, 'mesh')\n _ = self.mesh_slicer(three_ch_view_plane_normal, 'mesh')\n\n self.original_planes = np.vstack((four_ch_view_plane_normal,\n two_ch_view_plane_normal,\n three_ch_view_plane_normal))",
"def _set_fibonnaci_level(self, prc=50.0):\n color = COLORS[prc]\n xpos, ypos = self.position_line(prc)\n\n line = pg.LineSegmentROI(positions=(xpos, ypos),\n pen=pg.mkPen(color, width=2),\n movable=False,\n rotatable=False,\n resizable=False,\n )\n line.setSelected(False)\n return line",
"def build_splines(self, nsc, alpha=0.05, initial_beta=1.5):\n pass",
"def plotOverlays(self):\n if self.overlayFluxSurfaces:\n self.plotFluxSurfaces()\n if self.overlayMagneticAxis:\n self.plotMagneticAxis()\n if self.overlaySeparatrix:\n self.plotSeparatrix()\n if self.overlayWallCrossSection:\n self.plotWallCrossSection()",
"def testTinttsysMapLC(self):\n self._runTest('tinttsys', False, [1,3,5,7,9,11,13,15], 'linear,cspline',self.spwmap)",
"def horizontal_arcs_iglu():\n arc(screen, BLACK, (50, 560, 300, 20), 3.14, 0)\n arc(screen, BLACK, (60, 510, 280, 20), 3.14, 0)\n arc(screen, BLACK, (80, 460, 240, 20), 3.14, 0)\n arc(screen, BLACK, (120, 420, 160, 20), 3.14, 0)",
"def group_centers_phase1_and_2(self) -> None:\n self.rotate_U_to_U()\n self.rotate_F_to_F()\n\n if self.centers_staged():\n return\n\n original_state = self.state[:]\n original_solution = self.solution[:]\n tmp_solution_len = len(self.solution)\n\n # find multiple phase1 solutions\n phase1_solutions = self.lt_LR_centers_stage.solutions_via_c(solution_count=100)\n pt_state_indexes = []\n pt_state_indexes_LR_centers_special = []\n phase2_pt_state_indexes_to_phase1_solution = {}\n logger.info(f\"found {len(phase1_solutions)} phase1 solutions\")\n\n # find the phase2 solution for each phase1 solution\n for phase1_solution, (pt0_state, pt1_state, pt2_state, pt3_state, pt4_state) in phase1_solutions:\n self.state = original_state[:]\n self.solution = original_solution[:]\n\n for step in phase1_solution:\n self.rotate(step)\n\n # stage the LR centers\n phase2_pt_state_indexes = tuple([pt.state_index() for pt in self.lt_FB_centers_stage.prune_tables])\n pt_state_indexes.append(phase2_pt_state_indexes)\n phase2_pt_state_indexes_to_phase1_solution[phase2_pt_state_indexes] = phase1_solution\n\n # stage the LR centers and put them into one of 495 states solveable with L L' R R'\n phase2_pt_state_indexes = tuple(\n [pt.state_index() for pt in self.lt_FB_centers_stage_LR_centers_special.prune_tables]\n )\n pt_state_indexes_LR_centers_special.append(phase2_pt_state_indexes)\n phase2_pt_state_indexes_to_phase1_solution[phase2_pt_state_indexes] = phase1_solution\n\n self.state = original_state[:]\n self.solution = original_solution[:]\n\n # stage the FB centers\n phase2_solutions = self.lt_FB_centers_stage.solutions_via_c(pt_states=pt_state_indexes, solution_count=1)\n phase2_solution = phase2_solutions[0][0]\n\n # stage the FB centers and put LR centers into one of 495 states solveable with L L' R R'\n phase2_solutions_lr_centers_special = self.lt_FB_centers_stage_LR_centers_special.solutions_via_c(\n pt_states=pt_state_indexes_LR_centers_special, solution_count=1\n )\n phase2_solution_lr_centers_special = phase2_solutions_lr_centers_special[0][0]\n\n # if we can put the LR centers into one of 495 states without adding to the move count, make it so\n if len(phase2_solution_lr_centers_special) <= len(phase2_solution):\n min_phase2_solution, (\n pt0_state,\n pt1_state,\n pt2_state,\n pt3_state,\n pt4_state,\n ) = phase2_solutions_lr_centers_special[0]\n min_phase1_solution = phase2_pt_state_indexes_to_phase1_solution[pt0_state, pt1_state, pt2_state]\n else:\n min_phase2_solution, (pt0_state, pt1_state, pt2_state, pt3_state, pt4_state) = phase2_solutions[0]\n min_phase1_solution = phase2_pt_state_indexes_to_phase1_solution[pt0_state, pt1_state]\n\n logger.info(\n f\"phase2 solution length {len(phase2_solution)}, phase2_lr_centers_special solution length {len(phase2_solution_lr_centers_special)}\"\n )\n\n for step in min_phase1_solution:\n self.rotate(step)\n\n self.print_cube_add_comment(\"LR centers staged\", tmp_solution_len)\n\n tmp_solution_len = len(self.solution)\n for step in min_phase2_solution:\n self.rotate(step)\n\n self.print_cube_add_comment(\"UD FB centers staged\", tmp_solution_len)",
"def horn_adjust(x, y):\n debug=False\n #debug=True\n meanX = x.mean(axis=0)\n meanY = y.mean(axis=0)\n translation = meanY - meanX\n x_centered = x - meanX\n y_centered = y - meanY\n if debug:\n print(\"x_centered\")\n print(x_centered)\n print(\"y_centered\")\n print(y_centered)\n # Find how much to rescale the x's. Entrywise multiplication.\n x_scale = np.sqrt((x_centered * x_centered).sum())\n y_scale = np.sqrt((y_centered * y_centered).sum())\n scale_factor = y_scale / x_scale\n x_centered_prime = x_centered * scale_factor\n if debug:\n print(\"scale_factor\")\n print(scale_factor)\n print(\"x_centered_prime\")\n print(x_centered_prime)\n # Find angle to rotate the planes\n x_perp = np.cross(x_centered_prime[0], x_centered_prime[1])\n y_perp = np.cross(y_centered[0], y_centered[1])\n # Find rotation matrix to rotate the x plane into the y plane\n # Using https://math.stackexchange.com/questions/180418/calculate-rotation-matrix-to-align-vector-a-to-vector-b-in-3d\n # https://en.wikipedia.org/wiki/Rodrigues'_rotation_formula\n x_perp_unit = x_perp / np.linalg.norm(x_perp)\n y_perp_unit = y_perp / np.linalg.norm(y_perp)\n v = np.cross(x_perp_unit, y_perp_unit)\n s = np.linalg.norm(v) # sine of angle between the planes\n c = x_perp_unit.dot(y_perp_unit) # cosine of angle between the planes\n v_x = np.array([[0, -v[2], v[1]],\n [v[2], 0, -v[0]],\n [-v[1], v[0], 0]])\n # rotation_p acts on the plane\n rotation_p = np.eye(3) + v_x + v_x.dot(v_x) * (1 - c) / s**2.0\n # Transpose to make each x a column vector, then transpose back for next part\n x_plane = rotation_p.dot(x_centered_prime.T).T\n # Now rotate within the plane, as in Sec. 5 of Horn\n v_y = np.array([[0, -y_perp_unit[2], y_perp_unit[1]],\n [y_perp_unit[2], 0, -y_perp_unit[0]],\n [-y_perp_unit[1], y_perp_unit[0], 0]])\n s_win_tmp = np.sum([np.cross(x_plane[i], y_centered[i]) for i in range(3)],\n axis=0).dot(y_perp_unit)\n c_win_tmp = np.sum([x_plane[i].dot(y_centered[i]) for i in range(3)],\n axis=0)\n sin_theta = s_win_tmp / np.sqrt(np.linalg.norm(s_win_tmp)**2 +\n np.linalg.norm(c_win_tmp)**2)\n cos_theta = c_win_tmp / np.sqrt(np.linalg.norm(s_win_tmp)**2 +\n np.linalg.norm(c_win_tmp)**2)\n rotation_win = np.eye(3) + sin_theta * v_y + (1 - cos_theta) * v_y.dot(v_y)\n # transpose so each column is an x vector, then transpose back at the end\n # x_final = rotation_win.dot(x_final.T).T\n rotation_full = rotation_win.dot(rotation_p)\n # Ignore scale_factor\n # T(x) = Ax + b\n A = rotation_full\n b = meanY - rotation_full.dot(meanX)\n if debug:\n print(\"A\")\n print(rotation_full)\n print(\"b\")\n print(b)\n return(A, b)",
"def center_flows(L_wprime, U_wprime, L_w3, U_w3, L_overlap, U_overlap):\n # examine every possible point\n current_dist_to_edge = -1\n point = (0,0)\n #print(\"w3 range: [{}, {}]\".format(L_w3, U_w3))\n #print(\"w' range: [{}, {}]\".format(L_wprime, U_wprime))\n #print(\"overlap range: [{},{}]\".format(L_overlap, U_overlap))\n for y in range(L_w3, U_w3 + 1):\n #print(\"y={}\".format(y))\n LH_bound = max(L_wprime, L_overlap - y)\n #print(\"LH bound = {}\".format(LH_bound))\n RH_bound = min(U_wprime, U_overlap - y)\n #print(\"RH bound = {}\".format(RH_bound))\n for x in range(LH_bound, RH_bound + 1):\n # w3 UB: 0x + 1y - U_w3 = 0\n # w3 LB: 0x + 1y - L_w3 = 0\n # wprime UB: 1x + 0y - U_wprime\n # wprime LB: 1x + 0y - L_wprime\n # wprime + w3 UB: 1x + 1y - U_wprime,wk\n # wprime + w3 LB: 1x + 1y - L_wprime,wk\n dist_to_edge = min(distance_point_to_line(x, y, 0, -1, U_w3), #0x-1y+U_w3=0\n distance_point_to_line(x, y, 0, -1, L_w3), #0x-1y+L_w3=0\n # -1x + 0y + U_wprime = 0\n distance_point_to_line(x, y, -1, 0, U_wprime),\n # -1x + 0y + L_wprime = 0\n distance_point_to_line(x, y, -1, 0, L_wprime),\n # -1x - 1y + U_overlap = 0\n distance_point_to_line(x, y, -1, -1, U_overlap),\n # -1 x - y + L_overlap = 0\n distance_point_to_line(x, y, -1, -1, L_overlap))\n if dist_to_edge > current_dist_to_edge:\n #print(\"At point ({},{}), distance to edge increased from {} to {}.\"\\\n # .format(x,y,current_dist_to_edge,dist_to_edge))\n current_dist_to_edge = dist_to_edge\n point = (x,y)\n return(point)",
"def draw_line(self, DISP, side:str, indizes:tuple, pink = False):\r\n offset = 1 #< Just to draw the line nicely\r\n pos = (indizes[0] - 1) * self.grid_size, indizes[1] * self.grid_size\r\n # Check if it's a pink line\r\n if pink:\r\n start_pos = pos[0], pos[1] + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size, pos[1] + self.grid_size // 2\r\n # Check if the line should be vertically. u for up\r\n elif side == 'u':\r\n start_pos = pos[0] + self.width - offset + self.grid_size // 2, pos[1] + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size + offset + self.grid_size // 2 - self.width, pos[1] + self.grid_size // 2\r\n # Check if the line should be horizontally. l for left\r\n elif side == 'l':\r\n start_pos = pos[0] + self.grid_size // 2, pos[1] + self.width - offset + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size // 2, pos[1] - self.width + self.grid_size + offset + self.grid_size // 2\r\n if not pink:\r\n pg.draw.line(DISP, Colors.colors['BLACK'], start_pos,end_pos, self.width + 2 * offset) \r\n else:\r\n pg.draw.line(DISP, Colors.colors['PINK'], start_pos,end_pos, self.width + 2 * offset)",
"def suspension_plot(ax, full_car_dict, planes_choice, instant_center_choice, *keys):\n # -------------------------Stack 3d Points To Form Links-----------------------------------\n left = keys[0]\n right = keys[1]\n # Left\n left_lower_control_arm = np.stack((full_car_dict[left]['Lower Fore'], full_car_dict[left]['Lower Out'],\n full_car_dict[left]['Lower Aft']), axis=-1)\n left_upper_control_arm = np.stack((full_car_dict[left]['Upper Fore'], full_car_dict[left]['Upper Out'],\n full_car_dict[left]['Upper Aft']), axis=-1)\n left_pushrod = np.stack((full_car_dict[left]['Pushrod Control Arm'], full_car_dict[left]['Pushrod Rocker']), axis=-1\n )\n\n left_rocker = np.stack((full_car_dict[left]['Damper Rocker'], full_car_dict[left]['Rocker Pivot'],\n full_car_dict[left]['Pushrod Rocker']), axis=-1)\n\n # Right\n right_lower_control_arm = np.stack((full_car_dict[right]['Lower Fore'], full_car_dict[right]['Lower Out'],\n full_car_dict[right]['Lower Aft']), axis=-1)\n right_upper_control_arm = np.stack((full_car_dict[right]['Upper Fore'], full_car_dict[right]['Upper Out'],\n full_car_dict[right]['Upper Aft']), axis=-1)\n\n right_pushrod = np.stack((full_car_dict[right]['Pushrod Control Arm'], full_car_dict[right]['Pushrod Rocker']),\n axis=-1)\n heave_damper = np.stack((full_car_dict[right]['Damper Rocker'], full_car_dict[left]['Damper Rocker']), axis=-1)\n\n roll_damper_a = np.stack((full_car_dict[right]['Roll Damper a'], full_car_dict[left]['Roll Damper a']),\n axis=-1)\n\n right_rocker = np.stack((full_car_dict[right]['Damper Rocker'], full_car_dict[right]['Rocker Pivot'],\n full_car_dict[right]['Pushrod Rocker']), axis=-1)\n\n # Steering\n steering = np.stack((full_car_dict[left]['Tie Rod Upright'], full_car_dict[left]['Tie Rod Chassis'],\n full_car_dict[right]['Tie Rod Chassis'], full_car_dict[right]['Tie Rod Upright']), axis=-1)\n\n # ---------------------Plane Instant Centers------------------------\n # LEFT\n left_uc_x = np.linspace(full_car_dict[left]['Upper Aft'][0], full_car_dict[left]['Upper Fore'][0], 10)\n left_uc_y = np.linspace(full_car_dict[left]['Upper Aft'][1], full_car_dict[left]['Upper Out'][1], 10)\n left_ucxx, left_ucyy = np.meshgrid(left_uc_x, left_uc_y)\n left_uc_plane = plane_equation(full_car_dict[left]['Upper Fore'], full_car_dict[left]['Upper Aft'],\n full_car_dict[left]['Upper Out'])\n left_uczz = plot_plane(left_uc_plane, left_ucxx, left_ucyy, full_car_dict[left]['Upper Fore'])\n\n left_lc_x = np.linspace(full_car_dict[left]['Lower Fore'][0], full_car_dict[left]['Lower Aft'][0], 10)\n left_lc_y = np.linspace(0, full_car_dict[left]['Lower Out'][1], 10)\n left_lc_plane = plane_equation(full_car_dict[left]['Lower Fore'], full_car_dict[left]['Lower Aft'],\n full_car_dict[left]['Lower Out'])\n left_lczz = plot_plane(left_lc_plane, left_lc_x, left_lc_y, full_car_dict[left]['Lower Fore'])\n left_lcxx, left_lcyy = np.meshgrid(left_lc_y, left_lc_x)\n\n # intersection line of two planes (instant center axis)\n left_ic_unit, left_ic_point = plane_intersection_line(left_uc_plane, left_lc_plane,\n full_car_dict[left]['Upper Fore'],\n full_car_dict[left]['Lower Fore'])\n middle_t = (full_car_dict[right]['Upper Fore'][0] - left_ic_point[0]) / left_ic_unit[0]\n left_intersection_line = plot_line(left_ic_unit, left_ic_point, np.linspace(middle_t - 20, middle_t + 30, 20))\n\n # RIGHT (same as left but with right points)\n # control arm planes\n right_uc_x = np.linspace(full_car_dict[right]['Upper Aft'][0], full_car_dict[right]['Upper Fore'][0], 10)\n right_uc_y = 
np.linspace(full_car_dict[right]['Upper Aft'][1], full_car_dict[right]['Upper Out'][1], 10)\n right_ucxx, right_ucyy = np.meshgrid(right_uc_x, right_uc_y)\n right_uc_plane = plane_equation(full_car_dict[right]['Upper Fore'], full_car_dict[right]['Upper Aft'],\n full_car_dict[right]['Upper Out'])\n right_uczz = plot_plane(right_uc_plane, right_ucxx, right_ucyy, full_car_dict[right]['Upper Fore'])\n\n right_lc_x = np.linspace(full_car_dict[right]['Lower Fore'][0], full_car_dict[right]['Lower Aft'][0], 10)\n right_lc_y = np.linspace(0, full_car_dict[right]['Lower Out'][1], 10)\n right_lc_plane = plane_equation(full_car_dict[right]['Lower Fore'], full_car_dict[right]['Lower Aft'],\n full_car_dict[right]['Lower Out'])\n right_lczz = plot_plane(right_lc_plane, right_lc_x, right_lc_y, full_car_dict[right]['Lower Fore'])\n right_lcxx, right_lcyy = np.meshgrid(right_lc_y, right_lc_x)\n\n # intersection line of two planes (instant center axis)\n right_ic_unit, right_ic_point = plane_intersection_line(right_uc_plane, right_lc_plane,\n full_car_dict[right]['Upper Fore'],\n full_car_dict[right]['Lower Fore'])\n middle_t = (full_car_dict[right]['Upper Fore'][0] - right_ic_point[0]) / right_ic_unit[0]\n right_intersection_line = plot_line(right_ic_unit, right_ic_point, np.linspace(middle_t - 30, middle_t + 30, 20))\n\n # -------------------------------Plot on Given Axis-----------------------------------------\n # set up axis and clear previous if animated\n ax.clear()\n ax.set_xlim(full_car_dict[left]['Upper Fore'][0] + 10, full_car_dict[left]['Upper Aft'][0] - 10)\n ax.set_ylim(-25, 25)\n ax.set_zlim(0, 25)\n ax.view_init(0, 0)\n # Tie rods\n ax.plot(steering[0], steering[1], steering[2], c='r')\n # Rockers\n ax.plot(left_rocker[0], left_rocker[1], left_rocker[2], c='k')\n ax.plot(right_rocker[0], right_rocker[1], right_rocker[2], c='k')\n # Control Arms\n ax.plot(left_lower_control_arm[0], left_lower_control_arm[1], left_lower_control_arm[2], c='b')\n ax.plot(left_upper_control_arm[0], left_upper_control_arm[1], left_upper_control_arm[2], c='b')\n ax.plot(right_lower_control_arm[0], right_lower_control_arm[1], right_lower_control_arm[2], c='b')\n ax.plot(right_upper_control_arm[0], right_upper_control_arm[1], right_upper_control_arm[2], c='b')\n # Pushrods\n ax.plot(left_pushrod[0], left_pushrod[1], left_pushrod[2], c='g')\n ax.plot(right_pushrod[0], right_pushrod[1], right_pushrod[2], c='g')\n # Dampers\n ax.plot(heave_damper[0], heave_damper[1], heave_damper[2], c='r')\n ax.plot(roll_damper_a[0], roll_damper_a[1], roll_damper_a[2], c='c')\n\n if planes_choice: # simply looking for whether plane choice is true or not\n ax.plot(left_lower_control_arm[0], left_lower_control_arm[1], left_lower_control_arm[2], c='b')\n ax.plot(left_upper_control_arm[0], left_upper_control_arm[1], left_upper_control_arm[2], c='b')\n ax.plot(right_lower_control_arm[0], right_lower_control_arm[1], right_lower_control_arm[2], c='b')\n ax.plot(right_upper_control_arm[0], right_upper_control_arm[1], right_upper_control_arm[2], c='b')\n # Control Arm Planes\n ax.plot_surface(left_ucxx, left_ucyy, left_uczz)\n ax.plot_surface(left_lcyy, left_lcxx, left_lczz)\n ax.plot_surface(right_ucxx, right_ucyy, right_uczz)\n ax.plot_surface(right_lcyy, right_lcxx, right_lczz)\n\n # Instant center lines\n if instant_center_choice: # simply looking for whether Instant Center choice is true or not\n ax.plot(*left_intersection_line)\n ax.plot(*right_intersection_line)",
"def route_bitlines(self):\n # adds the BL on metal 2\n offset = vector(self.bitcell.get_pin(self.bitcell_bl).cx(),0) - vector(0.5 * self.m2_width,0)\n self.add_layout_pin(text=\"bl\",\n layer=\"metal2\",\n offset=offset,\n width=drc['minwidth_metal2'],\n height=self.height)\n\n # adds the BR on metal 2\n offset = vector(self.bitcell.get_pin(self.bitcell_br).cx(),0) - vector(0.5 * self.m2_width,0)\n self.add_layout_pin(text=\"br\",\n layer=\"metal2\",\n offset=offset,\n width=drc['minwidth_metal2'],\n height=self.height)",
"def rotatePolyLines(inFC):\n fieldList = [\"SHAPE@\"]\n with arcpy.da.UpdateCursor(inFC, fieldList) as cur:\n for row in cur:\n startPoint = (row[0].firstPoint.X, row[0].firstPoint.Y)\n endPoint = (row[0].lastPoint.X, row[0].lastPoint.Y)\n angle = getOrient(startPoint, endPoint)\n verts = []\n for part in row[0]:\n for pnt in part:\n x, y = RotateXY(pnt.X, pnt.Y, row[0].firstPoint.X, row[0].firstPoint.Y, angle)\n pnt.X = x\n pnt.Y = y\n verts.append(pnt)\n row[0]=arcpy.Polyline(arcpy.Array(verts))\n cur.updateRow(row)\n del cur",
"def SLTrace(self,NSL=12,Pts=[]):\n \n #Grid edge\n Bound_vert=[(0,0),(1,0),(1,1),(0,1),(0,0)]\n Bound_vert_phy=[]\n for i in range(len(Bound_vert)):\n Bound_vert_phy.append(self.Pts2Physic(Bound_vert[i]))\n \n #Streamline\n if(len(Pts)==0): #if the initial Pts are not provided\n Pts=PointOnUnitSquare(NSL,Endpoint=False)\n else:\n NSL=len(Pts)\n \n SL=[]\n SL_phy=[]\n TOF_phy=[]\n \n for i in range(len(Pts)):\n temp=self.Trace1SL(Pts[i])\n SL.append(temp[2])\n SL_phy.append(temp[3])\n TOF_phy.append(temp[5])\n \n #SL_phy=self.RotateSL(SL_phy)\n #SL_phy=self.TranslateSL(SL_phy)\n \n fig, axs = plt.subplots(ncols=2)\n \n ax=axs[0]\n ax.plot(*np.asarray(Bound_vert).T,lw=3,color='red')\n for i in range(len(Pts)):\n ax.plot(*np.asarray(SL[i]).T,lw=1,marker='o',markersize=1,color='blue')\n ax.set_ylim(bottom=0)\n ax.set_aspect('equal')\n ax.set_title(r'Transformed Space ($\\alpha,\\beta$)')\n \n ax=axs[1]\n ax.plot(*np.asarray(Bound_vert_phy).T,lw=3,color='red')\n for i in range(len(Pts)):\n ax.plot(*np.asarray(SL_phy[i]).T,lw=1,marker='o',markersize=1,color='blue')\n ax.set_ylim(bottom=0)\n ax.set_aspect('equal')\n ax.set_title(r'Physical Space ($x,y$)')\n\n fig.tight_layout()\n plt.show()\n return SL_phy,TOF_phy",
"def light_source_directions():\n L = np.array([[-0.06059872, -0.44839055, 0.8917812],\n [-0.05939919, -0.33739538, 0.93948714],\n [-0.05710194, -0.21230722, 0.97553319],\n [-0.05360061, -0.07800089, 0.99551134],\n [-0.04919816, 0.05869781, 0.99706274],\n [-0.04399823, 0.19019233, 0.98076044],\n [-0.03839991, 0.31049925, 0.9497977],\n [-0.03280081, 0.41611025, 0.90872238],\n [-0.18449839, -0.43989616, 0.87889232],\n [-0.18870114, -0.32950199, 0.92510557],\n [-0.1901994, -0.20549935, 0.95999698],\n [-0.18849605, -0.07269848, 0.97937948],\n [-0.18329657, 0.06229884, 0.98108166],\n [-0.17500445, 0.19220488, 0.96562453],\n [-0.16449474, 0.31129005, 0.93597008],\n [-0.15270716, 0.4160195, 0.89644202],\n [-0.30139786, -0.42509698, 0.85349393],\n [-0.31020115, -0.31660118, 0.89640333],\n [-0.31489186, -0.19549495, 0.92877599],\n [-0.31450962, -0.06640203, 0.94692897],\n [-0.30880699, 0.06470146, 0.94892147],\n [-0.2981084, 0.19100538, 0.93522635],\n [-0.28359251, 0.30729189, 0.90837601],\n [-0.26670649, 0.41020998, 0.87212122],\n [-0.40709586, -0.40559588, 0.81839168],\n [-0.41919869, -0.29999906, 0.85689732],\n [-0.42618633, -0.18329412, 0.88587159],\n [-0.42691512, -0.05950211, 0.90233197],\n [-0.42090385, 0.0659006, 0.90470827],\n [-0.40860354, 0.18720162, 0.89330773],\n [-0.39141794, 0.29941372, 0.87013988],\n [-0.3707838, 0.39958255, 0.83836338],\n [-0.499596, -0.38319693, 0.77689378],\n [-0.51360334, -0.28130183, 0.81060526],\n [-0.52190667, -0.16990217, 0.83591069],\n [-0.52326874, -0.05249686, 0.85054918],\n [-0.51720021, 0.06620003, 0.85330035],\n [-0.50428312, 0.18139393, 0.84427174],\n [-0.48561334, 0.28870793, 0.82512267],\n [-0.46289771, 0.38549809, 0.79819605],\n [-0.57853599, -0.35932235, 0.73224555],\n [-0.59329349, -0.26189713, 0.76119165],\n [-0.60202327, -0.15630604, 0.78303027],\n [-0.6037003, -0.04570002, 0.7959004],\n [-0.59781529, 0.06590169, 0.79892043],\n [-0.58486953, 0.17439091, 0.79215873],\n [-0.56588359, 0.27639198, 0.77677747],\n [-0.54241965, 0.36921337, 0.75462733],\n [0.05220076, -0.43870637, 0.89711304],\n [0.05199786, -0.33138635, 0.9420612],\n [0.05109826, -0.20999284, 0.97636672],\n [0.04919919, -0.07869871, 0.99568366],\n [0.04640163, 0.05630197, 0.99733494],\n [0.04279892, 0.18779527, 0.98127529],\n [0.03870043, 0.30950341, 0.95011048],\n [0.03440055, 0.41730662, 0.90811441],\n [0.17290651, -0.43181626, 0.88523333],\n [0.17839998, -0.32509996, 0.92869988],\n [0.18160174, -0.20480196, 0.96180921],\n [0.18200745, -0.07490306, 0.98044012],\n [0.17919505, 0.05849838, 0.98207285],\n [0.17329685, 0.18839658, 0.96668244],\n [0.1649036, 0.30880674, 0.93672045],\n [0.1549931, 0.41578148, 0.89616009],\n [0.28720483, -0.41910705, 0.8613145],\n [0.29740177, -0.31410186, 0.90160535],\n [0.30420604, -0.1965039, 0.9321185],\n [0.30640529, -0.07010121, 0.94931639],\n [0.30361153, 0.05950226, 0.95093613],\n [0.29588748, 0.18589214, 0.93696036],\n [0.28409783, 0.30349768, 0.90949304],\n [0.26939905, 0.40849857, 0.87209694],\n [0.39120402, -0.40190413, 0.8279085],\n [0.40481085, -0.29960803, 0.86392315],\n [0.41411685, -0.18590756, 0.89103626],\n [0.41769724, -0.06449957, 0.906294],\n [0.41498764, 0.05959822, 0.90787296],\n [0.40607977, 0.18089099, 0.89575537],\n [0.39179226, 0.29439419, 0.87168279],\n [0.37379609, 0.39649585, 0.83849122],\n [0.48278794, -0.38169046, 0.78818031],\n [0.49848546, -0.28279175, 0.8194761],\n [0.50918069, -0.1740934, 0.84286803],\n [0.51360856, -0.05870098, 0.85601427],\n [0.51097962, 0.05899765, 0.8575658],\n [0.50151639, 0.17420569, 
0.84742769],\n [0.48600297, 0.28260173, 0.82700506],\n [0.46600106, 0.38110087, 0.79850181],\n [0.56150442, -0.35990283, 0.74510586],\n [0.57807114, -0.26498677, 0.77176147],\n [0.58933134, -0.1617086, 0.7915421],\n [0.59407609, -0.05289787, 0.80266769],\n [0.59157958, 0.057798, 0.80417224],\n [0.58198189, 0.16649482, 0.79597523],\n [0.56620006, 0.26940003, 0.77900008],\n [0.54551481, 0.36380988, 0.7550205]], dtype=float)\n return L",
"def switch_to_offsets(self):\n\n cdelt1, cdelt2 = proj_plane_pixel_scales(self.wcs)\n ctype = self.wcs.wcs.ctype\n crpix = self.wcs.wcs_world2pix(self.ra, self.dec, 1)\n\n # Create new WCS as Skymapper does weird things with CDELT\n self.wcs = WCS(naxis=2)\n\n # Centre pixel is offset by 1 due to array indexing convention\n # self.wcs.wcs.crpix = [(len(self.data)) / 2 + 1,\n # (len(self.data)) / 2 + 1]\n self.wcs.wcs.crpix = [crpix[0], crpix[1]]\n self.wcs.wcs.crval = [0, 0]\n self.wcs.wcs.cdelt = [-cdelt1, cdelt2]\n self.wcs.wcs.ctype = ctype\n\n if 'radio' in dir(self):\n r_crpix = self.radio.wcs.wcs_world2pix(self.ra, self.dec, 1)\n # self.radio.wcs.wcs.crpix = [(len(self.radio.data)) / 2 + 1,\n # (len(self.radio.data)) / 2 + 1]\n self.radio.wcs.wcs.crpix = [r_crpix[0], r_crpix[1]]\n self.radio.wcs.wcs.crval = [0, 0]\n\n self.offsets = True",
"def monolayer_4band():\n a = 0.222\n ax = 0.438\n ay = 0.332\n theta = 96.79 * (pi / 180)\n phi = 103.69 * (pi / 180)\n\n lat = pb.Lattice(a1=[ax, 0], a2=[0, ay])\n\n h = a * sin(phi - pi / 2)\n s = 0.5 * ax - a * cos(theta / 2)\n lat.add_sublattices(\n ('A', [0, 0, h], 0),\n ('B', [s, 0, 0], 0),\n ('C', [ax/2, ay/2, 0], 0),\n ('D', [ax/2 + s, ay/2, h], 0)\n )\n\n lat.register_hopping_energies({\n 't1': -1.22,\n 't2': 3.665,\n 't3': -0.205,\n 't4': -0.105,\n 't5': -0.055\n })\n\n lat.add_hoppings(\n # t1\n ([-1, 0], 'A', 'D', 't1'),\n ([-1, -1], 'A', 'D', 't1'),\n ([ 0, 0], 'B', 'C', 't1'),\n ([ 0, -1], 'B', 'C', 't1'),\n # t2\n ([ 0, 0], 'A', 'B', 't2'),\n ([ 0, 0], 'C', 'D', 't2'),\n # t3\n ([ 0, 0], 'A', 'D', 't3'),\n ([ 0, -1], 'A', 'D', 't3'),\n ([ 1, 1], 'C', 'B', 't3'),\n ([ 1, 0], 'C', 'B', 't3'),\n # t4\n ([ 0, 0], 'A', 'C', 't4'),\n ([ 0, -1], 'A', 'C', 't4'),\n ([-1, 0], 'A', 'C', 't4'),\n ([-1, -1], 'A', 'C', 't4'),\n ([ 0, 0], 'B', 'D', 't4'),\n ([ 0, -1], 'B', 'D', 't4'),\n ([-1, 0], 'B', 'D', 't4'),\n ([-1, -1], 'B', 'D', 't4'),\n # t5\n ([-1, 0], 'A', 'B', 't5'),\n ([ 0, 1], 'A', 'B', 't5'),\n ([ 0, -1], 'A', 'B', 't5'),\n ([-1, 0], 'C', 'D', 't5'),\n ([ 0, 1], 'C', 'D', 't5'),\n ([ 0, -1], 'C', 'D', 't5'),\n )\n\n return lat",
"def createMasks(inputFolderName=\"Masks_From_VolumeTracing\", outputFolderName=\"Masks_From_VolumeTracing_Lines_From_VolumeTracing\"):\n\n root, df = loader.dataModules()\n\n PATH_TO_RAW_FRAMES_PARENT_DIR = os.path.join(root, inputFolderName) # frames path\n PATH_TO_OUTPUT_DIR = os.path.join(root, outputFolderName) # frames path\n \n os.makedirs(PATH_TO_OUTPUT_DIR, exist_ok=True) # creates ground truth mask parent directory\n\n print(\"Drawing lines on each mask from VolumeTracings\")\n for i in tqdm(range(len(df))): # iterates through each row of data frame\n videoName = df.iloc[i, 0] # name of video\n frameNumber = df.iloc[i, 1] # timing for clip\n\n frameName = videoName + \".avi_\" + str(frameNumber) + \".png\" # concatenate video name with frame number as file name\n PATH_TO_RAW_FRAME = os.path.join(PATH_TO_RAW_FRAMES_PARENT_DIR, frameName)\n PATH_TO_OUTPUT_RAW_FRAME = os.path.join(PATH_TO_OUTPUT_DIR, frameName)\n\n image = cv2.imread(\"/Users/ishan/Downloads/Untraced.png\") # read in the image from the specified frame path\n if os.path.exists(PATH_TO_RAW_FRAME) and videoName == \"0X16A6CEDE7F61E211\" and int(frameNumber) == 131: # checks if frame exists\n x1 = list(literal_eval(df.iloc[i, 2])) # x1 coords\n y1 = list(literal_eval(df.iloc[i, 3])) # y1 coords\n x2 = list(literal_eval(df.iloc[i, 4])) # x2 coords\n y2 = list(literal_eval(df.iloc[i, 5])) # y2 coords\n\n for coord in range(len(x1)): # iterate through each coordinate\n if coord is 0: # gets the perpendicular/long line\n longLine1stCoords = (int(x1[coord]), int(y1[coord]))\n longLine2ndCoords = (int(x2[coord]), int(y2[coord]))\n else: # draws the parallel lines\n cv2.line(image, (int(x1[coord]), int(y1[coord])), (int(x2[coord]), int(y2[coord])), (255, 255, 255), 1)\n \n cv2.line(image, longLine1stCoords, longLine2ndCoords, (255, 255, 255), 1) # Drawing the perpendicular/long line in different color\n\n cv2.imwrite(\"/Users/ishan/Downloads/Traced.png\", image)",
"def slope_lines(self,image):\r\n img_copy = image.copy()\r\n \r\n left_lines,right_lines=self.makeLeftRightline()\r\n left_line = np.mean(left_lines, axis=0)\r\n right_line = np.mean(right_lines, axis=0)\r\n\r\n poly_vertices = []\r\n order = [0,1,3,2]\r\n\r\n for slope, intercept in [left_line, right_line]:\r\n #getting height of image in y1\r\n rows, cols = image.shape[:2]\r\n y1= int(rows) \r\n #taking y2 upto 68% of y1\r\n y2= int(rows*0.68) \r\n #y=mx +c can be written as x=(y-c)/m\r\n x1=int((y1-intercept)/slope)\r\n x2=int((y2-intercept)/slope)\r\n poly_vertices.append((x1, y1))\r\n poly_vertices.append((x2, y2))\r\n\r\n # DRAWING LINES AND PATH ON THE IMAGE\r\n thickness_of_line=9\r\n color_of_line=[20, 255, 20]\r\n lines=np.array([[[x1,y1,x2,y2]]])\r\n for i in lines:\r\n for x1,y1,x2,y2 in i:\r\n cv2.line(img_copy, (x1, y1), (x2, y2), color_of_line, thickness_of_line)\r\n poly_vertices = [poly_vertices[i] for i in order]\r\n #filling polygon color\r\n cv2.fillPoly(img_copy, pts = np.array([poly_vertices],'int32'), color = (200,20,20))\r\n final_out=cv2.addWeighted(image,0.7,img_copy,0.4,0.)\r\n return final_out",
"def fix_straight_lines(self):\r\n\r\n # Creates a vertical 1x5 kernel and applies binary closing based on that kernel\r\n vertical_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (1, 5))\r\n self.thresh_invert = cv2.morphologyEx(self.thresh_invert, cv2.MORPH_CLOSE, vertical_kernel, iterations=9)\r\n\r\n # Creates a horizontal 5x1 kernel and applies binary closing based on that kernel\r\n horizontal_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (5, 1))\r\n self.thresh_invert = cv2.morphologyEx(self.thresh_invert, cv2.MORPH_CLOSE, horizontal_kernel, iterations=4)",
"def fillLane(self, src):\n lines = cv2.HoughLinesP(src, 1, np.pi/180, 1)\n try:\n for i in range(lines.shape[0]): #FIXME: \"'NoneType' object has no attribute 'shape'\" possible\n l = lines[i]\n cv2.line(src, (l[0][0], l[0][1]), (l[0][2], l[0][3]), 255, 3, cv2.LINE_AA)\n \n except:\n pass",
"def lhco_line(self):\n if not self.check_def(['eta','phi','pt','mass','pid']): \n sys.exit('Particle error: some attribute not defined')\n\n jet=[1,2,3,4,5,6,21]\n inv_list=[12,14,16,18,1000022,1000023,1000024,1000025,1000035]\n\n #define pid-> type\n pid_to_type={11:1,-11:1,13:2,-13:2,15:3,-15:3,22:0}\n for data in jet:\n pid_to_type[data]=4\n pid_to_type[-data]=4\n for data in inv_list:\n pid_to_type[data]=6\n pid_to_type[-data]=6\n\n\n \n type=''\n for key in pid_to_type.keys():\n if self.pid==key:\n type=pid_to_type[key]\n break\n \n if type=='':\n print 'Warning unknown type'\n return ''\n\n text =' '+str(type) #type LHCO\n text+=' '+str(self.eta) #ETA\n text+=' '+str(self.phi) #PHI\n text+=' '+str(self.pt) #PT\n text+=' '+str(self.mass) #JMASS\n if self.pid in [11,13]: #NTRK\n text+=' -1' \n else:\n text+=' 1'\n if self.pid in [-5,5]: #BTAG\n text+=' 2'\n else:\n text+=' 0'\n text+=' 0' #HAD/EM\n text+=' 0' #DUMMY 1\n text+=' 0' #DUMMY 2\n \n return text",
"def illustrate_driving_lane_with_topdownview(image, left_line, right_line):\n\n rows, cols = image.shape[:2]\n window_img = np.zeros_like(image)\n\n window_margin = 56\n left_plotx, right_plotx = left_line, right_line\n ploty = left_line\n lane_width = right_line[0] - left_line[0]\n lane_center = (right_line[0] + left_line[0]) / 2\n lane_offset = cols / 2 - (2*left_line[0] + lane_width) / 2\n car_offset = int(lane_center - 360)\n \n # Generate a polygon to illustrate the search window area\n # And recast the x and y points into usable format for cv2.fillPoly()\n left_line_window1 = np.array([np.transpose(np.vstack([right_plotx + lane_offset - lane_width - window_margin / 4, ploty]))])\n left_line_window2 = np.array([np.flipud(np.transpose(np.vstack([right_plotx + lane_offset - lane_width+ window_margin / 4, ploty])))])\n left_line_pts = np.hstack((left_line_window1, left_line_window2))\n right_line_window1 = np.array([np.transpose(np.vstack([right_plotx + lane_offset - window_margin / 4, ploty]))])\n right_line_window2 = np.array([np.flipud(np.transpose(np.vstack([right_plotx + lane_offset + window_margin / 4, ploty])))])\n right_line_pts = np.hstack((right_line_window1, right_line_window2))\n\n # Draw the lane onto the warped blank image\n cv.fillPoly(window_img, np.int_([left_line_pts]), (140, 0, 170))\n cv.fillPoly(window_img, np.int_([right_line_pts]), (140, 0, 170))\n\n # Recast the x and y points into usable format for cv2.fillPoly()\n pts_left = np.array([np.transpose(np.vstack([right_plotx + lane_offset - lane_width + window_margin / 4, ploty]))])\n pts_right = np.array([np.flipud(np.transpose(np.vstack([right_plotx + lane_offset - window_margin / 4, ploty])))])\n pts = np.hstack((pts_left, pts_right))\n\n # Draw the lane onto the warped blank image\n cv.fillPoly(window_img, np.int_([pts]), (0, 160, 0))\n\n #window_img[10:133,300:360] = img\n road_map = Image.new('RGBA', image.shape[:2], (0, 0, 0, 0))\n window_img = Image.fromarray(window_img)\n road_map.paste(window_img, (0, 0))\n road_map = np.array(road_map)\n road_map = cv.resize(road_map, (95, 95))\n road_map = cv.cvtColor(road_map, cv.COLOR_BGRA2BGR)\n\n return road_map",
"def seven_punishment(self):\n if self.current_draw_punishment == 1:\n self.current_draw_punishment = 2\n else:\n self.current_draw_punishment = self.current_draw_punishment + 2",
"def add_polyline(self, layer_to_use,poly,open):\n if type(poly) is not list:\n toplot = [poly]\n else:\n toplot = poly\n\n for y in toplot:\n\n polyline = self.msp.add_polyline2d(\n points=[],\n dxfattribs={'layer': layer_to_use['name']})\n\n if open==True:\n polyline.close(False)\n else:\n polyline.close(True)\n y = np.round(100*y)/100\n if layer_to_use['inversion']==0:\n polyline.append_vertices(y)\n else:\n polyline.append_vertices(-y)",
"def house ():\n\n poly (3,300,\"red\")\n penup()\n setposition(0,-300)\n pendown()\n poly (4,300,\"brown\")\n penup()\n setposition(100,-300)\n pendown()\n poly(4,100,\"green\") \n\n return None",
"def line():\n tt.left(90)\n tt.down()\n tt.forward(50)\n tt.up()\n tt.right(90)\n tt.forward(10)\n tt.right(90)\n tt.forward(50)\n tt.left(90)",
"def setSplineMode(order=3,npts=200):\n dislin.splmod(order,npts)",
"def expandWithoutMutex(self, previousLayer):\n previousLayerProposition = previousLayer.getPropositionLayer()\n \"*** YOUR CODE HERE ***\"",
"def placement_automatic(args):\n clarity_epp.placement.plate.copy_layout(lims, args.process_id)",
"def layer_offsets(self):\n ...",
"def calculate_module_offsets(self):\n \n # These aren't for instantiating, but we use them to get the dimensions\n self.poly_contact_offset = vector(0.5*contact.poly.width,0.5*contact.poly.height)\n\n # M1/M2 routing pitch is based on contacted pitch\n self.m1_pitch = max(contact.m1m2.width,contact.m1m2.height) + max(self.m1_space,self.m2_space)\n self.m2_pitch = max(contact.m2m3.width,contact.m2m3.height) + max(self.m2_space,self.m3_space)\n \n # This corrects the offset pitch difference between M2 and M1\n self.offset_fix = vector(0.5*(self.m2_width-self.m1_width),0)\n\n # delay chain will be rotated 90, so move it over a width\n # we move it up a inv height just for some routing room\n self.rbl_inv_offset = vector(self.delay_chain.height, self.inv.width)\n # access TX goes right on top of inverter, leave space for an inverter which is\n # about the same as a TX. We'll need to add rails though.\n self.access_tx_offset = vector(1.25*self.inv.height,self.rbl_inv_offset.y) + vector(0,2.5*self.inv.width)\n self.delay_chain_offset = self.rbl_inv_offset + vector(0,4*self.inv.width)\n\n # Replica bitline and such are not rotated, but they must be placed far enough\n # away from the delay chain/inverter with space for three M2 tracks\n self.bitcell_offset = self.rbl_inv_offset + vector(2*self.m2_pitch, 0) + vector(0, self.bitcell.height + self.inv.width)\n\n self.rbl_offset = self.bitcell_offset\n\n \n self.height = self.rbl_offset.y + self.rbl.height + self.m2_pitch\n self.width = self.rbl_offset.x + self.bitcell.width",
"def testTinttsysMapLCSp(self):\n self._runTest('tinttsys', True, [1,3,5,7,9,11,13,15], 'linear,cspline',self.spwmap)",
"def horizontal_line(t,n, h):\n lt(t)\n pu(t)\n fd(t,h)\n pd(t)\n lt(t)\n fd(t,n)\n rt(t)",
"def position_line(self, prc=50.0):\n rtc = self._get_fibonnaci_level(prc)[0]\n x_pos = [self.roi.pos()[0], rtc]\n y_pos = [self.roi.pos()[0] + self.roi.size()[0], rtc]\n return x_pos, y_pos",
"def stitch(KPS1, KPS2, H1, H2, match): #---- stich image to previous one\r\n #--- projection image1 from plane to cylindrical ---\r\n total = np.minimum(match.shape[0],100); # total pairing number\r\n bin1 = match[0:total,0].astype(int); # feature no at image 1\r\n R1 = KPS1.keyz[bin1, 0]; # keypoint Y at image 1\r\n C1 = KPS1.keyz[bin1, 1]; # keypoint X at image 1\r\n V1, U1 = pano_tools.project_p2c_points(R1, C1, H1);\r\n #--- image 2 ---\r\n bin2 = match[0:total,1].astype(int); # feature no at image 2\r\n R2 = KPS2.keyz[bin2, 0]; # keypoint Y at image 2\r\n C2 = KPS2.keyz[bin2, 1]; # keypoint X at image 2\r\n Rc2 = H2[0]/2; Rp2= R2 - Rc2; \r\n Cc2 = H2[1]/2; Cp2= C2 - Cc2;\r\n #--- --- \r\n # {phi1,S1,TU1,TV1} = M*M matrix: which is derived by chosen 2 pairs \r\n # {phi0,S0,TU0,TV0} = scalar: which is initial guess by removing outlier\r\n # \r\n phi1,S1,TU1,TV1= pano_tools.derive_p2c_formula(U1,V1,Cp2,Rp2);\r\n seq,phi0,S0,TU0,TV0 = pano_tools.remove_ill_matched_pair(phi1,S1,TU1,TV1); \r\n #--- linear regression [not necessary] ---\r\n # U1X = U1[seq]; C2X = C2[seq]; V1X = V1[seq]; R2X = R2[seq]; \r\n # phi0,S0,TU0,TV0,Err= pano_tools.linear_regression(V1X,U1X,R2X,C2X, phi0,S0,TU0,TV0,H2)\r\n H2[3]= phi0; H2[4]= S0; H2[5]= TV0; H2[6]= TU0;",
"def test_optical_flow_warp_flyingchairs(self):\n self.single_warp_test_helper('pwcnet/warp/test_data/06530_flow.flo', 'pwcnet/warp/test_data/06530_img1.ppm',\n 'pwcnet/warp/test_data/06530_img2.ppm', 0.031)",
"def draw_lane_lines(image, lines, color=[0, 0, 255], thickness=20):\n # Make a separate image to draw lines and combine with the orignal later\n line_image = np.zeros_like(image)\n if lines is not None:\n for line in lines:\n if(len(line) > 0):\n x1, y1, x2, y2 = line.reshape(4)\n cv2.line(line_image, (x1, y1), (x2, y2), color, thickness)\n return line_image",
"def setup_layout_constants(self):\n # determines the spacing between the edge and nmos (rail to active\n # metal or poly_to_poly spacing)\n half_gate_to_gate = 0.5 * (drc[\"poly_to_poly\"] - drc[\"minwidth_metal1\"])\n edge_to_nmos = max(drc[\"metal1_to_metal1\"] - self.nmos.active_contact_positions[0].y,\n half_gate_to_gate - self.nmos.poly_positions[0].y)\n\n # determine the position of the first transistor from the left\n self.nmos_position1 = vector(0,\n 0.5 * drc[\"minwidth_metal1\"] + edge_to_nmos)\n offset = self.nmos_position1 + vector(0,self.nmos.height)\n\n x = vector(self.nmos.active_width - self.nmos.active_contact.width, 0)\n self.nmos_position2 = x + self.nmos_position1.scale(0,1)\n\n # determines the spacing between the edge and pmos\n edge_to_pmos = max(drc[\"metal1_to_metal1\"] - self.pmos.active_contact_positions[0].y,\n half_gate_to_gate - self.pmos.poly_positions[0].y)\n self.pmos_position1 = vector(0,\n self.height - 0.5 * drc[\"minwidth_metal1\"]\n - edge_to_pmos - self.pmos.height)\n self.pmos_position2 = self.pmos_position1 + vector(self.pmos.width,0)\n\n self.well_width = max(self.pmos_position2.x + self.pmos.active_position.x\n + self.pmos.active_width\n + drc[\"active_to_body_active\"] + self.nwell_contact.width \n + drc[\"well_enclosure_active\"],\n self.nmos_position2.x + self.nmos.active_position.x \n + self.nmos.active_width \n + drc[\"active_to_body_active\"] + drc[\"well_enclosure_active\"])\n self.width = self.well_width",
"def form_low_clipping_plane(fx: float, img_height: int) -> np.ndarray:\n low_plane = np.array([0.0, -fx, img_height / 2.0, 0.0])\n low_plane /= np.linalg.norm(low_plane)\n return low_plane",
"def process(self):\n lines = cv.HoughLinesP(self.input_image, 1, np.pi/180, 100, 10, 150, 400)\n for x1,y1,x2,y2 in lines[0]:\n cv.line(self.output_image,(x1,y1),(x2,y2),(0,255,0),2)\n cv.line(self.output_image, (0,0), (100, 100), (0, 0, 255), 4)\n return self.output_image",
"def direction(self):\n import pylab\n i = 0\n j = 0\n vals = []\n vects = []\n kpx = self.keypoints.x\n kpy = self.keypoints.y\n sigma = self.keypoints.sigma\n img = self.raw\n pylab.figure()\n pylab.imshow(img, interpolation='nearest')\n\n for y, x, s in zip(kpy, kpx, sigma):\n s_patch = numpy.trunc(s * 2)\n\n if s_patch % 2 == 0 :\n s_patch += 1\n\n if s_patch < 3 : s_patch = 3\n\n if (x > s_patch / 2 and x < img.shape[1] - s_patch / 2 - 1 and y > s_patch / 2 and y < img.shape[0] - s_patch / 2):\n\n patch = img[y - (s_patch - 1) / 2:y + (s_patch - 1) / 2 + 1, x - (s_patch - 1) / 2:x + (s_patch - 1) / 2 + 1]\n x_patch = numpy.arange(s_patch)\n Gx = numpy.exp(-4 * numpy.log(2) * (x_patch - numpy.median(x_patch)) ** 2 / s)\n Gy = Gx[:, numpy.newaxis]\n dGx = -Gx * 4 * numpy.log(2) / s * 2 * (x_patch - numpy.median(x_patch))\n dGy = dGx[:, numpy.newaxis]\n d2Gx = -8 * numpy.log(2) / s * ((x_patch - numpy.median(x_patch)) * dGx + Gx)\n d2Gy = d2Gx[:, numpy.newaxis]\n\n Hxx = d2Gx * Gy\n Hyy = d2Gy * Gx\n Hxy = dGx * dGy\n\n d2x = (Hxx.ravel() * patch.ravel()).sum()\n d2y = (Hyy.ravel() * patch.ravel()).sum()\n dxy = (Hxy.ravel() * patch.ravel()).sum()\n H = numpy.array([[d2y, dxy], [dxy, d2x]])\n val, vect = numpy.linalg.eig(H)\n\n# print 'new point'\n# print x, y\n# print val\n# print vect\n# print numpy.dot(vect[0],vect[1])\n e = numpy.abs(val[0] - val[1]) / numpy.abs(val[0] + val[1])\n j += 1\n# print j\n# print e\n if numpy.abs(val[1]) < numpy.abs(val[0]): # reorganisation des valeurs propres et vecteurs propres\n val[0],val[1] = val[1],val[0]\n vect = vect[-1::-1,:]\n\n\n pylab.annotate(\"\", xy=(x + vect[0][0] * val[0], y + vect[0][1] * val[0]), xytext=(x, y),\n arrowprops=dict(facecolor='red', shrink=0.05),)\n\n pylab.annotate(\"\", xy=(x + vect[1][0] * val[1], y + vect[1][1] * val[1]), xytext=(x, y),\n arrowprops=dict(facecolor='red', shrink=0.05),)\n pylab.plot(x, y, 'og')\n vals.append(val)\n vects.append(vect)\n return vals, vects"
] | [
"0.59270585",
"0.58033264",
"0.5608836",
"0.55544627",
"0.5367089",
"0.53572994",
"0.53372705",
"0.5273966",
"0.52700067",
"0.5266239",
"0.52600336",
"0.5244795",
"0.51854324",
"0.5150837",
"0.5133585",
"0.51236475",
"0.51020104",
"0.50978017",
"0.5095993",
"0.5059783",
"0.5050122",
"0.50479436",
"0.50455284",
"0.50362366",
"0.50353545",
"0.50268257",
"0.50054336",
"0.500227",
"0.4981197",
"0.49799535",
"0.49752378",
"0.49683523",
"0.495812",
"0.495812",
"0.4955107",
"0.4950393",
"0.49318862",
"0.49244827",
"0.49108306",
"0.49038562",
"0.49027953",
"0.48995534",
"0.489726",
"0.4897183",
"0.48876628",
"0.48846924",
"0.48819488",
"0.48796973",
"0.48702037",
"0.48652944",
"0.4862178",
"0.4862178",
"0.48591495",
"0.48552492",
"0.48517168",
"0.48375675",
"0.48359835",
"0.48342887",
"0.48327395",
"0.48241132",
"0.48109734",
"0.48059902",
"0.48055005",
"0.47975954",
"0.4795234",
"0.47903538",
"0.47894815",
"0.478645",
"0.4784164",
"0.47818163",
"0.4779336",
"0.47749218",
"0.47741705",
"0.47725934",
"0.47678947",
"0.47665143",
"0.47631666",
"0.47616065",
"0.4759623",
"0.4756092",
"0.47536477",
"0.47535604",
"0.47517377",
"0.47472927",
"0.47446656",
"0.47427198",
"0.47352847",
"0.47327268",
"0.47324774",
"0.4730493",
"0.47294718",
"0.47284082",
"0.47259277",
"0.47252",
"0.47240287",
"0.4719153",
"0.47178957",
"0.47159818",
"0.4714014",
"0.47125843"
] | 0.6246182 | 0 |
Writes linac phasing lines to a Bmad file. Requires epics (or proxy object). | def write_bmad_linac_phasing_lines(filePath='linac_settings.bmad', epics=None, verbose=False):
lines = bmad_linac_phasing_lines(epics)
with open(filePath, 'w') as f:
for l in lines:
f.write(l+'\n')
if verbose:
print('Written:', filePath) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def write_tao_BC_and_LEM_lines(filePath='LEM_settings.tao', epics=None, verbose=False):\n lines = tao_BC_and_LEM_lines(epics)\n with open(filePath, 'w') as f:\n for l in lines:\n f.write(l+'\\n')\n if verbose:\n print('Written:', filePath)\n\n \n \n return lines",
"def bmad_linac_phasing_lines(epics):\n lines = [\n '! Linac overall phasing',\n 'O_L1[phase_deg] = 0 ! K21_1 sets this directly. This is a delta on top of that.', \n 'O_L2[phase_deg] = '+str(epics.caget('SIOC:SYS0:ML00:CALC204')),\n 'O_L3[phase_deg] = '+str(epics.caget('SIOC:SYS0:ML00:AO499'))\n ]\n return lines",
"def tao_BC_and_LEM_lines(epics):\n bc1_e0=epics.caget('SIOC:SYS0:ML00:AO483')*1e6\n bc2_e0=epics.caget('SIOC:SYS0:ML00:AO489')*1e9\n l3_e0 =epics.caget('SIOC:SYS0:ML00:AO500')*1e9\n \n # Charge in LTU\n q_after_horn_cutting = epics.caget('SIOC:SYS0:ML00:CALC252')*1e-12 # pC -> C\n bc1_offset=epics.caget('BMLN:LI21:235:MOTR')*1e-3\n bc2_offset=epics.caget('BMLN:LI24:805:MOTR')*1e-3\n \n bc1_current=epics.caget('SIOC:SYS0:ML00:AO485')\n bc2_current=epics.caget('SIOC:SYS0:ML00:AO195')\n \n # Catch bad settings\n if bc1_current==0:\n print('Warning: BC1 current is zero!')\n bc1_sigma_z = 0\n else:\n # Assumes parabolic distribution\n bc1_sigma_z = q_after_horn_cutting*299792458 / sqrt(10) / bc1_current\n\n if bc2_current==0:\n print('Warning: BC1 current is zero!')\n bc2_sigma_z = 0\n else:\n # Assumes Gaussian distribution\n bc2_sigma_z = q_after_horn_cutting*299792458 / sqrt(12) / bc2_current \n \n lines = []\n lines.append('set dat BC1.energy[1]|meas = '+str(bc1_e0))\n lines.append('set dat BC2.energy[1]|meas = '+str(bc2_e0))\n lines.append('set dat L3.energy[2]|meas = '+str(l3_e0))\n lines.append('set dat BC1.offset[1]|meas = '+str(bc1_offset))\n lines.append('set dat BC2.offset[1]|meas = '+str(bc2_offset))\n \n lines.append(f'! Charge after horn cutting: {q_after_horn_cutting*1e12:10.4} pC')\n lines.append(f'! For BC1 current {bc1_current} A')\n lines.append('set dat BC1.beam[1]|meas = '+str( bc1_sigma_z))\n lines.append(f'! For BC2 current {bc2_current} A')\n lines.append('set dat BC2.beam[1]|meas = '+str( bc2_sigma_z)) \n\n return lines",
"def writelines(self, lines):\n for line in lines:\n self.write(line)",
"def writelines(self, seq):\n for line in seq:\n self.write(line)",
"def writelines(self, seq: list[str]) -> None:\n ...",
"def write_output(self,content):\n text=\"\"\"# typ eta phi pt jmass ntrk btag had/em dummy dummy\\n\"\"\"\n self.output.writelines(text)\n text=\"0 \"+str(self.nb_data)+\" \"+str(len(content))+\"\\n\"\n self.output.writelines(text)\n\n i=1\n for particle in content:\n text=str(i)+' '+particle.lhco_line()+'\\n'\n self.output.writelines(text)\n i+=1",
"def _write_to_file(self):\n with open(self.filename + \".asm\", \"w+\") as file:\n file.writelines(\n [\"\\n\" + l if p != 0 else l for p, l in enumerate(self.lines)]\n )",
"async def writelines(self, lines):\n # first check if the file is binary or not\n if 'b' in self._mode:\n raise APIException(\n \"writelines on a binary file is not permitted: {}\".format(\n self._uri)\n )\n # write all but the last line with a line break\n for l in lines:\n await self.write((l+\"\\n\").encode('utf-8'))\n return True",
"def write_lines(list_of_lines, file):\r\n for i in range(0, len(list_of_lines)):\r\n file.write(list_of_lines[i] + b\"\\n\")",
"def writelines(lines, filename, encoding='utf-8', mode='wb'):\r\n return write(os.linesep.join(lines), filename, encoding, mode)",
"def write2lines(myItrb, out_fn):\n with open(out_fn, 'w') as writer:\n for item in myItrb:\n writer.write(str(item)+'\\n')",
"def _write_to_file(self):\n with open(self.filename + \".ir\", \"w+\") as file:\n file.writelines(\n [\"\\n\" + l if p != 0 else l for p, l in enumerate(self.lines)]\n )",
"def writeData(self, lines, fpath):\n with open(fpath, 'w') as f:\n for line in lines:\n print(line, file=f)",
"def write_to(channel, lines):\n for s in lines:\n channel.write(s)\n channel.write('\\n')",
"def write(self,aFile,lines):\n # Not necessary (comment older than 021 - no idea what does that mean)\n # Maybe meant to be obsoleted by writeLine and writeLog\n self.debug.printHeader()\n for line in lines:\n if not hasattr(line,'upper'): line=self.settings.pathStorage.composeURL(line)\n # Really poor way how differ between string and list\n # Should be rewriten. Lines could contain only array of strings (not array of arrays).\n aFile.write(line)\n aFile.write('\\n')",
"def lines_to_file(file_name: str, write_dir: str, lines: Sequence[str]):\n with open(os.path.join(write_dir, file_name), \"w\", encoding=\"utf-8\") as f:\n for l in lines:\n f.write(f\"{l}\\n\")",
"def write_lines(filename, lines, verbose=True):\n with open(filename, 'w', encoding=\"utf-8\") as fp:\n for line in lines:\n print(line, file=fp)\n if verbose:\n print(\"Done writing to file %s.\" % filename)",
"def write_lines_to_file(filename, lines):\n with open(filename, 'w') as fp:\n for line in lines:\n fp.write(\"%s\\n\" % line.strip('\\n'))",
"def write(afile, seqs): \n for s in seqs :\n writeseq(afile, s)",
"def sheetbend(exe, hklin, pdbin, pdbout, ncyc, logfile):\n\n mtz_labels = mtz_util.GetLabels(hklin)\n colin = \"{0},{1}\".format(mtz_labels.f, mtz_labels.sigf)\n\n cmd = [exe, \"--pdbin\", pdbin, \"--mtzin\", hklin, \"--pdbout\", pdbout, \"--colin-fo\", colin, \"-cycles\", str(ncyc), \"-resolution-by-cycle\", \"6,3\"]\n stdout = cexec(cmd)\n with open(logfile, \"w\") as f_out:\n f_out.write(stdout)",
"def print_to_file(list_of_lines, file_path):\r\n with open(file_path) as output_file:\r\n write_lines(list_of_lines, output_file)",
"def writeBlade(self):\n\n ofname = self.blade1_file ### note, assuming they're all the same\n ofh = open(ofname,'w')\n\n for line in self.lines_blade:\n ofh.write(line)\n ofh.close()",
"def writeMultipleFileLines(self, filePaths, liness): \n \n for i,filePath in enumerate(filePaths): \n self.writeSingleFileLines(filePath,liness[i])",
"def _write_endcy():\n return []",
"def writeEcMaps( self ):\n\n self.logger.info( 'writeEcMaps: START' )\n\n self.logger.info( 'writeEcMaps: insert file will be ecMapsInsert.psql' )\n\n ecMapsFile = self.openInsertFile( 'ecMapsInsert.psql' )\n\n self.logger.info( 'writeEcMaps: keggreader.getEcMaps(): START' )\n\n ecMaps = self.reader.getEcMaps()\n\n self.logger.info( 'writeEcMaps: keggreader.getEcMaps(): START' )\n\n for ec,mapNumbers in ecMaps.iteritems():\n ecId = self.importerEc.ecsInserted[ ec ]\n \n for mapNumber in mapNumbers:\n\n if mapNumber in self.importerPathway.pathwayMapsInserted:\n\n mapId = self.importerPathway.pathwayMapsInserted[ mapNumber ]\n\n #self.writeEcMapsFile( ecMapsFile, ecId, mapId )\n self.writeFile( ecMapsFile, 'ec_maps', [ str(ecId), str(mapId) ] )\n\n self.logger.info( 'writeEcMaps: DONE' )",
"def writeOut(self):\n # import time\n self.outHeader = self.srcHeader\n for line in self.outHeader:\n self.outFile.write(line + '\\n')\n # now = time.asctime(time.localtime(time.time()))\n # self.outFile.write('%% -- %s -- Written to new alog' % now)\n for time_s in sorted(self.outData):\n for sens in self.outData[time_s]:\n for meas in self.outData[time_s][sens]:\n valu = self.outData[time_s][sens][meas]\n msg_list = [str(time_s), meas, sens, str(valu)]\n line_string = reconstructLine(msg_list)\n self.outFile.write(line_string + '\\n')",
"def save_aligned_BFE(*args):\r\n\r\n try:\r\n global bambara_bfe\r\n global francais_bfe\r\n global english_bfe\r\n bambara_bfe.append(lines_bam[line_no_1])\r\n francais_bfe.append(lines_fr[line_no_2])\r\n english_bfe.append(lines_en[line_no_3])\r\n except ValueError:\r\n pass",
"def _file_writer(self, lines, filename):\n if self.MockRun:\n return\n\n if self.Verbose:\n print \"Writing file %s\" % filename\n\n updated_file = open(filename, 'w')\n updated_file.write(''.join(lines))\n updated_file.close()",
"def write(self, lines):\n strip_lines = lines.strip()\n if strip_lines:\n for line in strip_lines.split('\\n'):\n self._add(line.strip())",
"def writelines(self, iterable):\n for line in iterable:\n self._barf_if_closed()\n self.write(line)",
"def exportBulletFile(*argv):",
"def WriteOutput(self, rows, fileName, access='wb'):\n \n outputFile = open(fileName, access)\n try: \n outputFile.write(self.GetBanner())\n csv.writer(outputFile, dialect='excel-tab').writerows(rows)\n print 'Wrote secondary output to: %s' %(fileName) \n except IOError:\n print 'Error writing output to: %s' %(fileName) \n finally:\n outputFile.close()",
"def append_circuit(self, lines: Tuple[int, int], circuit: ACircuit, content: str) -> None:",
"def save_chain(self):\n pprint('saving to file named bc_file.txt')\n with open('ddos_bc_file.txt', 'w') as output:\n output.write(serializer.serialize(self.chain))",
"def fix_line_endings(fname, eol=b'\\n'):\n lines = [chomp(line) for line in open(fname, 'rb').readlines()]\n with open(fname, 'wb') as fp:\n for line in lines:\n fp.write(line + eol)",
"def __do_write(filestream, seq, header=None):\n if header is not None:\n filestream.write(header + '\\n') # double check newlines\n try:\n for line in chunks(seq, 70):\n filestream.write(line + '\\n')\n except Exception as e:\n print(e)",
"def write_bi_code_to_file(bi_code, file_path):\r\n with open(file_path, 'w') as file:\r\n for i in bi_code:\r\n file.write(f'{i}\\n')",
"def write_PIC(self, pdbid: str = ..., chainid: str = ..., s: str = ...) -> str:\n ...",
"def writelines(self, iterable):\n for line in iterable:\n for stream in self.streams:\n stream.write(line)",
"def to_file(c, path, seq_types=None):\n with open(path, \"w\") as f:\n f.write(circuit_to_verilog(c, seq_types))",
"def writeCADFile(self, filename):\n valid_filetypes = [\"brep\", \"bstl\", \"egads\", \"egg\", \"iges\", \"igs\", \"sens\", \"step\", \"stl\", \"stp\", \"tess\", \"grid\"]\n file_extension = filename.split(\".\")[-1]\n if file_extension.lower() not in valid_filetypes:\n raise OSError(\n \"CAD filename \"\n + filename\n + \" must have a valid exension. \"\n + \"Consult the EngineeringSketchPad docs for the DUMP function\"\n )\n if self.comm.rank == 0:\n modelCopy = self.espModel.Copy()\n n_branches, _, _ = modelCopy.Info()\n modelCopy.NewBrch(\n n_branches, modelCopy.GetCode(\"dump\"), \"<none>\", 0, filename, \"0\", \"0\", \"0\", \"\", \"\", \"\", \"\", \"\"\n )\n modelCopy.Build(0, 0)",
"def print_markers(filename, day_values, hour_values, behavior_markers, \\\n behavior_change, dm, day, hour, bm, pp):\n cf = config.Config()\n fullname = os.path.join(cf.datapath, filename)\n if pp == True:\n outfile = fullname + '.day'\n str_header = generate_day_header()\n np.savetxt(outfile, day_values, delimiter=',', header=str_header)\n outfile = fullname + '.hour'\n str_header = generate_hour_header()\n np.savetxt(outfile, hour_values, delimiter=',', header=str_header)\n outfile = fullname + '.bm'\n str_header = generate_behavior_header()\n np.savetxt(outfile, [behavior_markers], delimiter=',', header=str_header)\n outfile = fullname + '.bcd'\n str_header = generate_bcd_header()\n np.savetxt(outfile, behavior_change, delimiter=',', header=str_header)\n else:\n outfile = fullname + '.bm'\n np.savetxt(outfile, [behavior_markers], delimiter=',')\n outfile = fullname + '.bcd'\n np.savetxt(outfile, behavior_change, delimiter=',')",
"def dump_processed_data_to_file(self, facts, accu_label, article_label, imprison_label):\r\n data = [facts, accu_label, article_label, imprison_label]\r\n with open(util.MID_DATA_PKL_FILE_LOC, \"wb\") as f:\r\n pickle.dump(data, f)\r\n if util.DEBUG:\r\n print(\"DEBUG: data dumped to `.pkl` file\")",
"def make_bedfile(lines, fcount, from_orderfile=False):\n f = get_prereqs(fcount)\n text = []\n with open(f, 'w') as o:\n for line in lines:\n if from_orderfile is False:\n contig, length = line\n text.append(\"%s\\t%s\\t%s\" % (contig, 0, int(length)-1)) # contig \\t start \\t stop\n else:\n scaff, start, stop = line\n text.append(\"%s\\t%s\\t%s\" % (scaff, start, stop)) # scaff \\t start \\t stop\n o.write(\"\\n\".join(text))",
"def write_to_file(filepath, lines):\n with open(filepath, 'w', encoding='utf-8') as f:\n f.write(''.join([line.replace('\\r\\n', '\\n') for line in lines]))",
"def write_all(metabolic_model, infile):\n\t#from joblib import Parallel, delayed\n patients = get_patients_dict(infile)\n\t\n #Parallel(n_jobs=2)(delayed(write_program)(metabolic_model, i, patients[i]) for i in patients)\n for i in patients:\n print i\n\t\twrite_program(metabolic_model, i, patients[i])",
"def bp_ins(filename, start, end):\n with open(filename, 'r') as f:\n lines = f.readlines()\n lines.insert(start-1, \"\")\n lines.insert(end+1, \"\")\n lines.insert(0, \"\")\n lines[start-1] = 'ipdb.set_trace()\\n'\n lines[end+1] = 'ipdb.set_trace()\\n'\n lines[0] = \"import ipdb\\n\"\n with open(f\"break_{filename}\", 'w+') as f:\n f.writelines(lines)",
"def writeElems(fil, elems1, eofs=1, nofs=1):\n #pyFormex uses the same convention for hexahedral elements as ABAQUS\n #Gambit uses a different convention\n #function currently only for hexahedral mesh\n elems = elems1.copy()\n elems[:,2] = elems1[:,3]\n elems[:,3] = elems1[:,2]\n\n elems[:,6] = elems1[:,7]\n elems[:,7] = elems1[:,6]\n \n fil.write(' ELEMENTS/CELLS 2.2.30\\n')\n for i,e in enumerate(elems+nofs):\n fil.write('%8d %2d %2d %8d%8d%8d%8d%8d%8d%8d\\n %8d\\n' % ((i+eofs,4,8)+tuple(e)))\n fil.write('ENDOFSECTION\\n')",
"def store_headlines():\n for outlet in outlets:\n articles = get_headlines(outlet)\n connect_db.store_headlines(articles,outlet)",
"def write_ip_cards(bc_file, bc_class):\n ip = bc_class.iteration_parameters\n bc_file.write('! Iteration Parameters\\n')\n bc_file.write('IP NIT {}\\n'.format(ip.non_linear_iterations))\n index = list(ip.param.non_linear_tolerance_option.get_range()).index(ip.non_linear_tolerance_option)\n if index == 0 or index == 1:\n bc_file.write('IP NTL {}\\n'.format(ip.non_linear_residual_tolerance))\n if index == 0 or index == 2:\n bc_file.write('IP ITL {}\\n'.format(ip.non_linear_incremental_tolerance))\n bc_file.write('IP MIT {}\\n'.format(ip.linear_iterations))\n\n bc_file.write('\\n') # blank line at the end of the Iteration Parameters",
"def save_lines(lines, file_path):\n lines = list(map(lambda x: f'{x}\\n', lines))\n\n with open(file_path, 'w') as f:\n f.writelines(lines)",
"def write_file_parts_to_file(xyz_file_parts, path, fixed_beginning, fixed_end,complexity, config_path):\n\t\t#load ang to bohr factor\n\t\tcfg = configparser.ConfigParser()\n\t\tcfg.read(config_path, encoding='utf-8')\n\n\t\t#write complexity to file\n\t\twith open(path+\"/complexity\", \"w\") as file_complexity:\n\t\t\tfile_complexity.write(str(complexity))\n\t\tfile_complexity.close()\n\n\t\tconcat_xyz = np.concatenate(xyz_file_parts, axis=1)\n\t\ttop.write_xyz_file(path+\"/coord.xyz\", concat_xyz)\n\t\tcoord = top.x2t(concat_xyz)\n\t\t#fix right atoms\n\t\tcoord[4,fixed_beginning] = \"f\"\n\t\tfixed_end = sum(np.array([xyz_file_parts[i].shape[1] for i in range(0,len(xyz_file_parts)-1)]))+fixed_end\n\t\tcoord[4, fixed_end] = \"f\"\n\t\ttop.write_coord_file(path+\"/coord\", coord)\n\n\t\tlower_limit = np.min(concat_xyz[3,:]) + 0.1\n\t\tupper_limit = np.max(concat_xyz[3, :]) - 0.1\n\t\twith open(path+\"/limits\", \"w\") as limits:\n\t\t\tlimits.write(str(lower_limit) + \"\\n\")\n\t\t\tlimits.write(str(upper_limit))",
"def save_barriers(self, images, neb, prefix=\"\"):\n if not os.path.exists('NEB_images'):\n os.mkdir('NEB_images')\n\n for i, image in enumerate(images):\n write(\"NEB_images/\" + prefix + \"ini_NEB_image%03i.xyz\" % i, image)\n\n plot_dir = \"NEB_plots\"\n if not os.path.exists(plot_dir):\n os.mkdir(plot_dir)\n\n fig = plt.figure()\n ax = fig.add_subplot(111)\n\n R = [atoms.positions for atoms in images]\n E = neb.get_potential_energies()\n F = [atoms.get_forces() for atoms in images]\n A = images[0].cell\n pbc = images[0].pbc\n s, E, Sfit, Efit, lines = fit0(E, F, R, A, pbc)\n\n s = np.array(s)\n norm = s.max()\n s /= norm\n Sfit /= norm\n\n for x, y in lines:\n x /= norm\n ax.plot(x, y, '-C0')\n\n ax.plot(Sfit, Efit, 'C0-')\n Ef = max(Efit) - E[0]\n ax.plot(s, E, \"oC0\", label=\"%s: $E_f = %.4f$ (meV)\"%(\"QM/MM EAM result\", Ef*1000))\n ax.legend(loc=\"best\")\n ax.grid(True, linestyle=\"dashed\")\n\n np.savetxt(plot_dir + '/' + prefix + \"_s_E.txt\", np.array([s, E]))\n np.savetxt(plot_dir + '/' + prefix + \"_fit.txt\", np.array([Sfit, Efit]))\n np.savetxt(plot_dir + '/' + prefix + \"_lines.txt\", np.vstack(lines))\n\n fig.savefig(plot_dir + '/' + prefix + \"_barrier.eps\")\n return None",
"def writeChronListToFile(self):\n ## write header\n for header_line in self.outData['header']:\n self.outFile.write(header_line + '\\n')\n ##loop through each msg list\n for msg_list in self.outData_temp:\n ## create line\n msg_line = reconstructLine(msg_list)\n ## write to file\n self.outFile.write(msg_line + '\\n')",
"def write_body(self):\r\n if self.arguments['--out']:\r\n self.file = open(self.arguments['--out'], \"a+\")\r\n for list_item in self.list_of_body_objects:\r\n self.file.write(list_item.line)\r\n self.file.close()\r\n else:\r\n for list_item in self.list_of_body_objects:\r\n print(list_item.line)",
"def write_to_file_z(path):\n path1 = path + \"/z_Macros\"\n if not os.path.exists(path1):\n os.mkdir(path1)\n for e in range(int(e_steps)+1):\n filename = \"x0y0z%ske%s.mac\" %(dz*z + z_min, e*de + e_min)\n path = path1\n fullpath = os.path.join(path, filename)\n f = open(fullpath, \"w\")\n f.write('/rat/physics_list/OmitMuonicProcesses true\\n')\n f.write(\"/rat/physics_list/OmitHadronicProcesses true \\n\")\n f.write(\"\\n\")\n f.write(\"\\n\")\n f.write('/rat/db/set DETECTOR geo_file \"geo/snoplus.geo\"\\n')\n f.write('/rat/db/set GEO[scint] material \"labppo_scintillator\"\\n')\n f.write('/rat/db/set DAQ dqxx_info 0 \\n')\n f.write(\"/run/initialize \\n\")\n f.write(\"\\n\")\n f.write(\"\\n\")\n f.write(\"/rat/proc frontend\\n\")\n f.write(\"/rat/proc trigger\\n\")\n f.write(\"/rat/proc eventbuilder\\n\")\n f.write(\"/rat/proc count\\n\")\n f.write(\"/rat/procset update 100\\n\")\n f.write(\"/rat/proc calibratePMT\\n\")\n f.write(\"/rat/proc scintFitter\\n\")\n f.write(\"/rat/proclast outroot\\n\")\n f.write('/rat/procset file \"x0y0z%ske%s.root\"\\n' %(dz*z + z_min, e*de + e_min))\n f.write(\"\\n\")\n f.write(\"\\n\")\n f.write(\"/generator/add combo gun:point:poisson\\n\")\n f.write(\"# want random, isotropic momentum distribution; energy given in MeV\\n\")\n f.write(\"/generator/vtx/set e- 0 0 0 %s\\n\" %(e*de + e_min))\n f.write(\"# position given in Cartesians, relative to detector center, in mm\\n\")\n f.write(\"/generator/pos/set 0 0 %s\\n\" % (dz*z + z_min))\n f.write(\"/generator/rate/set 1\\n\")\n f.write(\"\\n\")\n f.write(\"\\n\")\n f.write(\"/rat/run/start %s\\n\" %(n))\n f.write(\"exit\")",
"def write_pendf_xs(filename,energy,cs,mat_num,file_num,reaction_num):\n # ----------------------\n # Open new file\n # ----------------------\n with open(filename,'w+') as f:\n # ----------------------\n # Check if cross section or energy are NaN\n # ----------------------\n if (np.isnan(energy).any()) or (np.isnan(cs).any()):\n raise ValueError(\"Input energy or cross section contains NaN's\")\n # ----------------------\n # Check if cross section is negative\n # ----------------------\n cs = np.array(cs)\n if cs[cs<0].any():\n raise ValueError(\"Input cross section is negative.\")\n \n for i in range(len(energy)):\n # ----------------------\n # find exponent on power of 10\n # ----------------------\n exponent_e = int(np.floor(np.log10(energy[i])))\n if exponent_e < 0:\n sign_e = \"-\"\n else:\n sign_e = \"+\"\n\n if( cs[i] == 0.0 ):\n exponent_cs = 0\n else:\n exponent_cs = int(np.floor(np.log10(cs[i])))\n if exponent_cs < 0:\n sign_cs = \"-\"\n else:\n sign_cs = \"+\"\n # ----------------------\n # Write the energy and cross section points\n # ----------------------\n f.write(\" {:0=7.6f}{}{} {:0=7.6f}{}{}\".format(energy[i]/10**exponent_e,sign_e,abs(exponent_e),\n cs[i]/10**exponent_cs,sign_cs,abs(exponent_cs)))\n # ----------------------\n # Tag the last line with ENDF info\n # ----------------------\n if (i+1)%3 == 0:\n f.write(\"{:d} {:d} {:d}\\n\".format(mat_num,file_num,reaction_num))\n # ----------------------\n # Fill in space at the end\n # ----------------------\n if (i == len(energy)-1) and ((i+1)%3 != 0):\n how_many_gaps = 3-(i+1)%3\n spaces_per_gap = 22\n extra_space = \" \"*spaces_per_gap*how_many_gaps\n f.write(extra_space+\"{:d} {:d} {:d}\\n\".format(mat_num,file_num,reaction_num))",
"def write_boundary_string_cards(bc_file, bc_class):\n bs = bc_class.boundary_strings\n if not bs.empty:\n bc_file.write('! Boundary Strings\\n')\n bc_file.write(bs.to_csv(sep=' ', na_rep='', index=False, header=False,).replace('\\r\\n', '\\n'))\n bc_file.write('\\n') # blank line after Boundary Strings",
"def write_csv(elongation, file_name):\n e = elongation\n\n with open(file_name, 'w') as f:\n f.write(f\"\"\"\\\nBreak Load, {e.break_load()}\nBreak Strength, {e.break_strength()}\nBreak Elongation, {e.break_elongation()}\nYield Load, {e.yield_load()}\nYield Strength, {e.yield_strength()}\nYield Elongation, {e.yield_elongation()}\nGauge Length, {e.gauge_length}\nSample Width, {e.sample_width}\nSample Thickness, {e.sample_thickness}\n\nPoints\n %, N\"\"\")\n for x, y in zip(e.xs, e.ys):\n f.write(f'\\n{x:>8.4f}, {y:>8.4f}')",
"def writeOutFileBadSeqRecord(badSeqList, outFileName):\n with gzip.open(outFileName, 'wb') as out_file:\n \tfor seqRecord in badSeqList:\n out_file.write(\"\\t\".join(seqRecord) + \"\\n\")",
"def write_file(self):\r\n # -open file for writing\r\n f_fbob = open(self.fn_path, 'w')\r\n\r\n # -write header\r\n f_fbob.write('%s\\n' % (self.heading))\r\n\r\n # -write sections 1 & 2 : NOTE- what about NOPRINT?\r\n f_fbob.write('%10i%10i%10i%10i\\n' % (self.nqfb, self.nqcfb,\r\n self.nqtfb, self.iufbobsv))\r\n f_fbob.write('%10e\\n' % (self.tomultfb)) # check format\r\n\r\n # -write sections 3-5 looping through observations groups\r\n c = 0\r\n for i in range(self.nqfb):\r\n # while (i < self.nqfb):\r\n # write section 3\r\n f_fbob.write('{:10d}{:10d}\\n'.format(self.nqobfb[i],\r\n self.nqclfb[i]))\r\n\r\n # Loop through observation times for the groups\r\n for j in range(self.nqobfb[i]):\r\n # -write section 4\r\n f_fbob.write(\r\n '{}{:10d}{:10.4g}{}{:10.4g}\\n'.format(self.obsnam[c],\r\n self.irefsp[c],\r\n self.toffset[c], ' ',\r\n self.flwobs[c]))\r\n c += 1 # index variable\r\n\r\n # -write section 5 - NOTE- need to adjust factor for muliple obs same cell\r\n for j in range(abs(self.nqclfb[i])):\r\n if self.nqclfb[\r\n i] < 0: # set factor to 1.0 for all cells in group\r\n self.factor[i, :] = 1.0\r\n f_fbob.write('{:10d}{:10d}{:10d}{}{:10f}\\n'\r\n .format(self.layer[i, j], (self.row[i, j]),\r\n self.column[i, j],\r\n ' ', self.factor[\r\n i, j])) # note- is 10f good enough here?\r\n\r\n f_fbob.close()\r\n #\r\n # swm: BEGIN hack for writing standard file\r\n sfname = self.fn_path # swm:hack\r\n sfname += '_ins' # swm: hack\r\n # write header\r\n f_ins = open(sfname, 'w') # swm: hack for standard file\r\n f_ins.write('jif @\\n') # swm: hack for standard file\r\n f_ins.write('StandardFile 0 1 %s\\n' % (\r\n self.nqtfb)) # swm: hack for standard file\r\n for i in range(0, self.nqtfb):\r\n f_ins.write(\r\n '{}\\n'.format(self.obsnam[i])) # swm: hack for standard file\r\n\r\n f_ins.close()\r\n # swm: END hack for writing standard file\r\n\r\n return",
"def write_lines(file_lines, new_file):\n with open(new_file, 'w') as f:\n for l in file_lines:\n f.write(l)",
"def wikkel_n_baans_tc(input_vdp_posix_lijst, etiketten_Y, in_loop, mes, uit):\n\n inlooplijst = (\";stans.pdf;\" * mes)\n inlooplijst = '0;'+ inlooplijst[:-1] + \"\\n\" # -1 removes empty column in final file\n\n for vdp_file_naam in input_vdp_posix_lijst:\n\n\n filenaamuit = f'def_{Path(vdp_file_naam).stem}.csv'\n file_naam_met_pad = Path(uit).joinpath(filenaamuit)\n\n with open(f\"{vdp_file_naam}\", \"r\", encoding=\"utf-8\") as target:\n readline = target.readlines()\n # pad.with_name(\"VDP_map\").joinpath(f'{pad.stem}_inloop.csv)'))\n\n print(file_naam_met_pad)\n with open(file_naam_met_pad, \"w\", encoding=\"utf-8\") as target:\n target.writelines(kolom_naam_gever_omschrijving_pdf(mes))\n\n target.writelines(readline[1:etiketten_Y + 1])\n # target.writelines(readline[16:(etikettenY+etikettenY-8)])\n\n target.writelines(\n (inlooplijst) * in_loop) # inloop\n print(\"inloop maken\")\n target.writelines(readline[1:]) # bestand\n\n target.writelines(\n (inlooplijst) * in_loop) # inloop # uitloop\n print(\"uitloop maken\")\n target.writelines(readline[-etiketten_Y:])",
"def writexyz(self,fname):\n xyzfile = open(fname + \".xyz\",\"a+\")\n xyzfile.write(str(self.natoms) + \"\\n\\n\")\n for a in self.atoms:\n \tcxyz = a.xyz - np.array(self.pbc_correction(a.xyz))\n\t\t\txyzfile.write(str(a.type) + \"\\t\" + str(cxyz[0]) + \"\\t\" + str(cxyz[1]) + \"\\t\" + str(cxyz[2]) + \"\\n\")\n xyzfile.close()",
"def write_untrim08(self,fn):\n with open(fn,'wt') as fp:\n fp.write(self.hdr_08+\"\\n\")\n\n n_parms = 11\n\n Nland = sum(self.edges['mark']==self.LAND)\n Nflow = sum(self.edges['mark']==self.FLOW)\n Ninternal = sum(self.edges['mark']==0)\n Nbc = sum(self.cells['mark'] == self.BOUNDARY)\n\n\n fp.write(\"NV =%d,\\n\"%self.Nnodes())\n fp.write(\"NE =%d,\\n\"%self.Ncells())\n fp.write(\"NR =%d,\\n\"%self.Nred()) \n fp.write(\"NS =%d,\\n\"%self.Nedges())\n fp.write(\"NSI =%d,\\n\"%Ninternal)\n fp.write(\"NSF =%d,\\n\"%(Ninternal+Nflow))\n fp.write(\"NBC =%d,\\n\"%Nbc)\n fp.write(\"TNE =%d,\\n\"%self.Nsubgrid_cells()) \n fp.write(\"TNS =%d,\\n\"%self.Nsubgrid_edges()) \n fp.write(\"ANGLE =%.4f,\\n\"%self.angle)\n fp.write(\"LOCATION=%s\\n\"%self.location)\n\n fp.write(\"/\\n\")\n\n for v in range(self.Nnodes()):\n fp.write(\"%10d %13.4f %15.4f\\n\"%(v+1,\n self.nodes['x'][v,0],\n self.nodes['x'][v,1]))\n # cell lines are like:\n # 1 4 490549.7527 4176428.3398 31459 30777 31369 31716 3 1 49990 2\n # idx center_x center_y nodes--------------------- edges--------\n # Nsides\n\n # Edge lines are like:\n # 49990 31369 31716 1 0\n # idx nodes------- cells-- 0 if boundary\n centers = self.cells_center()\n \n for c in range(self.Ncells()):\n edges = self.cell_to_edges(c)\n nodes = self.cell_to_nodes(c)\n\n nsides = len(edges)\n\n fp.write(\"%10d %14d %13.4f %17.4f \"%(c+1,nsides,centers[c,0],centers[c,1]))\n edge_str = \" \".join( [\"%14d\"%(e+1) for e in edges] )\n node_str = \" \".join( [\"%14d\"%(n+1) for n in nodes] )\n fp.write(node_str+\" \"+edge_str+\"\\n\")\n\n for e in range(self.Nedges()):\n fp.write(\"%10d %14d %14d %14d %14d\\n\"%(e+1,\n self.edges['nodes'][e,0]+1,self.edges['nodes'][e,1]+1,\n self.edges['cells'][e,0]+1,self.edges['cells'][e,1]+1))\n\n # since we have to do this 4 times, make a helper function\n def fmt_wrap_lines(fp,values,fmt=\"%14.4f \",per_line=10):\n \"\"\" write values out to file fp with the given string format, but break\n the lines so no more than per_line values on a line\n ends with a newline\n \"\"\"\n for i,a in enumerate(values):\n if i>0 and i%10==0:\n fp.write(\"\\n\")\n fp.write(\"%14.4f \"%a)\n fp.write(\"\\n\")\n \n # subgrid bathy\n for c in range(self.Ncells()):\n areas,depths = self.cells['subgrid'][c]\n nis = len(areas)\n\n fp.write(\"%14d %14d\\n\"%(c+1,nis))\n fmt_wrap_lines(fp,areas)\n fmt_wrap_lines(fp,depths)\n\n edge_lengths = self.edges_length()\n\n for e in range(Ninternal+Nflow):\n lengths,depths = self.edges['subgrid'][e]\n nis = len(lengths)\n\n fp.write(\"%10d %9d\\n\"%(e+1,nis))\n fmt_wrap_lines(fp,lengths)\n fmt_wrap_lines(fp,depths)",
"def writeEcs( self ):\n\n self.logger.info( 'writeEcs: START' )\n\n # Generate inserts for ecs table.\n self.importerEc.writeEcs()\n\n self.logger.info( 'writeEcs: DONE' )",
"def lines_printed_to(file):\n with io.open(file, 'w') as fp:\n def write_line(s=\"\"):\n PRINT(s, file=fp)\n yield write_line",
"def writeatm(Smin, Smax, npoints, T1, T2, n, filename11, filename12, \\\n filename21, filename22):\n #Smin = 8.876123 #shoot.mins(n, T1, T2, 10**(-4), 1, Stry, step)[0]\n x = atmtot(Smin, Smax, npoints, n, T1, T2)\n atmprof1=x[0]\n atmset1=x[1]\n atmprof2=x[2]\n atmset2=x[3]\n## atmset1rev = atmset1[::-1]\n## atmset = 0 * numpy.ndarray(shape = (2 * n - 1, ncol), dtype = float)\n## for i in range(n):\n## atmset[i] = atmset1rev[i]\n## for i in range(n, 2 * n - 1):\n## atmset[i] = atmset2[i - n + 1]\n S = numpy.linspace(Smin, Smax, npoints)\n f = open(filename11, 'wb')\n f.write(\" Td(K)=%s \" % str(Td))\n f.write(\" Pd(dyn cm^-2)=%s \" % str(Pd))\n f.write(\" Mc(g)=%s \" % str(Mc))\n f.write(\" rc(cm)=%s \" % str(rc))\n f.write(\"%s\\n\" % str(npoints))\n f.write(\" \")\n for i in range(npoints):\n f.write(\" %s \" % str(S[i]))\n f.write(\"%s\\n\" % str(numpy.shape(atmprof1)[1]))\n for j in range(numpy.shape(atmprof1)[1]):\n f.write(\" \")\n thelist=list(atmprof1[i,j,:])\n for item in thelist:\n f.write(\"%s \" % item)\n f.write(\"\\n\")\n \n f.close()\n f = open(filename12, 'wb')\n f.write(\" Td(K)=%s \" % str(Td))\n f.write(\" Pd(dyn cm^-2)=%s \" % str(Pd))\n f.write(\" Mc(g)=%s \" % str(Mc))\n f.write(\" rc(cm)=%s \\n\" % str(rc))\n f.write(\"%s\\n\" % str(npoints))\n f.write(\" \")\n f.write(\"log(S) M_conv(Me) M_rad(Me) rb(Re) RB(Re) \\\n RHill(Re) Pc(dyn cm^-2) Pcb(dyn cm^-2) Tc(K) Tcb(K) \\\n Eg(erg) U(erg) Etot(erg) Egiso(erg) Uiso(erg) Etotiso(erg) \\\n vir L(erg s^-1) \\n\" )\n for i in range(numpy.shape(atmset1)[0]):\n f.write(\" \")\n thelist = list(atmset1[i,:])\n thelist.insert(0, S[i])\n for item in thelist:\n f.write(\"%s \" % item)\n f.write(\"\\n\")\n f.close()\n\n f = open(filename21, 'wb')\n f.write(\" Td(K)=%s \" % str(Td))\n f.write(\" Pd(dyn cm^-2)=%s \" % str(Pd))\n f.write(\" Mc(g)=%s \" % str(Mc))\n f.write(\" rc(cm)=%s \" % str(rc))\n f.write(\"%s\\n\" % str(npoints))\n f.write(\" \")\n for i in range(npoints):\n f.write(\" %s \" % str(S[i]))\n f.write(\"%s\\n\" % str(numpy.shape(atmprof2)[1]))\n for j in range(numpy.shape(atmprof2)[1]):\n f.write(\" \")\n thelist=list(atmprof2[i,j,:])\n for item in thelist:\n f.write(\"%s \" % item)\n f.write(\"\\n\")\n \n f.close()\n f = open(filename22, 'wb')\n f.write(\" Td(K)=%s \" % str(Td))\n f.write(\" Pd(dyn cm^-2)=%s \" % str(Pd))\n f.write(\" Mc(g)=%s \" % str(Mc))\n f.write(\" rc(cm)=%s \\n\" % str(rc))\n f.write(\"%s\\n\" % str(npoints))\n f.write(\" \")\n f.write(\"log(S) M_conv(Me) M_rad(Me) rb(Re) RB(Re) \\\n RHill(Re) Pc(dyn cm^-2) Pcb(dyn cm^-2) Tc(K) Tcb(K) \\\n Eg(erg) U(erg) Etot(erg) Egiso(erg) Uiso(erg) Etotiso(erg) \\\n vir L(erg s^-1) \\n\" )\n for i in range(numpy.shape(atmset2)[0]):\n f.write(\" \")\n thelist = list(atmset2[i,:])\n thelist.insert(0, S[i])\n for item in thelist:\n f.write(\"%s \" % item)\n f.write(\"\\n\")\n f.close()",
"def sequential_write(out_file_name, num_writes, offset, value_offset, \n\t\tnum_address_bits, num_data_bits, num_sets, num_ways, line_size):\n\twith open(out_file_name, 'w') as out_file:\n\t\tout_file.write('v2.0 raw\\n')\n\t\tnum_line_bits = int(log(line_size,2))\n\t\tnum_set_bits = int(log(line_size,2))\n\t\tread = 1 << (num_address_bits + num_data_bits + 1)\n\t\twrite = 1 << (num_address_bits + num_data_bits)\n\t\tdone = 1 <<(num_address_bits + num_data_bits + 1 + 1)\n\t\tfor i in range(offset, offset+num_writes):\n\t\t\taddress = i << num_data_bits\n\t\t\tvalue = (value_offset + i) % (2**num_data_bits)\n\t\t\tout_file.write('%x #write value 0x%x to addr %d \\n' % ((write | address | value), value, i))\n\t\t\n\t\t#flush the cache to see if the values were updated.\n\t\tfor set in range(num_sets): #for each set\n\t\t\tselected_set = set << (num_data_bits + num_line_bits)\n\t\t\tfor way in range(0,num_ways): #for each way in the set\n\t\t\t\ttag = way << (num_data_bits + num_line_bits + num_set_bits)\n\t\t\t\tfor i in range(2): #guarentee we knock out that line to memory\n\t\t\t\t\ttag += i << (num_data_bits + num_line_bits + num_set_bits)\n\t\t\t\t\taddress = tag | selected_set\n\t\t\t\t\tout_file.write('%x #read addr %d\\n' % ((read | address), address >> num_data_bits))\n\t\t\t\t\t\n\t\t#read everything back in to see if we updated correctly\n\t\tfor i in range(offset, offset+num_writes):\n\t\t\taddress = i << num_data_bits\n\t\t\tout_file.write('%x #read addr %d\\n' % ((read | address), i))\n\t\t\t\t\t\n\t\tout_file.write('%x\\n' % (0)) #just a quick pause\n\t\tout_file.write('%x\\n' % (done)) #mark that we are completed",
"def write_file(a_file, lines):\r\n return append_file(a_file, lines, append=False)",
"def write_file(a_file, lines):\r\n return append_file(a_file, lines, append=False)",
"def write_trajectory(self, environmnent, pdb_filename):\n # TODO\n pass",
"def write_po(self, outputfile):\n raise NotImplementedError(\n \"Writing to this file format is not yet implemented\")",
"def output(self,file):\n peep=len(self.findProID())\n f=open(file,'w')\n f.writelines(\" Apache Point Observatory\\n\"\\\n \" 3.5m Telescope Night Log\\n\")\n f.writelines(\" \"+self.link.GetLabel()+'\\n')\n #f.writelines('\\n'+self.userHeader.GetLabel()+'\\n')\n f.writelines(\"\\n ACTUAL\\n\"\\\n \" ASTRONOMER OBSERVER(S) INSTRUMENT START FINISH\\n\"\\\n \"--------------------------------------------------------------------\\n\")\n f.writelines('%s%s%s%s%s\\n' % (self.usastr0.GetValue().ljust(18),self.usobs0.GetValue().ljust(22),self.usinst0.GetValue().ljust(15),self.usstart0.GetValue().ljust(8), self.usend0.GetValue().ljust(8)))\n if oneVar==1:\n f.writelines('%s%s%s%s%s\\n' % (self.usastr0b.GetValue().ljust(18),self.usobs0b.GetValue().ljust(22),self.usinst0b.GetValue().ljust(15),self.usstart0b.GetValue().ljust(8), self.usend0b.GetValue()))\n f.writelines('%s%s%s%s%s\\n' % (self.usastr1.GetValue().ljust(18), self.usobs1.GetValue().ljust(22),self.usinst1.GetValue().ljust(15),self.usstart1.GetValue().ljust(8), self.usend1.GetValue()))\n if twoVar==1:\n f.writelines('%s%s%s%s%s\\n' % (self.usastr1b.GetValue().ljust(18),self.usobs1b.GetValue().ljust(22),self.usinst1b.GetValue().ljust(15),self.usstart1b.GetValue().ljust(8), self.usend1b.GetValue()))\n if peep > 2:\n f.writelines('%s%s%s%s%s\\n' % (self.usastr2.GetValue().ljust(18), self.usobs2.GetValue().ljust(22),self.usinst2.GetValue().ljust(15),self.usstart2.GetValue().ljust(8), self.usend2.GetValue()))\n if threeVar==1:\n f.writelines('%s%s%s%s%s\\n' % (self.usastr2b.GetValue().ljust(18),self.usobs2b.GetValue().ljust(22),self.usinst2b.GetValue().ljust(15),self.usstart2b.GetValue().ljust(8), self.usend2b.GetValue()))\n if peep > 3:\n f.writelines('%s%s%s%s%s\\n' % (self.usastr3.GetValue().ljust(18), self.usobs3.GetValue().ljust(22), self.usinst3.GetValue().ljust(15),self.usstart3.GetValue().ljust(8), self.usend3.GetValue()))\n if fourVar==1:\n f.writelines('%s%s%s%s%s\\n' % (self.usastr3b.GetValue().ljust(18),self.usobs3b.GetValue().ljust(22),self.usinst3b.GetValue().ljust(15),self.usstart3b.GetValue().ljust(8), self.usend3b.GetValue()))\n\n f.writelines('\\n' + self.schedHalf.GetLabel())\n f.writelines(\" ----------------------------------------------------------------\\n\")\n f.writelines('%s\\n' % self.sc1.GetValue())\n f.writelines('%s\\n' % self.sc2.GetValue())\n if peep > 2:\n f.writelines('%s\\n' %self.sc3.GetValue())\n if peep > 3:\n f.writelines('%s\\n' % self.sc4.GetValue())\n f.writelines(\"\\nnote: scheduled times listed include instrument change time\\n\\n\"\\\n \" ------------- ACTIVITY LOG --------------\\n\")\n f.writelines(self.obsspec.GetLabel()+'\\n\\n')\n f.writelines(self.actText.GetValue()+'\\n')\n f.writelines(\"\\n ------- FAILURE LOG -------\\n\"\\\n \"\\n\"\\\n \"PROG INST FAILURE MODE TIME\\n\"\\\n \" (SEDFNVOG) TI/SHU START FINISH DESCRIPTION\\n\"\\\n \"----------------------------------------------------------------------\\n\")\n f.writelines(self.failLog.GetValue()+'\\n')\n f.writelines('\\n'+self.focus.GetLabel()+'\\n')\n f.writelines(self.focusLog.GetValue()+'\\n')\n f.writelines(self.weathText.GetValue()+'\\n')\n f.writelines(' Note: the wind was coming from the azimuth listed.\\n'\\\n ' The convention used is north=0 degrees, east=90 degrees.\\n'\\\n ' The dust count is particles > 1u per 0.1 cubic feet.\\n\\n')\n f.writelines(self.stat.GetLabel()+'\\n')\n f.writelines(\" Telescope drives operational. 
Current TCC version: \" + self.statTCCText.GetValue() + '\\n')\n f.writelines(\" Current TUI version: \" + self.statTUIText.GetValue() + '\\n') \n f.close()\n\n \"\"\"In safari save as page source with filename weather.html\n In firefox save as web page, html only with filename weather.html\n \"\"\"",
"def write_patch(filename, pts, edges=None):\n if edges is None:\n edges = set()\n\n with open(filename, 'wb') as fp:\n fp.write(struct.pack('>2i', -1, len(pts)))\n for i, pt in pts:\n if i in edges:\n fp.write(struct.pack('>i3f', -i-1, *pt))\n else:\n fp.write(struct.pack('>i3f', i+1, *pt))",
"def eeg_writeavr(array,tsb,di,file):\t\t\n import shutil as shu\n f=open(file,'w')\n firstline = 'Npts= %i TSB= %i DI= %7.5f SB= %7.5f SC= %i NChan= %i\\n' %(array.shape[1],tsb,di,1,200,array.shape[0]) \n chnam = 'Cz FP1 FP2 F3 F4 C3 C4 P3 P4 O1 O2 F7 F8 T7 T8 P7 P8 Fz Pz FC1 FC2 CP1 CP2 FC5 FC6 CP5 CP6 FT9 FT10 TP9 TP10 PO9 PO10\\n'\n f.write(firstline)\n f.write(chnam)\n for i in range(array.shape[0]):\n tmp = array[i,:]\n f.write(('%7.5f ' * len(tmp)) %tuple(tmp))\n f.write('\\n')\n \n f.close()\n #may want to change this on different machines...\n src = '/Users/crislanting/Projects/EEG/data/33.elp'\n dest = file[:-4] + '.elp'\n shu.copyfile(src,dest)",
"def dump_blocks(blocks, fname):\n with open(fname, 'w') as fh:\n for block in blocks:\n for line in block:\n fh.write(line)",
"def export(self, fname):\n f = open(fname, 'w')\n for ue in self.ue_list:\n line_components = list()\n line_components.append(ue.expression)\n line_components.append(ue.meaning)\n print >>f, '\\t'.join(line_components).encode('utf-8')",
"def lines(self, lines):\n if self.cache_content:\n self.cached_content = ''.join(b2u(line) for line in lines)\n\n try:\n with self._open_dockerfile('wb') as dockerfile:\n dockerfile.writelines(u2b(line) for line in lines)\n except (IOError, OSError) as ex:\n logger.error(\"Couldn't write lines to dockerfile: %r\", ex)\n raise",
"def save_aligned_BE(*args):\r\n\r\n try:\r\n global bambara_be\r\n global english_be\r\n bambara_be.append(lines_bam[line_no_1])\r\n english_be.append(lines_en[line_no_3])\r\n except ValueError:\r\n pass",
"def writeOrganisms( self ):\n\n self.logger.info( 'writeOrganisms: START' )\n\n # Generate inserts for meabolic pathways.\n self.importerOrganism.writeOrganisms()\n \n\n self.logger.info( 'writeOrganisms: keggreader.getAllOrganismEcs() : START' )\n\n # Get all organism ecs relations.\n organismEcs = self.reader.getAllOrganismEcs()\n\n self.logger.info( 'writeOrganisms: keggreader.getAllOrganismEcs() : DONE' )\n\n\n self.logger.info( 'writeOrganisms: keggreader.getAllOrganismMaps() : START' )\n\n # Get all organism maps relations.\n organismMaps = self.reader.getAllOrganismMaps()\n\n self.logger.info( 'writeOrganisms: keggreader.getAllOrganismMaps() : DONE' )\n\n\n self.logger.info( 'writeOrganisms: organismEcFile is organismEcsInsert.psql' )\n\n # Open protein_ecs insert file.\n organismEcFile = self.openInsertFile( 'organismEcsInsert.psql' )\n\n\n self.logger.info( 'writeOrganisms: organismMapFile is organismMapsInsert.psql' )\n\n # Open organism_maps insert file.\n organismMapFile = self.openInsertFile( 'organismMapsInsert.psql' )\n\n\n # Now we have to write organism_ecs table.\n for organism,relationalDatabaseId in self.importerOrganism.organismsInserted.iteritems():\n\n\n organismId = relationalDatabaseId\n\n if len( organismEcs[ organism ] ) > 0:\n \n self.logger.info( 'writeOrganisms: the organism: ' + organism + ' : FOUND ' + str(len(organismEcs[organism])) + ' EC numbers.' )\n for ec in organismEcs[ organism ]:\n ecId = self.importerEc.ecsInserted[ ec ]\n\n #self.writeOrganismEcsFile( organismEcFile, organismId , ecId )\n self.writeFile( organismEcFile, 'organism_ecs', [ str(organismId) , str(ecId) ] )\n else:\n self.logger.info( 'writeOrganisms: the organism: ' + organism + ' : doesnt have EC numbers associated.' )\n\n\n if len( organismMaps[ organism ] ) > 0:\n \n self.logger.info( 'writeOrganisms: the organism: ' + organism + ' : FOUND ' + str(len(organismMaps[organism])) + ' MAP numbers.' )\n for mapNumber in organismMaps[ organism ]:\n\n # We don't need maps that is not metabolic maps.\n if mapNumber in self.importerPathway.pathwayMapsInserted:\n mapId = self.importerPathway.pathwayMapsInserted[ mapNumber ]\n\n #self.writeOrganismMapsFile( organismMapFile, organismId , mapId )\n self.writeFile( organismMapFile, 'organism_maps', [ str(organismId) , str(mapId) ] )\n else:\n self.logger.info( 'writeOrganisms: the organism: ' + organism + ' : doesnt have MAP numbers associated.' )\n\n\n self.logger.info( 'writeOrganisms: DONE' )",
"def write_edgelist(H, path, delimiter=\" \", encoding=\"utf-8\"):\n with open(path, \"wb\") as file:\n for line in generate_edgelist(H, delimiter):\n line += \"\\n\"\n file.write(line.encode(encoding))",
"def write_uem(uemf, uem, n_digits=3):\n with open(uemf, 'wb') as f:\n for file_id in sorted(iterkeys(uem)):\n for onset, offset in sorted(uem[file_id]):\n line = ' '.join([file_id,\n '1',\n format_float(onset, n_digits),\n format_float(offset, n_digits)\n ])\n f.write(line.encode('utf-8'))\n f.write(b'\\n')",
"def write_input_file(y,z,fname):\n file = open('c:/4nec2/out/' + fname + '.nec', 'w')\n file.write('CM Seeddesign \\n')\n file.write('CM Zigzag Antenna \\n')\n file.write('CE File generated by python \\n')\n seg = 1\n\n #write the antenna\n for i in range(0,len(y)-1):\n file.write('GW %3i %3i %8.4f %8.4f %8.4f %8.4f %8.4f %8.4f %8.4f\\n' % (1,seg,0,y[i],z[i],0,y[i+1],z[i+1],1))\n\n file.write('GE 0 \\n')\n file.write('EK \\n')\n file.write('EX %3i %3i %3i %3i %3i %3i %3i\\n' % (0,1,1,1,1,0,0))\n file.write('GN -1 \\n')\n \n file.write('FR %3i %3i %3i %3i %8.4f %8.4f\\n' % (0,1,0,0,900,0))\n file.write('FR %3i %3i %3i %3i %8.4f %8.4f\\n' % (0,11,0,0,850,10))\n\n file.write('LD %3i %3i %3i %3i %8.4f %8.4f\\n' % (5,1,0,0,58000000,2))\n file.write('RP %3i %3i %3i %3i %8.4f %8.4f %8.4f %8.4f\\n' % (0,1,1,1000,90,0,0,0))\n\n file.write('EN \\n')\n file.close()",
"def write_edges(\n edges: Mapping[str, Any],\n filename: str,\n jsonlines: bool = False,\n gzipflag: bool = False,\n yaml: bool = False,\n):\n pass",
"def make_bed(lines, num):\n f = get_prereqs(num)\n with open(f, 'w') as o:\n for contig, start, stop in lines:\n o.write(\"%s\\t%s\\t%s\\n\" % (contig, start, stop))",
"def writeAlltoFile(self):\n with open(self._fname, 'w') as f:\n for elem in self.getAll():\n line = self._writeGratoLine(elem)\n f.write(line + \"\\n\")\n f.close()",
"def generate_jaccard0_isoseq_bed(self):\n all = set(self.isoseqid2exonlen.keys())\n notwant = set(self.isoseqid2besttransidB.keys())\n want = all - notwant\n want_lines = []\n with open(\"../data/pacbio/\" + self.name + \".B.j0.bed\", 'w') as f:\n for line in self.linesPacBioBed:\n (chrom, chromStart, chromEnd, name, score, strand, thickStart, thickEnd, itemRgb, blockCount, blockSizes, blockStarts) = line.rstrip().split(\"\\t\")\n if name in want:\n f.write(line)",
"def writeAD(self):\n ofname = self.ad_file\n ofh = open(ofname,'w')\n\n for line in self.lines_ad:\n f = line.strip().split()\n if (len(f) > 1 and f[1] == 'WindFile'):\n if (self.wind_file != None):\n f[0] = \"\\\"\"+self.wind_file+\"\\\"\"\n line = unsplit(f)\n ofh.write(line)\n\n ofh.close()\n\n # now also copy relevant airfoil files, if path is relative\n tmp = self.af_dict['polar_files'][0]\n if not os.path.isabs(tmp):\n tmp = tmp.split(\"\\\\\")\n tmp = tmp[0].split(\"/\")[0]\n # tmp is now root of relative path to airfoils\n dst = tmp\n src = os.path.join(self.fst_dir, tmp)\n print \"copying aerodata from \", src, \"TO \", dst\n if (not os.path.isdir(dst)):\n shutil.copytree(src, dst)\n\n # copy of relevant wind file in separate function writeWnd",
"def wline():\n # this is the alpha\n inlist = list(\"begin\") # change data into a list element\n outlist[0:5] = inlist # place data in the list in the correct place\n # print(\"\".join(outlist)) # see result\n # this is the omega\n inlist = list(\"end\") # change data into a list element\n outlist[1247:1250] = inlist # place data in the list in the correct place\n # ok, pack em up...\n outstr = \"\".join(outlist)\n print(outstr)\n # print(\"Length is \"+lswtchro()en(outstr))\n print(len(outstr))\n # of = open(\"workfile\", \"w\")\n of.write(outstr)",
"def compose_g_carpa(\n in_carpa_path: str,\n temp_carpa_path: str,\n words_mapping: MappingType,\n carpa_path: str,\n log_file: TextIO,\n):\n bos_symbol = words_mapping[\"<s>\"]\n eos_symbol = words_mapping[\"</s>\"]\n unk_symbol = words_mapping[\"<unk>\"]\n with open(in_carpa_path, \"r\", encoding=\"utf8\") as f, open(\n temp_carpa_path, \"w\", encoding=\"utf8\"\n ) as outf:\n current_order = -1\n num_oov_lines = 0\n for line in f:\n line = line.strip()\n col = line.split()\n if current_order == -1 and not re.match(r\"^\\\\data\\\\$\", line):\n continue\n if re.match(r\"^\\\\data\\\\$\", line):\n log_file.write(r\"Processing data...\\n\")\n current_order = 0\n outf.write(line + \"\\n\")\n elif re.match(r\"^\\\\[0-9]*-grams:$\", line):\n current_order = int(re.sub(r\"\\\\([0-9]*)-grams:$\", r\"\\1\", line))\n log_file.write(f\"Processing {current_order} grams...\\n\")\n outf.write(line + \"\\n\")\n elif re.match(r\"^\\\\end\\\\$\", line):\n outf.write(line + \"\\n\")\n elif not line:\n if current_order >= 1:\n outf.write(\"\\n\")\n else:\n if current_order == 0:\n outf.write(line + \"\\n\")\n else:\n if len(col) > 2 + current_order or len(col) < 1 + current_order:\n raise Exception(f'Bad line in arpa lm \"{line}\"')\n prob = col.pop(0)\n is_oov = False\n for i in range(current_order):\n try:\n col[i] = str(words_mapping[col[i]])\n except KeyError:\n is_oov = True\n num_oov_lines += 1\n break\n if not is_oov:\n rest_of_line = \" \".join(col)\n outf.write(f\"{prob}\\t{rest_of_line}\\n\")\n carpa_proc = subprocess.Popen(\n [\n thirdparty_binary(\"arpa-to-const-arpa\"),\n f\"--bos-symbol={bos_symbol}\",\n f\"--eos-symbol={eos_symbol}\",\n f\"--unk-symbol={unk_symbol}\",\n temp_carpa_path,\n carpa_path,\n ],\n stdin=subprocess.PIPE,\n stderr=log_file,\n stdout=log_file,\n env=os.environ,\n )\n carpa_proc.communicate()\n os.remove(temp_carpa_path)",
"def writePosFilesStep(self): \n \n writeSetOfCoordinates(self._getExtraPath(), self.inputCoordinatesTiltedPairs.get().getUntilted())\n \n writeSetOfCoordinates(self._getExtraPath(), self.inputCoordinatesTiltedPairs.get().getTilted())",
"def write_ptm_gridfile(self,fn):\n vertex_hdr = \" Vertex Data: vertex_number, x, y\"\n poly_hdr = \" Polygon Data: polygon_number, number_of_sides,center_x, center_y, center_depth, side_indices(number_of_sides), marker(0=internal,1=open boundary)\"\n side_hdr = \" Side Data: side_number, side_depth, node_indices(2), cell_indices(2), marker(0=internal,1=external,2=flow boundary,3=open boundary)\"\n\n with open(fn,'wt') as fp:\n # write header counts\n fp.write(\" Number of Vertices\\n\")\n fp.write(\" %20d\\n\"%self.Nnodes())\n fp.write(\" Number of Polygons\\n\")\n fp.write(\" %20d\\n\"%self.Ncells())\n fp.write(\" Number of Sides\\n\")\n fp.write(\" %20d\\n\"%self.Nedges())\n fp.write(\" NODATA (land) value\\n\")\n fp.write(\" -9999.000000000\\n\")\n\n # write vertex info\n fp.write(vertex_hdr+\"\\n\")\n for v in range(self.Nnodes()):\n fp.write(\" %10d %16.7f %16.7f\\n\"%(v+1,\n self.nodes['x'][v,0],\n self.nodes['x'][v,1]))\n\n # write polygon info\n fp.write(poly_hdr+\"\\n\")\n cell_write_str1 = \" %10d %10d %16.7f %16.7f %16.7f \"\n cell_depths = self.cell_depths()\n for e in range(self.Ncells()):\n edges = self.cells['edges'][e,:]\n edges[edges<0] = -1\n edge_str = \" \".join( [\"%10d\"%(s+1) for s in edges] )\n edge_str = edge_str+\" %10d\\n\"%(self.cells['mark'][e])\n nsides = sum(edges>=0)\n fp.write(cell_write_str1%(e+1,\n nsides,\n self.cells['_center'][e,0],\n self.cells['_center'][e,1],\n cell_depths[e]))\n fp.write(edge_str)\n \n # write side info\n fp.write(side_hdr+\"\\n\")\n edge_depths = self.edge_depths()\n edge_write_str = \" %10d %16.7f %10d %10d %10d %10d %10d\\n\"\n for s in range(self.Nedges()):\n edges = self.edges['cells'][s,:]\n edges[edges<0] = -1 \n nodes = self.edges['nodes'][s,:]\n nodes[nodes<0] = -1\n fp.write(edge_write_str%(s+1,\n edge_depths[s],\n nodes[0]+1,\n nodes[1]+1,\n edges[0]+1,\n edges[1]+1,\n self.edges['mark'][s]))",
"def write(self, outfile, rebasings=None):\r\n raise NotImplementedError()",
"def write_to_file(self, papers, filename):\n\t\tpass",
"def writelines(self, ss):\n for s in ss:\n self.writeline(s)",
"def write_adjoint_traces(self, path, syn, obs, channel,att=\"\"):\n nt, dt, _ = self.get_time_scheme(syn)\n nn, _ = self.get_network_size(syn)\n \n adj = syn.copy()\n if att =='Yes' :\n self.adjoint = getattr(adjoint, PAR.MISFIT + '_att')\n else :\n self.adjoint = getattr(adjoint, PAR.MISFIT)\n #freq_mask = np.loadtxt('/data1/etienneb/freq_mask.txt')\n \n ft_obs_se = self.load('ft_obs_se')\n freq_mask = self.load('freq_mask_se')\n \n for ii in range(nn):\n adj[ii].data = self.adjoint(\n syn[ii].data, nt, dt,\n ft_obs_se[:,ii],\n freq_mask[:,ii]\n )\n \n adj = self.apply_filter(adj,dt)\n \n #subset = np.random.choice([i for i in range(nn)],nn-nn/3)\n #for ii in range(nn-nn/3):\n # adj[subset[ii]].data = np.zeros(len(adj[0].data)) \n\n for tr in adj:\n tr.taper(0.005, type='hann')\n self.writer(adj, path, channel)",
"def WritePostMods(fp,psf,pdb,PostMod,Loops,GlycanSegs):\n logfile=''\n logevery=1\n lobsavevery=-1\n\n if 'log_dcd_file' in PostMod:\n logfile=PostMod['log_dcd_file']\n if 'log_every' in PostMod:\n logevery=PostMod['log_every']\n if 'log_save_every' in PostMod:\n logsaveevery=PostMod['log_save_every']\n logdcd=len(logfile)>0\n if 'lay_cycles' in PostMod:\n lay_cycles=PostMod['lay_cycles']\n\n prefix=pdb[:]\n prefix=prefix.replace('.pdb','')\n fp.write('### Post modifications follow:\\n')\n fp.write('mol delete top\\n')\n fp.write('mol new {}\\n'.format(psf))\n fp.write('set molid [molinfo top get id]\\n')\n fp.write('mol addfile {}\\n'.format(pdb))\n if logdcd:\n fp.write('### logging enabled\\n')\n fp.write('mol new {}\\n'.format(psf))\n fp.write('mol addfile {}\\n'.format(pdb))\n fp.write('set logid [molinfo top get id]\\n')\n fp.write('mol top $molid\\n')\n else:\n fp.write('set logid -1\\n')\n if 'center_protein' in PostMod and PostMod['center_protein']:\n fp.write('set a [atomselect $molid \"all\"]\\n')\n fp.write('set or [measure center $a weight mass]\\n')\n fp.write('$a moveby [vecscale -1 $or]\\n')\n if logdcd:\n fp.write('set la [atomselect $logid \"all\"]\\n')\n fp.write('$la moveby [vecscale -1 $or]\\n')\n if 'reorient_protein' in PostMod and PostMod['reorient_protein']:\n fp.write('set ca [measure center [atomselect $molid \"protein and {}\"] weight mass]\\n'.format(PostMod['reorselstr'][0]))\n fp.write('set cb [measure center [atomselect $molid \"protein and {}\"] weight mass]\\n'.format(PostMod['reorselstr'][1]))\n fp.write('set pi 3.415928\\n')\n fp.write('set dv [vecsub $ca $cb]\\n')\n fp.write('set d [veclength $dv]\\n')\n fp.write('set cp [expr [lindex $dv 0]/$d]\\n')\n fp.write('set sp [expr [lindex $dv 1]/$d]\\n')\n fp.write('set p [expr acos($cp)]\\n')\n fp.write('if {[expr $sp < 0.0]} {\\n')\n fp.write(' set p [expr 2*$pi-$p]\\n')\n fp.write('}\\n')\n fp.write('set ct [expr [lindex $dv 2]/$d]\\n')\n fp.write('set t [expr acos($ct)]\\n')\n fp.write('$a move [transaxis z [expr -1 * $p] rad]\\n')\n fp.write('$a move [transaxis y [expr -1 * $t] rad]\\n')\n if logdcd:\n fp.write('$la move [transaxis z [expr -1 * $p] rad\\n')\n fp.write('$la move [transaxis y [expr -1 & $t] rad\\n')\n for crot in PostMod['Crot']:\n fp.write(crot.psfgen_str(molid=r'$molid'))\n if logdcd:\n fp.write('log_addframe $molid $logid\\n')\n if 'do_preclose_min_smd' in PostMod and PostMod['do_preclose_min_smd']:\n lay_cycles=100\n if 'preclose_params' in PostMod:\n p=PostMod['preclose_params']\n lay_cycles=lay_cycles if 'lay_cycles' not in p else p['lay_cycles']\n for l in sorted(Loops, key=lambda x: len(x.residues)):\n if (l.term and len(l.residues)>2):\n fp.write('lay_loop $molid {} [range {} {} 1] {}\\n'.format(l.replica_chainID,\n l.residues[0].resseqnum,l.residues[-1].resseqnum,lay_cycles))\n\n \n if 'do_multiflex_mc' in PostMod and PostMod['do_multiflex_mc']:\n nc=1000\n rcut=4.0\n sigma=1.8\n epsilon=0.5\n cutoff=math.pow(2,(1./6.))*sigma\n shift=epsilon\n mctemperature=3.0\n mck=10.0\n dstop=2.0\n sstop=2.0\n maxanglestep=60.0 # degrees\n do_loops = 0\n do_gly = 1\n if 'multiflex_mc_params' in PostMod:\n p=PostMod['multiflex_mc_params']\n nc=nc if 'maxcycles' not in p else p['maxcycles']\n rcut=rcut if 'rcut' not in p else p['rcut']\n sigma=sigma if 'sigma' not in p else p['sigma']\n epsilon=epsilon if 'epsilon' not in p else p['epsilon']\n shift=shift if 'shift' not in p else p['shift']\n cutoff=cutoff if 'cutoff' not in p else p['cutoff']\n mctemperature=mctemperature if 'temperature' not 
in p else p['temperature']\n mck=mck if 'k' not in p else p['k']\n dstop=dstop if 'dstop' not in p else p['dstop']\n sstop=sstop if 'sstop' not in p else p['sstop']\n maxanglestep=maxanglestep if 'maxanglestep' not in p else p['maxanglestep']\n do_loops=do_loops if 'loops' not in p else p['loops']\n do_gly=do_gly if 'gly' not in p else p['gly']\n fp.write('set mcp [dict create]\\n')\n fp.write('dict set mcp nc {}\\n'.format(nc))\n fp.write('dict set mcp cellsize {}\\n'.format(rcut))\n fp.write('dict set mcp ljsigma {}\\n'.format(sigma))\n fp.write('dict set mcp ljepsilon {}\\n'.format(epsilon))\n fp.write('dict set mcp ljcutoff {}\\n'.format(cutoff))\n fp.write('dict set mcp ljshift {}\\n'.format(shift))\n fp.write('dict set mcp temperature {}\\n'.format(mctemperature))\n fp.write('dict set mcp mck {}\\n'.format(mck))\n fp.write('dict set mcp dstop {}\\n'.format(dstop))\n fp.write('dict set mcp sstop {}\\n'.format(sstop))\n fp.write('dict set mcp maxanglestep {}\\n'.format(maxanglestep))\n fp.write('set bg [atomselect $molid \"noh\"]\\n')\n fp.write('set fa {}\\n')\n fp.write('set i {}\\n')\n fp.write('set j {}\\n')\n # fp.write('set loopindex 0\\n')\n # fp.write('set loops {\\n')\n # build rotsel as as all atom indices in selection with rotatable bonds\n # that is all atoms in all residues except for the C and O of last residue in each loop\n rotsel=''\n if len(Loops)>0 and do_loops == 1:\n loopsel_substr=[]\n fa_substr=[]\n ca_substr=[]\n c_substr=[]\n #Loops.sort(key=lambda l: len(l.residues))\n for l in Loops:\n if l.term and len(l.residues)>1:\n # fp.write('{{ {} {} {} }}\\n'.format(l.replica_chainID,l.residues[0].resseqnum,l.residues[-1].resseqnum))\n loopsel_substr.append(' (chain {} and resid {} to {} and not (resid {} and name C O) )'.format(l.replica_chainID,l.residues[0].resseqnum,l.residues[-1].resseqnum,l.residues[-1].resseqnum))\n fa_substr.append(' (chain {} and resid {} and name CA) '.format(l.replica_chainID,l.residues[0].resseqnum))\n ca_substr.append(' (chain {} and resid {} and name CA) '.format(l.replica_chainID,l.residues[-1].resseqnum))\n c_substr.append(' (chain {} and resid {} and name C) '.format(l.replica_chainID,l.residues[-1].resseqnum))\n loopsel=' or '.join(loopsel_substr)\n fa_sel=' or '.join(fa_substr)\n ca_sel=' or '.join(ca_substr)\n c_sel=' or '.join(c_substr)\n loopsel='(protein and ('+loopsel+'))'\n fa_sel='(protein and ('+fa_sel+'))'\n ca_sel='(protein and ('+ca_sel+'))'\n c_sel='(protein and ('+c_sel+'))'\n fp.write('set lfa [[atomselect $molid \"{}\"] get index]\\n'.format(fa_sel))\n fp.write('set lca [[atomselect $molid \"{}\"] get index]\\n'.format(ca_sel))\n fp.write('set lc [[atomselect $molid \"{}\"] get index]\\n'.format(c_sel))\n fp.write(r'set fa [list {*}$fa {*}$lfa]'+'\\n')\n fp.write(r'set i [list {*}$i {*}$lca]'+'\\n')\n fp.write(r'set j [list {*}$j {*}$lc]'+'\\n')\n rotsel=loopsel\n \n if len(GlycanSegs)>0 and do_gly == 1:\n glysel='(segname '+' '.join(GlycanSegs)+')'\n fp.write('set gra {}\\n')\n fp.write('set gi {}\\n')\n fp.write('set gj {}\\n')\n fp.write('set glycan_segs [list '+' '.join(GlycanSegs)+']\\n')\n fp.write('set ng [llength $glycan_segs]\\n')\n fp.write('foreach g $glycan_segs {\\n')\n fp.write(' set sel [atomselect $molid \"segname $g\"]\\n')\n fp.write(' set rid [$sel get resid]\\n')\n fp.write(' set root [lindex [lsort -unique -real $rid] 0]\\n')\n fp.write(' lappend gra [[atomselect $molid \"segname $g and name C1 and resid $root\"] get index]\\n')\n fp.write(' lappend gi -1\\n')\n fp.write(' lappend gj -1\\n')\n 
fp.write('}\\n')\n fp.write(r'set fa [list {*}$fa {*}$gra]'+'\\n')\n fp.write(r'set i [list {*}$i {*}$gi]'+'\\n')\n fp.write(r'set j [list {*}$j {*}$gj]'+'\\n')\n\n if len(rotsel)>0:\n rotsel = rotsel+' or '+glysel\n else:\n rotsel=glysel\n\n fp.write('set rotsel [atomselect $molid \"{}\"]\\n'.format(rotsel))\n fp.write('dict set atomind fa $fa\\n')\n fp.write('dict set atomind i $i\\n')\n fp.write('dict set atomind j $j\\n')\n fp.write('do_multiflex_mc $molid $rotsel atomind mcp [irand_dom 1000 9999] $logid {} {}\\n'.format(logevery,logsaveevery))\n \n new_pdb_out=prefix+'_mod.pdb'\n fp.write('$a writepdb {}\\n'.format(new_pdb_out))\n if logdcd:\n fp.write('set loga [atomselect $logid all]\\n')\n fp.write('animate write dcd {} waitfor all sel $loga $logid\\n'.format(logfile))\n fp.write('mol delete $logid\\n')\n return new_pdb_out",
"def hMMsplc_jDict2edgeBED(jDict,outPath):\n oFile = open(outPath, 'w')\n # --- get list of all coverages ---\n for chrm in jDict:\n for jnc in jDict[chrm]:\n origLine = jDict[chrm][jnc][0].split('\\t')\n oFile.write('%s\\t%s\\t%s\\t%s\\t%s\\n' % (chrm, jnc[0], jnc[1], origLine[3], origLine[4]))"
] | [
"0.6657263",
"0.6484899",
"0.5828503",
"0.5651945",
"0.5593861",
"0.5564902",
"0.54197687",
"0.5337634",
"0.5288988",
"0.5269306",
"0.52646947",
"0.52209884",
"0.5204748",
"0.51610637",
"0.5106298",
"0.5046272",
"0.5036513",
"0.5005771",
"0.49783012",
"0.49764898",
"0.49703205",
"0.49586713",
"0.4950611",
"0.49445367",
"0.49172002",
"0.49154788",
"0.4909088",
"0.48975632",
"0.4897193",
"0.48945704",
"0.48718226",
"0.4866325",
"0.48505673",
"0.4850563",
"0.4850324",
"0.48471683",
"0.4846046",
"0.48262244",
"0.48221928",
"0.4807746",
"0.48075607",
"0.4801895",
"0.48013687",
"0.4792195",
"0.4767324",
"0.47640017",
"0.47601402",
"0.47558",
"0.47534004",
"0.47422963",
"0.47389495",
"0.47380176",
"0.47363883",
"0.4736241",
"0.47361785",
"0.473467",
"0.47322258",
"0.47282478",
"0.47199613",
"0.47184157",
"0.4717598",
"0.4716908",
"0.47139886",
"0.47033393",
"0.4703059",
"0.46946138",
"0.46912357",
"0.46881875",
"0.46870944",
"0.46764475",
"0.46750015",
"0.46750015",
"0.46668422",
"0.46589813",
"0.46559685",
"0.46512514",
"0.4650738",
"0.46451074",
"0.46389374",
"0.4635466",
"0.46344998",
"0.46324083",
"0.4630043",
"0.46279627",
"0.46258366",
"0.4619921",
"0.46196234",
"0.46166804",
"0.46107805",
"0.46046224",
"0.46028382",
"0.4597783",
"0.45963243",
"0.45959306",
"0.45840955",
"0.4572139",
"0.45713216",
"0.45610595",
"0.4559765",
"0.45584503"
] | 0.7545118 | 0 |
Linac energy set points and bunch compressor offsets | def tao_BC_and_LEM_lines(epics):
bc1_e0=epics.caget('SIOC:SYS0:ML00:AO483')*1e6
bc2_e0=epics.caget('SIOC:SYS0:ML00:AO489')*1e9
l3_e0 =epics.caget('SIOC:SYS0:ML00:AO500')*1e9
# Charge in LTU
q_after_horn_cutting = epics.caget('SIOC:SYS0:ML00:CALC252')*1e-12 # pC -> C
bc1_offset=epics.caget('BMLN:LI21:235:MOTR')*1e-3
bc2_offset=epics.caget('BMLN:LI24:805:MOTR')*1e-3
bc1_current=epics.caget('SIOC:SYS0:ML00:AO485')
bc2_current=epics.caget('SIOC:SYS0:ML00:AO195')
# Catch bad settings
if bc1_current==0:
print('Warning: BC1 current is zero!')
bc1_sigma_z = 0
else:
# Assumes parabolic distribution
bc1_sigma_z = q_after_horn_cutting*299792458 / sqrt(10) / bc1_current
if bc2_current==0:
        print('Warning: BC2 current is zero!')
bc2_sigma_z = 0
else:
# Assumes Gaussian distribution
bc2_sigma_z = q_after_horn_cutting*299792458 / sqrt(12) / bc2_current
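    # Assemble Tao 'set data' commands recording the measured energies,
    # chicane offsets, and inferred bunch lengths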
lines = []
lines.append('set dat BC1.energy[1]|meas = '+str(bc1_e0))
lines.append('set dat BC2.energy[1]|meas = '+str(bc2_e0))
lines.append('set dat L3.energy[2]|meas = '+str(l3_e0))
lines.append('set dat BC1.offset[1]|meas = '+str(bc1_offset))
lines.append('set dat BC2.offset[1]|meas = '+str(bc2_offset))
lines.append(f'! Charge after horn cutting: {q_after_horn_cutting*1e12:10.4} pC')
lines.append(f'! For BC1 current {bc1_current} A')
lines.append('set dat BC1.beam[1]|meas = '+str( bc1_sigma_z))
lines.append(f'! For BC2 current {bc2_current} A')
lines.append('set dat BC2.beam[1]|meas = '+str( bc2_sigma_z))
return lines | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_caliblamp_offset(spec1, spec2, colname1='flux', colname2='flux',\n aperture_k=None, pixel_k=None, pixel_range=(-30, 30),\n max_order_offset=20,\n mode='normal'):\n\n if isinstance(pixel_range, int) or isinstance(pixel_range, float):\n if pixel_range <=0:\n print('Error: pixel_range must be positive')\n raise ValueError\n pixel_range = int(pixel_range)\n pixel_shift_lst = np.arange(-pixel_range, pixel_range)\n elif isinstance(pixel_range, list) or isinstance(pixel_range, tuple):\n if len(pixel_range)<2:\n print('Error: pixel_range must have length of 2')\n raise ValueError\n if pixel_range[0] >= pixel_range[1]:\n print('Error: pixel_range error')\n raise ValueError\n pixel_shift_lst = np.arange(pixel_range[0], pixel_range[1])\n else:\n pass\n\n if mode=='debug':\n dbgpath = 'debug'\n if not os.path.exists(dbgpath):\n os.mkdir(dbgpath)\n plot_ccf = True\n plot_scatter = True\n figname_ccf = os.path.join(dbgpath,\n 'lamp_ccf_{:+2d}_{:+03d}.png')\n figname_scatter = os.path.join(dbgpath,\n 'lamp_ccf_scatter.png')\n else:\n plot_ccf = False\n plot_scatter = False\n\n mean_lst = {(1, 1):[], (1, -1):[], (-1, 1):[], (-1, -1):[]}\n scatter_lst = {(1, 1):[], (1, -1):[], (-1, 1):[], (-1, -1):[]}\n all_scatter_lst = []\n all_mean_lst = []\n scatter_id_lst = []\n\n aper1_lst = spec1['aperture']\n aper2_lst = spec2['aperture']\n min_aper1 = aper1_lst.min()\n max_aper1 = aper1_lst.max()\n min_aper2 = aper2_lst.min()\n max_aper2 = aper2_lst.max()\n\n # determine the maxium absolute offsets between the orders of the two\n # spectra\n maxoff = min(max(aper1_lst.size, aper2_lst.size)//2, max_order_offset)\n aperture_offset_lst = np.arange(-maxoff, maxoff)\n\n def get_aper2(aper1, k, offset):\n if k == 1:\n # (aper2 - min_aper2) = (aper1 - min_aper1) + offset\n # in this case, real_offset = offset - min_aper1 + min_aper2\n aper2 = (aper1 - min_aper1) + offset + min_aper2\n elif k == -1:\n # (aper2 - min_aper2) = -(aper1 - max_aper1) + offset\n # in this cose, real_offset = offset + max_aper1 + min_aper2\n aper2 = -aper1 + max_aper1 + offset + min_aper2\n else:\n raise ValueError\n return aper2\n\n # aperture_k = 1: same cross-order direction;\n # -1: reverse cross-order direction.\n if aperture_k is None:\n search_aperture_k_lst = [1, -1]\n elif aperture_k in [1, -1]:\n search_aperture_k_lst = [aperture_k]\n else:\n print('Warning: Unknown aperture_k:', aperture_k)\n raise ValueError\n\n # pixel_k = 1: same main-dispersion direction;\n # -1: reverse main-dispersion direction.\n if pixel_k is None:\n search_pixel_k_lst = [1, -1]\n elif pixel_k in [1, -1]:\n search_pixel_k_lst = [pixel_k]\n else:\n print('Warning: Unknown pixel_k:', pixel_k)\n raise ValueError\n\n\n for aperture_k in search_aperture_k_lst:\n for aperture_offset in aperture_offset_lst:\n calc_pixel_shift_lst = {1: [], -1: []}\n if plot_ccf:\n fig2 = plt.figure(figsize=(10,8), dpi=150)\n axes2 = { 1: fig2.add_subplot(211),\n -1: fig2.add_subplot(212),\n }\n for row1 in spec1:\n aperture1 = row1['aperture']\n aperture2 = get_aper2(aperture1, aperture_k, aperture_offset)\n m = spec2['aperture'] == aperture2\n if m.sum()==0:\n continue\n row2 = spec2[m][0]\n flux1 = row1[colname1]\n flux2 = row2[colname2]\n for pixel_k in search_pixel_k_lst:\n '''\n if aperture_k == -1 and pixel_k == -1:\n fig1 = plt.figure(dpi=150)\n ax1 = fig1.gca()\n ax1.plot(flux1[::pixel_k], 'C0')\n ax1.plot(flux2, 'C1')\n ax1.set_title('Aper1 = %d, Aper2 = %d (%d, %d, %d)'%(\n aperture1, aperture2, aperture_k, aperture_offset,\n pixel_k))\n 
fig1.savefig('check_%d_%d_%d_%02d_%02d_.png'%(\n aperture_k, aperture_offset, pixel_k, aperture1,\n aperture2))\n plt.close(fig1)\n '''\n\n ccf_lst = get_simple_ccf(flux1[::pixel_k], flux2,\n pixel_shift_lst)\n # find the pixel shift\n calc_shift = pixel_shift_lst[ccf_lst.argmax()]\n # pack the pixel shift into a list\n calc_pixel_shift_lst[pixel_k].append(calc_shift)\n\n if plot_ccf:\n axes2[pixel_k].plot(pixel_shift_lst, ccf_lst, alpha=0.4)\n # pixel direction loop ends here\n # order-by-order loop ends here\n\n # adjust the ccf figure and save\n if plot_ccf:\n for ax in axes2.values():\n ax.set_xlim(pixel_shift_lst[0], pixel_shift_lst[-1])\n fig2.savefig(figname_ccf.format(aperture_k, aperture_offset))\n plt.close(fig2)\n\n # convert calc_pixel_shift_lst to numpy array\n pixel_shift_mean = {1: None, -1: None}\n pixel_shift_std = {1: None, -1: None}\n for pixel_k in search_pixel_k_lst:\n tmp = np.array(calc_pixel_shift_lst[pixel_k])\n\n mean = tmp.mean()\n std = tmp.std()\n\n mean_lst[(aperture_k, pixel_k)].append(mean)\n scatter_lst[(aperture_k, pixel_k)].append(std)\n\n # used to search the global minimum shift scatter along all the\n # (aperture_k, aperture_offset, pixel_k) space\n all_mean_lst.append(mean)\n all_scatter_lst.append(std)\n scatter_id_lst.append((aperture_k, aperture_offset, pixel_k))\n\n # direction loop ends here\n\n # plot the scatters of peaks and save it as a figure file\n if plot_scatter:\n fig3 = plt.figure(dpi=150, figsize=(8,6))\n ax3 = fig3.gca()\n for key, scatters in scatter_lst.items():\n aperture_k, pixel_k = key\n if len(scatters)==0:\n continue\n ax3.plot(aperture_offset_lst, scatters,\n color = {1:'C0', -1:'C1'}[aperture_k],\n ls = {1:'-', -1:'--'}[pixel_k],\n label = 'Aperture k = {}, Pixel k = {}'.format(\n aperture_k, pixel_k))\n ax3.set_xlabel('Aperture Offset')\n ax3.set_ylabel('Scatter (pixel)')\n ax3.legend(loc='lower right')\n fig3.savefig(figname_scatter)\n plt.close(fig3)\n\n imin = np.argmin(all_scatter_lst)\n scatter_id = scatter_id_lst[imin]\n result_aperture_k = scatter_id[0]\n result_aperture_offset = scatter_id[1]\n result_pixel_k = scatter_id[2]\n result_pixel_offset = all_mean_lst[imin]\n\n # convert aperture_offset to real aperture_offset\n real_aperture_offset = {\n 1: result_aperture_offset - min_aper1 + min_aper2,\n -1: result_aperture_offset + max_aper1 + min_aper2,\n }[result_aperture_k]\n return (result_aperture_k, real_aperture_offset,\n result_pixel_k, result_pixel_offset)",
"def do_hc_wavesol(p, loc):\n\n # ----------------------------------------------------------------------\n # Read UNe solution\n # ----------------------------------------------------------------------\n wave_u_ne, amp_u_ne = spirouImage.ReadLineList(p)\n loc['LL_LINE'], loc['AMPL_LINE'] = wave_u_ne, amp_u_ne\n source = __NAME__ + '.main() + spirouImage.ReadLineList()'\n loc.set_sources(['ll_line', 'ampl_line'], source)\n\n # ----------------------------------------------------------------------\n # Generate wave map from wave solution\n # ----------------------------------------------------------------------\n loc = generate_wave_map(p, loc)\n\n # ----------------------------------------------------------------------\n # Create new wavelength solution (method 0, old cal_HC_E2DS_EA)\n # ----------------------------------------------------------------------\n if p['WAVE_MODE_HC'] == 0:\n\n # ---------------------------------------------------------------------\n # Find Gaussian Peaks in HC spectrum\n # ---------------------------------------------------------------------\n loc = find_hc_gauss_peaks(p, loc)\n\n # ---------------------------------------------------------------------\n # Start plotting session\n # ---------------------------------------------------------------------\n if p['DRS_PLOT'] > 0:\n # start interactive plot\n sPlt.start_interactive_session(p)\n\n # ---------------------------------------------------------------------\n # Fit Gaussian peaks (in triplets) to\n # ---------------------------------------------------------------------\n loc = fit_gaussian_triplets(p, loc)\n\n # ---------------------------------------------------------------------\n # Generate Resolution map and line profiles\n # ---------------------------------------------------------------------\n # log progress\n wmsg = 'Generating resolution map and calculating line spread function'\n WLOG(p, '', wmsg)\n # generate resolution map\n loc = generate_resolution_map(p, loc)\n # map line profile map\n if p['DRS_PLOT'] > 0:\n sPlt.wave_ea_plot_line_profiles(p, loc)\n\n # ---------------------------------------------------------------------\n # End plotting session\n # ---------------------------------------------------------------------\n # end interactive session\n if p['DRS_PLOT'] > 0:\n sPlt.end_interactive_session(p)\n\n # ----------------------------------------------------------------------\n # Set up all_lines storage\n # ----------------------------------------------------------------------\n\n # initialise up all_lines storage\n all_lines_1 = []\n\n # get parameters from p\n n_ord_start = p['WAVE_N_ORD_START']\n n_ord_final = p['WAVE_N_ORD_FINAL']\n\n # get values from loc:\n # line centers in pixels\n xgau = np.array(loc['XGAU_T'])\n # distance from catalogue in km/s - used for sanity checks\n dv = np.array(loc['DV_T'])\n # fitted polynomials per order\n fit_per_order = np.array(loc['POLY_WAVE_SOL'])\n # equivalent width of fitted gaussians to each line (in pixels)\n ew = np.array(loc['EW_T'])\n # amplitude of fitted gaussians to each line\n peak = np.array(loc['PEAK_T'])\n # catalogue line amplitude\n amp_catalog = np.array(loc['AMP_CATALOG'])\n # catalogue line wavelength\n wave_catalog = np.array(loc['WAVE_CATALOG'])\n # spectral order for each line\n ord_t = np.array(loc['ORD_T'])\n\n # loop through orders\n for iord in range(n_ord_start, n_ord_final):\n # keep relevant lines\n # -> right order\n # -> finite dv\n gg = (ord_t == iord) & (np.isfinite(dv))\n # put lines into ALL_LINES structure\n # 
reminder:\n # gparams[0] = output wavelengths\n # gparams[1] = output sigma(gauss fit width)\n # gparams[2] = output amplitude(gauss fit)\n # gparams[3] = difference in input / output wavelength\n # gparams[4] = input amplitudes\n # gparams[5] = output pixel positions\n # gparams[6] = output pixel sigma width (gauss fit width in pixels)\n # gparams[7] = output weights for the pixel position\n\n # dummy array for weights\n test = np.ones(np.shape(xgau[gg]), 'd') * 1e4\n # get the final wavelength value for each peak in the order\n output_wave_1 = np.polyval(fit_per_order[iord][::-1], xgau[gg])\n # convert the pixel equivalent width to wavelength units\n xgau_ew_ini = xgau[gg] - ew[gg] / 2\n xgau_ew_fin = xgau[gg] + ew[gg] / 2\n ew_ll_ini = np.polyval(fit_per_order[iord, :], xgau_ew_ini)\n ew_ll_fin = np.polyval(fit_per_order[iord, :], xgau_ew_fin)\n ew_ll = ew_ll_fin - ew_ll_ini\n # put all lines in the order into single array\n gau_params = np.column_stack((output_wave_1, ew_ll, peak[gg],\n wave_catalog[gg] - output_wave_1,\n amp_catalog[gg],\n xgau[gg], ew[gg], test))\n # append the array for the order into a list\n all_lines_1.append(gau_params)\n\n # add to loc\n loc['ALL_LINES_1'] = all_lines_1\n loc['LL_PARAM_1'] = np.array(fit_per_order)\n loc['LL_OUT_1'] = np.array(loc['WAVE_MAP2'])\n loc.set_sources(['ALL_LINES_1', 'LL_PARAM_1'], __NAME__ + '/main()')\n\n # For compatibility w/already defined functions, I need to save\n # here all_lines_2\n all_lines_2 = list(all_lines_1)\n loc['ALL_LINES_2'] = all_lines_2\n\n # ------------------------------------------------------------------\n # Littrow test\n # ------------------------------------------------------------------\n\n start = p['IC_LITTROW_ORDER_INIT_1']\n end = p['IC_LITTROW_ORDER_FINAL_1']\n\n # calculate echelle orders\n o_orders = np.arange(start, end)\n echelle_order = p['IC_HC_T_ORDER_START'] - o_orders\n loc['ECHELLE_ORDERS'] = echelle_order\n loc.set_source('ECHELLE_ORDERS', __NAME__ + '/main()')\n\n # Do Littrow check\n ckwargs = dict(ll=loc['LL_OUT_1'][start:end, :], iteration=1, log=True)\n loc = calculate_littrow_sol(p, loc, **ckwargs)\n\n # Plot wave solution littrow check\n if p['DRS_PLOT'] > 0:\n # plot littrow x pixels against fitted wavelength solution\n sPlt.wave_littrow_check_plot(p, loc, iteration=1)\n\n # ------------------------------------------------------------------\n # extrapolate Littrow solution\n # ------------------------------------------------------------------\n ekwargs = dict(ll=loc['LL_OUT_1'], iteration=1)\n loc = extrapolate_littrow_sol(p, loc, **ekwargs)\n\n # ------------------------------------------------------------------\n # Plot littrow solution\n # ------------------------------------------------------------------\n if p['DRS_PLOT'] > 0:\n # plot littrow x pixels against fitted wavelength solution\n sPlt.wave_littrow_extrap_plot(p, loc, iteration=1)\n\n return loc",
"def prepare(self):\n if self.pin.lower() == \"homo\":\n for i,line in enumerate(self.x):\n self.x[i] = [x - self.Lead_HOMOs_xval[i] for x in line]\n elif self.pin.lower() == \"lumo\":\n for i,line in enumerate(self.x):\n self.x[i] = [x - self.Lead_LUMOs_xval[i] for x in line]\n elif \"vac\" in self.pin.lower():\n for i,line in enumerate(self.x):\n self.x[i] = [x - self.vacuum[i] for x in line]\n elif \"ef\" in self.pin.lower():\n for i,line in enumerate(self.x):\n self.x[i] = [x - self.fermi_levels[i] for x in line]",
"def at_b (self):\n self.argc = int((len(n.coord[0]))/2)\n self.pts_con = np.array(self.coord[:,self.argc:len(n.coord[0])])\n\n self.xd = self.xdi\n self.zd = self.zdi \n \n for i, x in enumerate(self.xdi):\n self.aux_con = self.pts_con[0] - x \n self.arg1 = np.argmin(abs(self.aux_con)) \n \n if (self.aux_con[self.arg1] < 0 and self.arg1 == 0) or (self.aux_con[self.arg1] > 0 and self.arg1 == len(self.aux_con)-1):\n self.yd[i] = 99999.\n #print(self.yd[i],self.arg1)\n #print(self.aux_con)\n \n elif (self.aux_con[self.arg1] > 0 and self.aux_con[self.arg1+1] > self.aux_con[self.arg1]): #(self.aux_con[self.arg1] < 0 and self.aux_con[self.arg1-1] > self.aux_con[self.arg1]) or \n self.yd[i] = 99999.\n #print(self.yd[i],self.arg1)\n #print(self.aux_con)\n \n elif self.aux_con[self.arg1] < 0:\n #print(self.arg1)\n self.arg1 = self.arg1 + self.argc\n self.arg2 = self.arg1 - 1\n self.yd[i] = self.coord[1,n.arg1] + (x-self.coord[0,n.arg1])*(self.coord[1,n.arg2]-self.coord[1,n.arg1])/(self.coord[0,n.arg2]-self.coord[0,n.arg1])\n #print(self.yd[i],self.arg1,self.arg2)\n #print(self.aux_con)\n\n elif self.aux_con[self.arg1] > 0:\n #print(self.arg1) \n self.arg1 = self.arg1 + self.argc\n self.arg2 = self.arg1 + 1\n self.yd[i] = self.coord[1,n.arg1] + (x-self.coord[0,n.arg1])*(self.coord[1,n.arg2]-self.coord[1,n.arg1])/(self.coord[0,n.arg2]-self.coord[0,n.arg1]) \n #print(self.yd[i],self.arg1,self.arg2)\n #print(self.aux_con)\n \n #print('Defensa {0}\\n{1}: {2}\\n{3}: {4}'.format(i,self.arg1,self.aux_con[self.arg1],self.arg2,self.aux_con[self.arg2])) \n \n #self.yd = self.yd\n self.b = np.array([self.xd,self.yd,self.zd])\n #self.b.loc[:,('y')] = self.b.loc[:,('y')] ",
"def layer_offsets(self):\n ...",
"def center(sourcelocs, facutmznum, fachemi):\n \n # Fill up lists of x and y coordinates of all source vertices \n vertx_l = []\n verty_l = []\n for index, row in sourcelocs.iterrows():\n \n vertx_l.append(row[\"utme\"])\n verty_l.append(row[\"utmn\"])\n \n # If this is an area source, add the other 3 corners to vertex list\n if row[\"source_type\"].upper() == \"A\":\n angle_rad = m.radians(row[\"angle\"])\n utme1 = row[\"utme\"] + row[\"lengthx\"] * m.cos(angle_rad)\n utmn1 = row[\"utmn\"] - row[\"lengthx\"] * m.sin(angle_rad)\n utme2 = (row[\"utme\"] + (row[\"lengthx\"] * m.cos(angle_rad)) +\n (row[\"lengthy\"] * m.sin(angle_rad)))\n utmn2 = (row[\"utmn\"] + (row[\"lengthy\"] * m.cos(angle_rad)) -\n (row[\"lengthx\"] * m.sin(angle_rad)))\n utme3 = row[\"utme\"] + row[\"lengthy\"] * m.sin(angle_rad)\n utmn3 = row[\"utmn\"] + row[\"lengthy\"] * m.cos(angle_rad)\n vertx_l.append(utme1)\n vertx_l.append(utme2)\n vertx_l.append(utme3)\n verty_l.append(utmn1)\n verty_l.append(utmn2)\n verty_l.append(utmn3)\n \n # If this is a volume source, then add the vertices of it\n if row[\"source_type\"].upper() == \"V\":\n utme1 = row[\"utme\"] + row[\"lengthx\"] * m.sqrt(2)/2\n utmn1 = row[\"utmn\"] - row[\"lengthy\"] * m.sqrt(2)/2\n utme2 = row[\"utme\"] + row[\"lengthx\"] * m.sqrt(2)/2\n utmn2 = row[\"utmn\"] + row[\"lengthy\"] * m.sqrt(2)/2\n utme3 = row[\"utme\"] - row[\"lengthx\"] * m.sqrt(2)/2\n utmn3 = row[\"utmn\"] + row[\"lengthy\"] * m.sqrt(2)/2\n vertx_l.append(utme1)\n vertx_l.append(utme2)\n vertx_l.append(utme3)\n verty_l.append(utmn1)\n verty_l.append(utmn2)\n verty_l.append(utmn3)\n \n # If line or buoyant line source, add second vertex\n if row[\"source_type\"].upper() == \"N\" or row[\"source_type\"].upper() == \"B\":\n vertx_l.append(row[\"utme_x2\"])\n verty_l.append(row[\"utmn_y2\"]) \n \n vertx_a = np.array(vertx_l)\n verty_a = np.array(verty_l)\n\n \n # Combine the x and y vertices lists into list of tuples and then get a\n # unique list of vertices of the form (x, y) where x=utme and y=utmn\n sourceverts = list(zip(vertx_l, verty_l))\n unique_verts = list(set(sourceverts))\n \n \n # Find the two vertices that are the farthest apart\n # Also find the corners of the modeling domain\n \n max_dist = 0\n max_x = min_x = vertx_a[0]\n max_y = min_y = verty_a[0]\n \n if len(unique_verts) > 1: #more than one source coordinate\n \n # initialize\n xmax1 = unique_verts[0][0]\n ymax1 = unique_verts[0][1]\n xmax2 = unique_verts[1][0]\n ymax2 = unique_verts[1][1]\n \n for i in range(0, len(unique_verts)-1):\n \n # corners\n max_x = max(max_x, unique_verts[i][0])\n max_y = max(max_y, unique_verts[i][1])\n min_x = min(min_x, unique_verts[i][0])\n min_y = min(min_y, unique_verts[i][1])\n \n # find farthest apart\n j = i + 1\n for k in range(j, len(unique_verts)):\n dist = m.sqrt((unique_verts[i][0] - unique_verts[k][0])**2 + \n (unique_verts[i][1] - unique_verts[k][1])**2)\n if dist > max_dist:\n max_dist = dist\n xmax1 = unique_verts[i][0]\n ymax1 = unique_verts[i][1]\n xmax2 = unique_verts[k][0]\n ymax2 = unique_verts[k][1]\n \n # Calculate the center of the facility in utm coordinates\n cenx = round((xmax1 + xmax2) / 2)\n ceny = round((ymax1 + ymax2) / 2)\n \n else: #single source coordinate\n \n # Calculate the center of the facility in utm coordinates\n cenx = round(max_x)\n ceny = round(max_y)\n\n\n # Compute the lat/lon of the center\n utmz = str(facutmznum) + fachemi\n cenlat, cenlon = UTM.utm2ll(ceny, cenx, utmz)\n \n return cenx, ceny, cenlon, cenlat, max_dist, vertx_a, verty_a",
"def power_points():\n next_reading = power_readings()\n stretch = []\n\n def next():\n nonlocal stretch, next_reading\n stretch.append(next_reading())\n if len(stretch) > XMAX + 1:\n stretch.pop(0)\n x = XMAX + 1 - len(stretch)\n points = []\n for y in stretch:\n points.append((x, y))\n points.append((x, 0))\n x += 1\n return points\n\n return next",
"def p2p_xyz(start_point, end_point, top_left_cor, cellsize, dem):\n start_cell = (int((start_point[0] - top_left_cor[0]) / cellsize[0]),\n int((start_point[1] - top_left_cor[1]) / cellsize[1]))\n end_cell = (int((end_point[0] - top_left_cor[0]) / cellsize[0]),\n int((end_point[1] - top_left_cor[1]) / cellsize[1]))\n cells = misc.get_line(start_cell, end_cell) \n pnts = []\n elev = []\n \n dem_elv = dem[:,1]\n dem_indx = dem[:,2:4]\n\n for cell in cells:\n x = top_left_cor[0] + cell[0] * cellsize[0] + cellsize[0] / 2\n y = top_left_cor[1] + cell[1] * cellsize[1] + cellsize[1] / 2\n #xy_indx=[str(cell[0]),str(cell[1])]\n z_indx=np.logical_and(np.equal(dem_indx[:,0],cell[0]),np.equal(dem_indx[:,1],cell[1]))\n try:\n z=dem_elv[z_indx][0]\n except (np.sum(z_indx)>1):\n print(\"Oops! That was more than one indices in dem matching the query index (in getCellValue)\")\n #z_indx = [i for i,j in enumerate(dem_indx) if j == xy_indx]\n z = float(dem_elv[z_indx])\n pnts.append((x, y))\n elev.append(z)\n return pnts, elev",
"def __init__(self, model, line, segments = None, influence = None, \r\n strength = 1, variables = [], priors=[]):\r\n\r\n import numpy as np\r\n from scipy.interpolate import interp1d\r\n import copy\r\n \r\n self.model = model\r\n model.elementlist.append(self)\r\n \r\n self.variables = variables\r\n self.priors = priors\r\n \r\n # ---------------------------------------------------------------------\r\n # Subdivide the provided no flow boundary into #segments pieces\r\n \r\n self.line_raw = copy.copy(line)\r\n \r\n if segments is None:\r\n \r\n self.segments = line.shape[0]-1\r\n \r\n else:\r\n self.segments = segments\r\n \r\n if self.segments < self.line_raw.shape[0]-1:\r\n \r\n raise Exception('Number of segments '+str(self.segments)+\" mustn't be smaller than number of line points \"+str(line.shape[0])+'.')\r\n \r\n if self.segments > self.line_raw.shape[0]:\r\n \r\n # Subdivide the line\r\n self.line = self.subdivide_line(line,self.segments)\r\n self.line_c = copy.copy(self.line[:,0] + 1j*self.line[:,1])\r\n else:\r\n \r\n self.line = self.line_raw.copy()\r\n self.line_c = self.line[:,0] + 1j*self.line[:,1]\r\n \r\n # Also get the normal vector components to each segment\r\n self.line_nvec = self.line[:,1] - 1j*self.line[:,0]\r\n self.line_nvec = self.line_nvec/np.abs(self.line_nvec)\r\n\r\n # --------------------------------------------------------------------- \r\n \r\n \r\n \r\n \r\n self.strength = np.ones(self.segments)*strength\r\n \r\n if influence is None:\r\n self.influence = self.model.domain_radius*2\r\n else:\r\n self.influence = influence\r\n \r\n \r\n self.Zi = []\r\n self.offset_outside = []\r\n self.L = []\r\n self.zc = []\r\n self.segment_nvec = []\r\n self.head_target = []\r\n \r\n for seg in range(self.segments):\r\n \r\n self.L += [np.abs(self.line_c[seg+1] - self.line_c[seg])]\r\n \r\n influence_pt = (self.line_c[seg+1]-self.line_c[seg])*self.influence/self.L[seg] + self.line_c[seg]\r\n Z = (2*influence_pt-(self.line_c[seg]+self.line_c[seg+1]))/(self.line_c[seg+1]-self.line_c[seg])\r\n self.Zi += [copy.copy(Z)]\r\n \r\n self.zc += [(self.line_c[seg]+self.line_c[seg+1])/2]\r\n \r\n # Calculate the normal vector to this segment\r\n self.segment_nvec += [(self.line_c[seg]-self.line_c[seg+1])]\r\n self.segment_nvec[-1]= [np.imag(self.segment_nvec[-1])-1j*np.real(self.segment_nvec[-1])]\r\n \r\n part1 = np.nan_to_num((Z+1)*np.log(Z+1))\r\n part2 = np.nan_to_num((Z-1)*np.log(Z-1))\r\n self.offset_outside += [self.L[seg] / (4*np.pi) * (part1 - part2)]\r\n \r\n # Convert list of segment centers to array\r\n self.zc = np.asarray(self.zc)\r\n \r\n \r\n # Check if the prior matches the number of parameters\r\n if len(self.priors) != len(self.variables):\r\n raise Exception('Number of priors must match number of unknown variables. Number of priors: '+str(self.priors)+' / Number of unknown variables: '+str(len(self.variables)))\r\n \r\n # Go through all elements\r\n if len(self.variables) > 0:\r\n # There are some model variables specified\r\n for idx,var in enumerate(self.variables):\r\n self.model.num_params += 1\r\n exec(\"self.model.params += [self.%s]\" % var)\r\n self.model.priors += [self.priors[idx]]\r\n self.model.variables += [var]\r\n if 'name' in list(self.priors[idx].keys()):\r\n self.model.param_names += [self.priors[idx]['name']] \r\n else: \r\n self.model.param_names += ['unknown']",
"def get_m1_m4_SEN_info(tx, ty, m1_info, y_k):\n \"\"\"\n 1 Get information from m1_info\n \"\"\"\n x_m1 = m1_info[0]\n y_m1 = m1_info[1]\n z_m = m1_info[2]\n t_m = z_m / 3\n\n t_sen = m1_info[4]\n\n m1_points = m1_info[3]\n\n m1_p0 = m1_points[0]\n m1_p1 = m1_points[1]\n m1_p2 = m1_points[2]\n m1_p3 = m1_points[3]\n\n b_p = (tx, ty)\n\n u_distance = rs.Distance(m1_p3, b_p) - y_k / 2\n l_distance = rs.Distance(m1_p0, b_p) - y_k / 2\n\n \"\"\"\n 2 Get SEN information\n \"\"\"\n # Automatically fixed---------------------------------------------------\n # t_sen = rs.GetReal(\"Put Int(mm): Thickness of material to cut SEN.\", t_m / 2, None, None)\n w_sen = t_sen\n n_w_sen = w_sen / 2\n h_sen = z_m\n\n u_max_n = u_distance / (2 * w_sen - n_w_sen) # NOTE: divide max_n by 2 to controll \"n\"\n u_max_n = int(u_max_n)\n\n u_n = u_max_n / 4\n u_n = int(u_n)\n\n l_max_n = l_distance / (2 * w_sen - n_w_sen) # NOTE: divide max_n by 2 to controll \"n\"\n l_max_n = int(l_max_n)\n\n l_n = l_max_n / 4\n l_n = int(l_n)\n\n\n set = 20\n u_offset = (u_distance - 2 * set) / (u_n - 1)\n l_offset = (l_distance - 2 * set) / (l_n - 1)\n\n SEN_info = [w_sen, n_w_sen, h_sen, t_sen, u_n, l_n, set, u_offset, l_offset]\n\n return SEN_info",
"def _gather_points(self):\n # This is just a stub for now. We should really find the lines only\n # inside the screen range here.\n\n x = self.index.get_data()\n y = self.value.get_data()\n rad= min(self.width/2.0,self.height/2.0)\n sx = x*rad+ self.x + self.width/2.0\n sy = y*rad+ self.y + self.height/2.0\n\n points = transpose(array((sx,sy)))\n self._cached_data_pts = points\n self._cache_valid = True\n return",
"def line(x1,y1,x2,y2,z_thickness,laser):\r\n\t#Global variables that are used by all algorithms\r\n\tlayers = int(z_thickness/laser[\"z_spacing\"])\r\n\r\n\t#Works out offset when beginning on a new layer\r\n\ttaper = math.tan(math.radians(laser[\"kerf_angle\"]/2)) * laser[\"z_spacing\"]\r\n\ttaper_x,taper_y = offset(x1,y1,x2,y2,taper)\r\n\r\n\t#Works out offset between each parallel scan on the same layer\r\n\tdelta_x,delta_y = offset(x1,y1,x2,y2,laser[\"xy_spacing\"])\r\n\r\n\t#Works out maximum offset from starting line, we don't want to exceed this at any point.\r\n\tmax_taper = math.tan(math.radians(laser[\"kerf_angle\"]/2)) * (z_thickness) * 2\r\n\tmax_delta_x, max_delta_y = offset(x1,y1,x2,y2,max_taper)\r\n\t#max_delta_x, max_delta_y = 2*max_delta_x, 2*max_delta_y\r\n\r\n\t#Loops through each layer, in which we fit as many parallel raster scans as the maximum offset allows\r\n\tcutlist = []\r\n\tfor a in range(layers):\r\n\t\tnew_x1,new_x2,new_y1,new_y2 = x1 + a*taper_x, x2 + a*taper_x, y1 + a*taper_y, y2 + a*taper_y\r\n\t\ti = 0\r\n\t\tcutlist.append([\"z_step\", str(-laser[\"z_spacing\"])])\r\n\t\twhile abs(new_x1-x1) < abs(max_delta_x) or abs(new_y1-y1) < abs(max_delta_y):\r\n\t\t\t#This use of i is to reduce the jump distance between individual scans\r\n\t\t\tif i % 2 == 0:\r\n\t\t\t\tcutlist.append([\"jump\", f\"{new_x1:.6f}\", f\"{new_y1:.6f}\"])\r\n\t\t\t\tcutlist.append([\"mark\", f\"{new_x2:.6f}\", f\"{new_y2:.6f}\"])\r\n\t\t\telse:\r\n\t\t\t\tcutlist.append([\"jump\", f\"{new_x2:.6f}\", f\"{new_y2:.6f}\"])\r\n\t\t\t\tcutlist.append([\"mark\", f\"{new_x1:.6f}\", f\"{new_y1:.6f}\"])\r\n\t\t\tnew_x1,new_x2,new_y1,new_y2 = new_x1 + delta_x, new_x2 + delta_x, new_y1 + delta_y, new_y2 + delta_y\r\n\t\t\ti = i + 1\r\n\t\t#Having completed one layer, the laser moves down to begin the next layer\r\n\t\tmax_delta_x = max_delta_x - taper_x\r\n\r\n\tcutlist.insert(0, [\"set_trigger4\", \"1\", \"0\", \"7\", \"8\", \"45\"])\r\n\tcutlist.append([\"stop_trigger\"])\r\n\treturn json.dumps(cutlist)",
"def test():\n\n file = 'crosssection.dat'\n f = open(file,'r')\n lines = f.readlines()\n nline = len(lines)\n points = np.zeros(shape=(nline,4))\n sigtable = np.zeros(nline)\n for i in range(nline):\n points[i,0] = float(lines[i].split()[0])\n points[i,1] = float(lines[i].split()[1])\n points[i,2] = float(lines[i].split()[2])\n points[i,3] = float(lines[i].split()[3])\n sigtable[i] = float(lines[i].split()[4])\n\n nbin = 60\n npts = nline/nbin\n\n # checking lensing cross section against magnitude\n '''\n for i in range(npts):\n plt.plot(points[i*nbin:(i+1)*nbin,3],sigtable[i*nbin:(i+1)*nbin])\n plt.show()\n '''\n npts = npts/nbin\n\n # checking lensing cross section against velocity dispersion\n '''\n for i in range(nline):\n mask, = np.where((points[:,1]==points[i,1])&(points[:,0]==points[i,0])\\\n &(points[:,3]==points[i,3]))\n vel = points[mask,2]\n sigma = sigtable[mask]\n plt.plot(vel,sigma)\n plt.show()\n '''\n\n # checking lensing cross section against lens redshift\n #'''\n for i in range(3000,nline):\n mask, = np.where((points[:,1]==points[i,1])&(points[:,2]==points[i,2])\\\n &(points[:,3]==points[i,3]))\n print mask\n zl = points[mask,0]\n sigma = sigtable[mask]\n plt.plot(zl,sigma)\n plt.show()\n #'''\n\n # checking lensing cross section against source redshift\n for i in reversed(range(nline)):\n mask, = np.where((points[:,0]==points[i,0])&(points[:,2]==points[i,2])\\\n &(points[:,3]==points[i,3]))\n print mask\n zs = points[mask,1]\n sigma = sigtable[mask]\n plt.plot(zs,sigma)\n plt.show()",
"def multi_velo_inspec(self, n = 60, lat0 = 60, lat1 = 90, pole = \"north\"):\n inds = self.mlat_finder(lat1, lat0, pole)[1]\n NeA = self.NeA[inds]\n NeB = self.NeB[inds]\n NeC = self.NeC[inds]\n\n secondsA = self.secondsA[inds]\n secondsB = self.secondsB[inds]\n secondsC = self.secondsC[inds]\n\n\n mlatA = self.mlatA[inds]\n mlatB = self.mlatB[inds]\n mlatC = self.mlatC[inds]\n\n mean_range = 5\n NeA = self.meanie(NeA, mean_range)\n NeB = self.meanie(NeB, mean_range)\n NeC = self.meanie(NeC, mean_range)\n \n \n N = int((len(NeA)/n*2) - 1) #nr of windows\n \n dx = (secondsB[1]-secondsB[0])*self.velB[0]\n \n nBAs = []\n nBCs = []\n nACs = []\n \n for i in range(N):\n startind = int(i/2*n)\n stopind = int((i/2+1)*n)\n temp_NeA = NeA[startind:stopind]\n temp_NeB = NeB[startind:stopind]\n temp_NeC = NeC[startind:stopind]\n \n temp_secondsA = secondsA[startind:stopind]\n temp_secondsB = secondsB[startind:stopind]\n temp_secondsC = secondsC[startind:stopind]\n \n \n curr_timediff = np.round((temp_secondsB[1:] - temp_secondsB[:-1])-(1/self.fs))\n if np.sum(curr_timediff) > 2:\n continue\n \n gradA = (temp_NeA[1:] - temp_NeA[:-1])/dx\n gradB = (temp_NeB[1:] - temp_NeB[:-1])/dx\n gradC = (temp_NeC[1:] - temp_NeC[:-1])/dx\n \n if np.max(gradA) < 0.9:\n continue\n \n stdA = np.std(gradA)\n stdB = np.std(gradB)\n stdC = np.std(gradC)\n \n meanA = temp_secondsB[np.where(gradA == np.max(gradA))][0]\n meanB = temp_secondsB[np.where(gradB == np.max(gradB))][0]\n meanC = temp_secondsB[np.where(gradC == np.max(gradC))][0]\n \n p0A = [1, meanA, stdA]\n p0B = [1, meanB, stdB]\n p0C = [1, meanB, stdB]\n \n poptA, pcovA = curve_fit(self.gaussian, temp_secondsB[:-1], gradA, p0 = p0A)\n poptB, pcovB = curve_fit(self.gaussian, temp_secondsB[:-1], gradB, p0 = p0B)\n poptC, pcovC = curve_fit(self.gaussian, temp_secondsB[:-1], gradC, p0 = p0C)\n \n nBA = poptB[1] - poptA[1]\n nBC = poptB[1] - poptC[1]\n nAC = poptA[1] - poptC[1]\n \n nBAs.append(nBA)\n nBCs.append(nBC)\n nACs.append(nAC)\n \n \n sBA = self.BA_shift/2 #time delay BA\n sBC = self.BC_shift/2 #time delay BC\n sAC = (self.BC_shift - self.BA_shift)/2\n V = self.velA[0]\n for i in range(len(nBAs)):\n VBA = self.along_track_velo(V, sBA, nBAs[i])\n VBC = self.along_track_velo(V, sBC, nBCs[i])\n VAC = self.along_track_velo(V, sAC, nACs[i])\n \n print(VBA)\n print(VBC)\n print(VAC)\n print(\"________________________________________\")",
"def fun_location(self, l1_xpos, l1_xscale):\n const_1 = self.tik_instance.Tensor(\"float32\", (8, 8),\n name=\"const_1\",\n scope=tik.scope_ubuf)\n const_0 = self.tik_instance.Tensor(\"float32\", (8, 8),\n name=\"const_0\",\n scope=tik.scope_ubuf)\n index_256 = self.tik_instance.Tensor(\"float32\", (256, 8),\n name=\"index_256\",\n scope=tik.scope_ubuf)\n self.tik_instance.vector_dup(MASK, const_1, float(1), 1, 1, 8)\n self.tik_instance.vector_dup(MASK, const_0, 0, 1, 1, 8)\n int32_256_ub = self.tik_instance.Tensor(\"int32\", (256, 8),\n name=\"int32_256_ub\",\n scope=tik.scope_ubuf)\n scale_512_x = self.tik_instance.Tensor(\"float32\", (512, 8),\n name=\"scale_512_x\",\n scope=tik.scope_ubuf)\n const_weight = self.tik_instance.Tensor(\"float32\", (8, 8),\n name=\"const_weight\",\n scope=tik.scope_ubuf)\n self.tik_instance.vector_dup(MASK, const_weight, float(self.weight_in), 1, 1, 8)\n #x zuobiao\n with self.tik_instance.for_range(0, self.w_in_loop) as w_index:\n with self.tik_instance.for_range(0, 256) as num_index:\n self.tik_instance.vector_dup(8, index_256[num_index*8],\n w_index*256+num_index, 1, 1, 8)\n if self.half_pixel_centers:\n self.tik_instance.vadds(MASK, index_256, index_256, float(0.5),\n 32, 1, 1, 8, 8)\n self.tik_instance.vmuls(MASK, scale_512_x, index_256, float(self.weight_out),\n 32, 1, 1, 8, 8)\n self.tik_instance.vdiv(MASK, scale_512_x, scale_512_x, const_weight,\n 32, 1, 1, 1, 8, 8, 0)\n if self.half_pixel_centers:\n self.tik_instance.vadds(MASK, scale_512_x, scale_512_x, float(-0.5),\n 32, 1, 1, 8, 8)\n self.tik_instance.vmax(MASK, scale_512_x[0], scale_512_x[0], const_0[0],\n 32, 1, 1, 1, 8, 8, 0)\n self.tik_instance.vconv(MASK, \"floor\", int32_256_ub[0],\n scale_512_x[0], 32, 1, 1, 8, 8)\n self.tik_instance.vconv(MASK, \"\", scale_512_x[2048],\n int32_256_ub[0], 32, 1, 1, 8, 8)\n self.tik_instance.data_move(l1_xpos[w_index*256*8], int32_256_ub[0],\n 0, 1, 256, 0, 0)\n self.tik_instance.vsub(MASK, scale_512_x[2048],\n scale_512_x[0], scale_512_x[2048],\n 32, 1, 1, 1, 8, 8, 8)\n self.tik_instance.vsub(MASK, scale_512_x[0],\n const_1[0], scale_512_x[2048],\n 32, 1, 1, 1, 8, 0, 8)\n self.tik_instance.data_move(l1_xscale[w_index*512*8],\n scale_512_x[0], 0, 1, 512, 0, 0)",
"def hexapodZernikeLinearModel_hexapodcoordinate():\n Tfile='/home/jghao/research/decamFocus/psf_withseeing/finerGrid_coeff_matrix/zernike_coeff_finerGrid_training.cp'\n b=p.load(open(Tfile))\n nobs = len(b)\n x = b[:,0]\n y = b[:,1]\n z = b[:,2]\n theta = b[:,3]\n phi = b[:,4]\n fwhm = b[:,5]\n e1 = b[:,6]\n e2 = b[:,7]\n thetax = theta*np.cos(np.deg2rad(phi))\n thetay = theta*np.sin(np.deg2rad(phi))\n xh = x*1000 # convert to hexapod coordinate\n yh = -y*1000\n zh = -z*1000\n xtilth = - thetay\n ytilth = - thetax\n\n M22realTrefoil2 = b[:,37] # for x decenter\n M22imagTrefoil1 = b[:,54] \n M22TrefoilXshift = 0.5*(M22realTrefoil2+M22imagTrefoil1)\n\n M22realTrefoil1 = b[:,34] # for y decenter\n M22imagTrefoil2 = b[:,57] \n M22TrefoilYshift = 0.5*(M22realTrefoil1 - M22imagTrefoil2)\n\n M20defocus = b[:,12] # for defocus\n\n M22realComa2 = b[:,36] # for x-tilt\n M22imagComa1 = b[:,55]\n M22ComaXtilt = 0.5*(M22realComa2+M22imagComa1)\n\n M22realComa1 = b[:,35] # for y-tilt\n M22imagComa2 = b[:,56]\n M22ComaYtilt = 0.5*(M22realComa1 - M22imagComa2)\n \n pl.figure(figsize=(21,12))\n pl.subplot(2,3,1)\n t=bp.bin_scatter(M22TrefoilXshift,xh,nbins=20,fmt='bo',scatter=True)\n res = linefit(M22TrefoilXshift,xh)\n pl.plot(M22TrefoilXshift,M22TrefoilXshift*res[1]+res[0],'r,')\n pl.ylabel('x-decenter [micron]')\n pl.xlabel('(M22realTrefoil2+M22imagTrefoil1)/2')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.grid()\n pl.subplot(2,3,2)\n t=bp.bin_scatter(M22TrefoilYshift,yh,nbins=20,fmt='bo',scatter=True)\n res = linefit(M22TrefoilYshift,yh)\n pl.plot(M22TrefoilYshift,M22TrefoilYshift*res[1]+res[0],'r,')\n pl.ylabel('y-decenter [micron]')\n pl.xlabel('(M22realTrefoil1 - M22imagTrefoil2)/2')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.grid()\n pl.subplot(2,3,3)\n t=bp.bin_scatter(M20defocus,zh,nbins=20,fmt='bo',scatter=True)\n res = linefit(M20defocus,zh)\n pl.plot(M20defocus,M20defocus*res[1]+res[0],'r,')\n pl.ylabel('z-defocus [micron]')\n pl.xlabel('M20defocus')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.grid()\n pl.subplot(2,3,4)\n t=bp.bin_scatter(M22ComaXtilt,ytilth,nbins=20,fmt='bo',scatter=True)\n res = linefit(M22ComaXtilt,ytilth)\n pl.plot(M22ComaXtilt,M22ComaXtilt*res[1]+res[0],'r,')\n pl.ylabel('y-tilt [arcsec]') # in hexapod coordiate, xtilt and y tilt is switched from the CRAY coordiante\n pl.xlabel('(M22realComa2+M22imagComa1)/2')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.grid()\n pl.subplot(2,3,5)\n t=bp.bin_scatter(M22ComaYtilt,xtilth,nbins=20,fmt='bo',scatter=True)\n res = linefit(M22ComaYtilt,xtilth)\n pl.plot(M22ComaYtilt,M22ComaYtilt*res[1]+res[0],'r,')\n pl.ylabel('x-tilt [arcsec]')\n pl.xlabel('(M22realComa1 - M22imagComa2)/2')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.grid()\n pl.savefig('linearModel_hexapod_coordinate.png')\n pl.close()",
"def coordinates(self):",
"def m1_make_lower_shape_points_list(tx, ty, m1_info, SEN_info):\n \"\"\"\n 1 Get information from m1_info & SEN_info\n \"\"\"\n x_m1 = m1_info[0]\n y_m1 = m1_info[1]\n z_m = m1_info[2]\n\n m1_points = m1_info[3]\n\n m1_p0 = m1_points[0]\n m1_p1 = m1_points[1]\n m1_p2 = m1_points[2]\n m1_p3 = m1_points[3]\n\n w_sen = SEN_info[0]\n n_w_sen = SEN_info[1]\n h_sen = SEN_info[2]\n t_sen = SEN_info[3]\n u_n = SEN_info[4]\n l_n = SEN_info[5]\n set = SEN_info[6]\n u_offset = SEN_info[7]\n l_offset = SEN_info[8]\n\n \"\"\"\n 2 Make lists.\n lower_shape_upper_left_row list\n lower_shape_upper_right_row list\n\n lower_shape_lower_left_row list\n lower_shape_lower_right_row list\n \"\"\"\n # upper side\n lower_shape_upper_left_row = []\n lower_shape_upper_right_row = []\n\n for i in range(u_n):\n # left row\n ix = tx + t_sen\n iy = ty + (i * u_offset + set) + 10\n\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = Y_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n left_points = [p4, p3, p2, p1, p8, p7, p6, p5]\n lower_shape_upper_left_row.extend((left_points))\n\n for i in range(u_n - 1, -1, -1):\n # right row\n ix = tx + (x_m1 - t_sen)\n iy = ty + (i * u_offset + set) + 10\n\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = Y_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n right_points = [p8, p7, p6, p5, p4, p3, p2, p1]\n lower_shape_upper_right_row.extend(right_points)\n\n # lower side\n lower_shape_lower_left_row = []\n lower_shape_lower_right_row = []\n\n for i in range(l_n - 1, -1, -1):\n # left row\n ix = tx + t_sen\n iy = ty - (i * l_offset + set) - 10\n\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = Y_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n left_points = [p4, p3, p2, p1, p8, p7, p6, p5]\n lower_shape_lower_left_row.extend((left_points))\n\n for i in range(l_n):\n # right row\n ix = tx + (x_m1 - t_sen)\n iy = ty - (i * l_offset + set) - 10\n\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = Y_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n right_points = [p8, p7, p6, p5, p4, p3, p2, p1]\n lower_shape_lower_right_row.extend(right_points)\n\n lower_shape_upper = [lower_shape_upper_left_row, lower_shape_upper_right_row]\n lower_shape_lower = [lower_shape_lower_left_row, lower_shape_lower_right_row]\n\n return lower_shape_upper, lower_shape_lower",
"def gen_centers(self):\n\n \"\"\"x_track = self.cs.discrete_rollout()\n t = np.arange(len(x_track))*self.dt\n # choose the points in time we'd like centers to be at\n c_des = np.linspace(0, self.cs.run_time, self.n_bfs)\n self.c = np.zeros(len(c_des))\n for ii, point in enumerate(c_des):\n diff = abs(t - point)\n self.c[ii] = x_track[np.where(diff == min(diff))[0][0]]\"\"\"\n\n # desired activations throughout time\n des_c = jnp.linspace(0, self.cs.run_time, self.n_bfs)\n\n self.c = np.ones(len(des_c))\n for n in range(len(des_c)):\n # finding x for desired times t\n self.c[n] = jnp.exp(-self.cs.ax * des_c[n])\n self.c = jnp.array(self.c)",
"def initial_coordinates(mof, energy_map, atom_list, energy_limit):\n reference_atom = 'C'\n ref_atom_index = atom_list['atom'].index(reference_atom) + 3\n initial_coors = []\n energy_count = 0\n pbc_count = 0\n for emap_line in energy_map:\n emap_coor = Coor([emap_line[0], emap_line[1], emap_line[2]])\n pbc_coor = emap_coor.pbc(mof.uc_size, mof.uc_angle, mof.frac_ucv)\n pbc_x = round(pbc_coor.x, 1)\n pbc_y = round(pbc_coor.y, 1)\n pbc_z = round(pbc_coor.z, 1)\n # print(emap_coor.x, pbc_x)\n if pbc_x == emap_coor.x and pbc_y == emap_coor.y and pbc_z == emap_coor.z:\n if emap_line[ref_atom_index] < energy_limit:\n initial_coors.append(Coor([emap_line[0], emap_line[1], emap_line[2]]))\n else:\n energy_count += 1\n else:\n pbc_count += 1\n\n # print('Ommited PBC: ', pbc_count, ' Energy: ', energy_count)\n return initial_coors",
"def getLocalMap(dist_compl):\n sdc=dist_compl*RES\n #clms are real ;)\n #rws are imaginary :D #rows\n map_padd = 1*RES #add a meter\n rws_ofs = abs(sdc.imag.min())+map_padd #offsetX\n rws = abs(sdc.imag.max())+(rws_ofs)\n clms_ofs = abs(sdc.real.min())+map_padd\n clms = abs(sdc.real.max())+(clms_ofs)\n M = np.zeros((np.round(rws+map_padd).astype(int),np.round(clms+map_padd).astype(int))).astype(dtype=MAP_D_TYPE)#empty local map\n Mg = M.copy()\n points = sdc + np.array([clms_ofs+1j*rws_ofs]) #scale\n #M[points.imag.astype(int),points.real.astype(int)]=10 \n for p in points:\n r=np.round(p.imag).astype(int)\n c=np.round(p.real).astype(int)\n try:\n #draw line in matrix\n lc = [np.round(rws_ofs).astype(int),np.round(clms_ofs).astype(int),r,c]\n rr, cc, val = line_aa(*lc) #not really demaning --> 1%\n M[rr, cc] = np.logical_or(M[rr,cc]>0, val>0) \n #add gaussian\n Mg[r-GPoints//2:r+GPoints//2,c-GPoints//2:c+GPoints//2]+=Gau\n except:\n print('Error: out of array when calculating the local map',r,c)\n Mg[Mg>100]=100 #cap the gaussian matrix\n car_pos_in_loc_mat = np.array([np.round(clms_ofs).astype(int), np.round(rws_ofs).astype(int)])\n #Mg[car_pos_in_loc_mat[1],car_pos_in_loc_mat[0]]=300 #add car pos\n return M*(-100)+Mg, car_pos_in_loc_mat",
"def buildcutlineset():\r\n cutlineset=[[[-3.2697,-3.2697],[-4.3304,-4.3304]],[[-3.2697,-4.3304],[-4.3304,-3.2697]]]\r\n cutlineset.extend([[[-3.2697,176.0104],[-4.3304,174.9497]],[[-3.2697,174.9497],[-4.3304,176.0104]]])\r\n cutlineset.extend([[[176.0104,176.0104],[174.9497,174.9497]],[[176.0104,174.9497],[174.9497,176.0104]]])\r\n cutlineset.extend([[[175.4800,-3.05],[175.4800,-4.55]],[[174.7300,-3.8],[176.2300,-3.8]]])\r\n \r\n for cutline in cutlineset:\r\n for pos in cutline:\r\n pos[0]=pos[0]+globalconfig.CUTLINE_X_OFFSET\r\n pos[1]=pos[1]+globalconfig.CUTLINE_Y_OFFSET\r\n \r\n for row in range(0,globalconfig.X_ARRAY_NUM):\r\n cutlineset.append([[globalconfig.X_BLANK+row*(globalconfig.X_LENGTH/globalconfig.CENTER_RATIO)+globalconfig.CUTLINE_X_OFFSET,0.0+globalconfig.CUTLINE_Y_OFFSET],[globalconfig.X_BLANK+row*(globalconfig.X_LENGTH/globalconfig.CENTER_RATIO)+globalconfig.CUTLINE_X_OFFSET,-3.0+globalconfig.CUTLINE_Y_OFFSET]])\r\n cutlineset.append([[globalconfig.X_BLANK+row*(globalconfig.X_LENGTH/globalconfig.CENTER_RATIO)+globalconfig.CUTLINE_X_OFFSET,171.68+globalconfig.CUTLINE_Y_OFFSET],[globalconfig.X_BLANK+row*(globalconfig.X_LENGTH/globalconfig.CENTER_RATIO)+globalconfig.CUTLINE_X_OFFSET,174.68+globalconfig.CUTLINE_Y_OFFSET]])\r\n for line in range(0,globalconfig.Y_ARRAY_NUM):\r\n cutlineset.append([[0.0+globalconfig.CUTLINE_X_OFFSET,globalconfig.Y_BLANK+line*(globalconfig.Y_LENGTH/globalconfig.CENTER_RATIO)+globalconfig.CUTLINE_Y_OFFSET],[-3.0+globalconfig.CUTLINE_X_OFFSET,globalconfig.Y_BLANK+line*(globalconfig.Y_LENGTH/globalconfig.CENTER_RATIO)+globalconfig.CUTLINE_Y_OFFSET]])\r\n cutlineset.append([[171.68+globalconfig.CUTLINE_X_OFFSET,globalconfig.Y_BLANK+line*(globalconfig.Y_LENGTH/globalconfig.CENTER_RATIO)+globalconfig.CUTLINE_Y_OFFSET],[174.68+globalconfig.CUTLINE_X_OFFSET,globalconfig.Y_BLANK+line*(globalconfig.Y_LENGTH/globalconfig.CENTER_RATIO)+globalconfig.CUTLINE_Y_OFFSET]])\r\n return cutlineset",
"def chaintocoords(chain, ang=False):\n newcoords = []\n for i in range(len(chain)):\n M_obj = chain.Structures[i].M\n if ang:\n coord = M_obj.xyzs[0]\n else:\n coord = M_obj.xyzs[0] / bohr2ang\n newcoords.append(coord.tolist())\n return newcoords",
"def point1(source, ants, elmin=15.0, showGUI=True, zoom=1, mode=0, \n nextSource=None, writeFITS=True, brightness=50,isInner=False,\n subtractBackground=False, ncoadd=1, dazbkg=2.0, centroidLoopMax=16,\n minsamples=None):\n global previous_source_\n antlist = helpers.makeList(ants)\n el = elevation(source)\n \n # Check for source too low, and bail out if it is\n if el < elmin:\n print 'Skipping %s because elevation(%.1f) is less than limit(%.1f)' \\\n %(source,el,elmin)\n return\n \n # Check for maximum elevation as well\n maxelLimit = 87.0\n if el > maxelLimit:\n print 'Skipping %s because elevation(%.1f) is greater than limit(%.1f)' \\\n %(source,el,maxelLimit)\n return\n\n # Print message\n msg = \"Pointing on %s at el=%5.1f (elmin = %5.1f); ants = %s\" % (source, el, elmin, str(ants))\n print msg\n SAC.scriptlog(msg)\n \n d = distance(source,previous_source_)\n print \" Distance from %s to %s is %.1f deg\" %(previous_source_,source,d)\n if mode > 0:\n if nextSource != None:\n print \" Next source will be %s\" % nextSource\n else:\n print \" Last source.\"\n # Send all antennas tracking to new source, even though some are\n # already there.\n if isInner :\n SAC.track(source)\n else : SAC.track(source, antlist, phaseCenter=False, waiton=SAC.NONE)\n antsTodo = antlist\n while len(antsTodo) > 0 :\n r = SAC.wait(SAC.TRACK, antsTodo, waiton=SAC.ANY)\n antsReady = r.ready\n antsTodo = r.notready\n for i in antsReady :\n show(source)\n #print \"Loading camera on antenna %d\" %i\n opticalSystem(i, auto=True, object=source, repeat=4, zoom=zoom,\n showGUI=showGUI, dontClose=True, \n brightness=brightness, ncoadd=ncoadd, dazbkg=dazbkg,\n subtractBackground=subtractBackground, \n centroidLoopMax=centroidLoopMax, minsamples=minsamples)\n # now that this ant is done, send it to next source\n if mode > 0:\n if nextSource != None: \n if isInner : SAC.track(nextSource)\n else : SAC.track(nextSource, i, waiton=SAC.NONE)\n previous_source_ = source",
"def GEEviLandsat(ptsFile,metric,timeStep,sensor,buf,poly,username,folderOut, scalePix = 30):\n \n # load required libraries\n import ee\n import math\n \n # Initialize the Earth Engine object, using the authentication credentials.\n ee.Initialize()\n\n ID_field = \"geeID\"\n\n #load pts or poly file\n pts1 = ee.FeatureCollection('users/' + username + '/' + str(ptsFile))\n\n #define dictionary for raster random names\n sensor_d = {}\n sensor_d['L4'] = 'LANDSAT/LT04/C01/T1_SR'\n sensor_d['L5'] = 'LANDSAT/LT05/C01/T1_SR'\n sensor_d['L7'] = 'LANDSAT/LE07/C01/T1_SR'\n sensor_d['L8'] = 'LANDSAT/LC08/C01/T1_SR'\n\n time_d = {}\n time_d['lowest'] = 'rl'\n time_d['month'] = 'rm'\n time_d['year'] = 'ry'\n \n \n #Computes the bits we need to extract.\n def getQABits(image, start, end, newName):\n pattern = 0\n listB = list(range(start, end+1))\n for one in listB:\n pattern += math.pow(2, one)\n pattern = int(pattern)\n \n return (image.select([0], [newName])\n .bitwiseAnd(pattern)\n .rightShift(start))\n \n for sen in sensor:\n LS = ee.ImageCollection(sensor_d[sen])\n #senL = [sen]\n \n def maskbyBits(img):\n QA = img.select('pixel_qa')\n QA1 = getQABits(QA, 3, 3, 'QA')\n QA2 = getQABits(QA, 5, 5, 'QA')\n\n mask = QA1.eq(0).And(QA2.eq(0))\n return img.updateMask(mask)\n \n LSm = LS.map(maskbyBits)\n \n lastImage = ee.Image(ee.ImageCollection(sensor_d[sen])\n .sort('system:time_start',False)\n .first())\n lastImageDate = lastImage.get('system:index').getInfo()\n\n firstImage = ee.Image(ee.ImageCollection(sensor_d[sen])\n .sort('system:time_start',True)\n .first())\n firstImageDate = firstImage.get('system:index').getInfo()\n \n startYear = int(firstImageDate[(len(firstImageDate)-8):(len(firstImageDate)-4)])\n endYear = int(lastImageDate[(len(lastImageDate)-8):(len(lastImageDate)-4)])\n startMonth = int(firstImageDate[(len(firstImageDate)-4):(len(firstImageDate)-2)])\n endMonth = int(lastImageDate[(len(lastImageDate)-4):(len(lastImageDate)-2)])-1\n startYearAll = startYear + 1\n endYearAll = endYear - 1\n \n years = list(range(startYear, endYearAll + 1))\n monthsEE = ee.List(list(range(startMonth,(12*len(years)+endMonth))))\n yearsEE = ee.List(list(range(startYearAll, endYearAll + 1)))\n \n for met in metric:\n # metL = [met]\n\n if (sen == 'L8' and met == \"NDVI\"):\n bands = ['B5', 'B4']\n elif (sen != 'L8' and met == \"NDVI\"):\n bands = ['B4', 'B3']\n elif (sen == 'L8' and met == \"NDWI\"):\n bands = ['B5', 'B6']\n elif (sen != 'L8' and met == \"NDWI\"):\n bands = ['B4', 'B5']\n elif (sen == 'L8' and met == \"NBR\"):\n bands = ['B5', 'B7']\n elif (sen != 'L8' and met == \"NBR\"):\n bands = ['B4', 'B7']\n #else:\n #print(\"wrong metric specified\")\n \n def addVI(image):\n vi = (image.normalizedDifference(bands)\n .rename('VI'))\n return image.addBands(vi)\n\n withVI = LSm.map(addVI)\n\n VI_col = withVI.select('VI')\n\n if timeStep == 'year':\n\n def map_m(i):\n i = ee.Number(i).int()\n image2 = (VI_col\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .first())\n filtered = (VI_col\n .filter(ee.Filter.calendarRange(i, i, 'year'))\n .mean()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(yearsEE.map(map_m).flatten())\n\n elif timeStep == 'month':\n \n def map_m(i):\n i = ee.Number(i)\n y = i.divide(12).add(years[0]).int()\n m = i.mod(12).add(1)\n image2 = (VI_col\n .filter(ee.Filter.calendarRange(m, m, 'month'))\n .filter(ee.Filter.calendarRange(y, y, 'year'))\n .first())\n filtered = (VI_col\n .filter(ee.Filter.calendarRange(m, 
m, 'month'))\n .filter(ee.Filter.calendarRange(y, y, 'year'))\n .mean()\n .copyProperties(image2,['system:time_start','system:time_end']))\n return filtered\n\n img_col = ee.ImageCollection(monthsEE.map(map_m).flatten())\n\n elif timeStep == 'lowest':\n\n img_col = VI_col\n\n #else:\n #print(\"incorrect time step specified\")\n \n if buf > 0:\n bufL = [buf]\n def bufferPoly(feature):\n return feature.buffer(bufL[0])\n\n ptsB = pts1.map(bufferPoly)\n def table_m(image):\n table = (image\n .select('VI')\n .reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_'+str(sen)+'_'+str(met)+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('buffered pts by:' + str(buf) + ' for Landsat: ' + sen + '_' + met)\n\n elif poly > 0:\n \n def table_m(image):\n table = (image\n .select('VI')\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_'+str(sen)+'_'+str(met)+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n \n #print ('spatial mean in poly: no buffer for Landsat: ' + sen + '_' + met)\n\n else:\n def table_m(image):\n table = (image\n .select('VI')\n .reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix))\n \n def table_add_date(f):\n return f.set('startDate', ee.Date(image.get('system:time_start')))\n\n return table.map(table_add_date)\n\n triplets = img_col.map(table_m).flatten()\n\n task_tc = ee.batch.Export.table.toDrive(collection = triplets\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = str(time_d[timeStep])+'_'+str(sen)+'_'+str(met)+'_'+str(years[0])+'_'+str(years[len(years)-1])+'_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n \n #print('value at point: no buffer for Landsat: ' + sen + '_' + met)",
"def sample_pin_position_range():\n #Create a sample goniometer\n g = TopazInHouseGoniometer()\n\n #Initialize the leg limits\n g.relative_sample_position = column([0.0, 0.0, 0.0])\n g.getplatepos(0.0, 0.0, 0.0)\n g.calculate_leg_xy_limits(visualize=True)\n\n# if True:\n# pylab.show()\n# return\n\n n = 17\n positions = np.linspace(-8, 8, n) #Range calculated in mm\n allowed = np.zeros( (n,n,n) )\n for (ix, x) in enumerate(positions):\n print \"Calculating x\", x\n for (iy, y) in enumerate(positions):\n for (iz, z) in enumerate(positions):\n #Set up\n g.relative_sample_position = column([x, y, z])\n allowed[ix,iy,iz] = g.are_angles_allowed([0., 0., 0.], return_reason=False)\n\n #Do a plot\n\n pylab.figure(1, figsize=[15,15])\n pylab.title(\"Allowable XZ sample positions\")\n for (iy, y) in enumerate(positions):\n print \"At y of\", y, \", # of points = \", np.sum( allowed[:, iy,:])\n if iy < 16:\n pylab.subplot(4,4,iy+1)\n pylab.pcolor(positions, positions, allowed[:, iy, :].transpose(), norm=pylab.Normalize(0, 1))\n pylab.xlabel(\"x\")\n pylab.ylabel(\"z\")\n pylab.title(\"y = %.3f mm\" % y)\n pylab.draw()\n pylab.axis('equal')\n pylab.show()\n #pylab.",
"def m4_make_lower_shape_points_list(tx, ty, m4_info, SEN_info):\n \"\"\"\n 1 Get information from m4_info & SEN_info\n \"\"\"\n x_m4 = m4_info[0]\n y_m4 = m4_info[1]\n z_m = m4_info[2]\n\n m4_points = m4_info[3]\n\n m4_p0 = m4_points[0]\n m4_p1 = m4_points[1]\n m4_p2 = m4_points[2]\n m4_p3 = m4_points[3]\n\n w_sen = SEN_info[0]\n n_w_sen = SEN_info[1]\n h_sen = SEN_info[2]\n t_sen = SEN_info[3]\n u_n = SEN_info[4]\n l_n = SEN_info[5]\n set = SEN_info[6]\n u_offset = SEN_info[7]\n l_offset = SEN_info[8]\n\n \"\"\"\n 2 Make lists.\n lower_shape_upper_left_row list\n lower_shape_upper_right_row list\n\n lower_shape_lower_left_row list\n lower_shape_lower_right_row list\n \"\"\"\n # upper side\n lower_shape_upper_left_row = []\n lower_shape_upper_right_row = []\n\n for i in range(u_n - 1, -1, -1):\n # left row\n ix = tx - (x_m4 - t_sen)\n iy = ty + (i * u_offset + set) + 10\n\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = Y_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n left_points = [p5, p6, p7, p8, p1, p2, p3, p4]\n lower_shape_upper_left_row.extend((left_points))\n\n for i in range(u_n):\n # right row\n ix = tx - t_sen\n iy = ty + (i * u_offset + set) + 10\n\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = Y_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n right_points = [p1, p2, p3, p4, p5, p6, p7, p8]\n lower_shape_upper_right_row.extend(right_points)\n\n # lower side\n lower_shape_lower_left_row = []\n lower_shape_lower_right_row = []\n\n for i in range(l_n):\n # left row\n ix = tx - (x_m4 - t_sen)\n iy = ty - (i * l_offset + set) - 10\n\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = Y_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n left_points = [p5, p6, p7, p8, p1, p2, p3, p4]\n lower_shape_lower_left_row.extend((left_points))\n\n for i in range(l_n - 1, -1, -1):\n # right row\n ix = tx - t_sen\n iy = ty - (i * l_offset + set) - 10\n\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = Y_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n right_points = [p1, p2, p3, p4, p5, p6, p7, p8]\n lower_shape_lower_right_row.extend(right_points)\n\n lower_shape_upper = [lower_shape_upper_left_row, lower_shape_upper_right_row]\n lower_shape_lower = [lower_shape_lower_left_row, lower_shape_lower_right_row]\n\n return lower_shape_upper, lower_shape_lower",
"def calc_points_expansion(self):\n tot_points = 0\n if 'capi' in args.exp:\n be = ['_'] * 8\n be += self.b[ 0: 5]\n be += ['_'] * 2\n be += self.b[ 5:10]\n be += ['_'] * 2\n be += self.b[10:15]\n be += ['_'] * 2\n be += self.b[15:20]\n be += ['_'] * 8\n max_points = 0\n for i in range(8, 34):\n if be[i] == 'U':\n points = 0\n if be[i - 1] == 'P' or be[i - 1] == 'G':\n points += 5\n elif be[i - 1] == 'S':\n points += 3\n elif be[i - 1] == 'U':\n points += 2\n elif be[i - 1] == 'A' or be[i - 1] == 'F' or ord(be[i - 1]) < 54:\n points -= 5\n if be[i + 1] == 'P' or be[i + 1] == 'G':\n points += 5\n elif be[i + 1] == 'S':\n points += 3\n elif be[i + 1] == 'U':\n points += 2\n elif be[i + 1] == 'A' or be[i + 1] == 'F' or ord(be[i + 1]) < 54:\n points -= 5\n if be[i - 7] == 'P' or be[i - 7] == 'G':\n points += 5\n elif be[i - 7] == 'S':\n points += 3\n elif be[i - 7] == 'U':\n points += 2\n elif be[i - 7] == 'A' or be[i - 7] == 'F' or ord(be[i - 7]) < 54:\n points -= 5\n if be[i + 7] == 'P' or be[i + 7] == 'G':\n points += 5\n elif be[i + 7] == 'S':\n points += 3\n elif be[i + 7] == 'U':\n points += 2\n elif be[i + 7] == 'A' or be[i + 7] == 'F' or ord(be[i + 7]) < 54:\n points -= 5\n if points > max_points:\n max_points = points\n tot_points += max_points\n if 'plan' in args.exp:\n nb_b_in_district = [0, 0, 0, 0, 0]\n i_to_district = (0, 0, 1, 2, 2, 0, 0, 1, 2, 2, 3, 3, 1, 4, 4, 3, 3, 1, 4, 4)\n for i in range(20):\n if self.b[i] != '_':\n nb_b_in_district[i_to_district[i]] += 1\n points = len([1 for x in nb_b_in_district if x == 4])\n if points == 5:\n points = 6\n tot_points += points\n if 'fire' in args.exp:\n be = ['_'] * 8\n be += self.b[ 0: 5]\n be += ['_'] * 2\n be += self.b[ 5:10]\n be += ['_'] * 2\n be += self.b[10:15]\n be += ['_'] * 2\n be += self.b[15:20]\n be += ['_'] * 8\n max_points = 0\n for i in range(8, 34):\n if be[i] == 'U':\n points = 0\n if be[i - 1] == 'A' or be[i - 1] == 'F':\n points += 3\n if be[i + 1] == 'A' or be[i + 1] == 'F':\n points += 3\n if be[i - 7] == 'A' or be[i - 7] == 'F':\n points += 3\n if be[i + 7] == 'A' or be[i + 7] == 'F':\n points += 3\n if points > max_points:\n max_points = points\n tot_points += max_points\n return tot_points",
"def block_offsets(self):\n ...",
"def __init__(self, wavelength):\n # store experimental data\n self.x = wavelength\n\n # Central wavelengths of the lines are known constants:\n self.c1 = 422.\n self.c2 = 428.",
"def init_block():\n final_locs = [[1 for x in range(LOC_SIZE)] for y in range(LOC_SIZE)]\n for a in range(int(LOC_SIZE / 2)):\n for b in range(a, int(LOC_SIZE / 2)):\n # creating and ringing each of the fleas individually\n print(a, b)\n locs = [[1 if x == a and y == b else 0 for x in range(LOC_SIZE)] for y in range(LOC_SIZE)]\n for i in range(50):\n locs = ring(locs)\n # finding complement of all probabilities to find probabilities of not having a flea there\n for r in range(LOC_SIZE):\n for s in range(LOC_SIZE):\n locs[r][s] = 1 - locs[r][s]\n # transposes and adds the set of probabilities to not have to recalculate for mirrored values\n if a != b:\n locs = operate_on_narray(locs, zip(*locs), lambda o, p: o*p)\n # multiplying the probabilities together\n final_locs = operate_on_narray(final_locs, locs, lambda o, p: o*p)\n return final_locs",
"def __init__(self, x=0, y=0, flux=None, time=None, wcs=None, quality=None, mask=None, exposure=1800, sector=0,\n size=150,\n camera=1, ccd=1, cadence=None):\n super(Source, self).__init__()\n if cadence is None:\n cadence = []\n if quality is None:\n quality = []\n if wcs is None:\n wcs = []\n if time is None:\n time = []\n if flux is None:\n flux = []\n\n self.size = size\n self.sector = sector\n self.camera = camera\n self.ccd = ccd\n self.cadence = cadence\n self.quality = quality\n self.exposure = exposure\n self.wcs = wcs\n co1 = 38.5\n co2 = 116.5\n catalog_1 = self.search_gaia(x, y, co1, co1)\n catalog_2 = self.search_gaia(x, y, co1, co2)\n catalog_3 = self.search_gaia(x, y, co2, co1)\n catalog_4 = self.search_gaia(x, y, co2, co2)\n catalogdata = vstack([catalog_1, catalog_2, catalog_3, catalog_4], join_type='exact')\n catalogdata = unique(catalogdata, keys='DESIGNATION')\n coord = wcs.pixel_to_world([x + (size - 1) / 2 + 44], [y + (size - 1) / 2])[0].to_string()\n ra = float(coord.split()[0])\n dec = float(coord.split()[1])\n catalogdata_tic = tic_advanced_search_position_rows(ra=ra, dec=dec, radius=(self.size + 2) * 21 * 0.707 / 3600)\n # print(f'no_of_stars={len(catalogdata_tic)}, camera={camera}, ccd={ccd}: ra={ra}, dec={dec}, radius={(self.size + 2) * 21 * 0.707 / 3600}')\n self.tic = convert_gaia_id(catalogdata_tic)\n self.flux = flux[:, y:y + size, x:x + size]\n self.mask = mask[y:y + size, x:x + size]\n self.time = np.array(time)\n median_time = np.median(self.time)\n interval = (median_time - 388.5) / 365.25\n\n num_gaia = len(catalogdata)\n tic_id = np.zeros(num_gaia)\n x_gaia = np.zeros(num_gaia)\n y_gaia = np.zeros(num_gaia)\n tess_mag = np.zeros(num_gaia)\n in_frame = [True] * num_gaia\n for i, designation in enumerate(catalogdata['DESIGNATION']):\n ra = catalogdata['ra'][i]\n dec = catalogdata['dec'][i]\n if not np.isnan(catalogdata['pmra'].mask[i]): # masked?\n ra += catalogdata['pmra'][i] * np.cos(np.deg2rad(dec)) * interval / 1000 / 3600\n if not np.isnan(catalogdata['pmdec'].mask[i]):\n dec += catalogdata['pmdec'][i] * interval / 1000 / 3600\n pixel = self.wcs.all_world2pix(\n np.array([catalogdata['ra'][i], catalogdata['dec'][i]]).reshape((1, 2)), 0, quiet=True)\n x_gaia[i] = pixel[0][0] - x - 44\n y_gaia[i] = pixel[0][1] - y\n try:\n tic_id[i] = catalogdata_tic['ID'][np.where(catalogdata_tic['GAIA'] == designation.split()[2])[0][0]]\n except:\n tic_id[i] = np.nan\n if np.isnan(catalogdata['phot_g_mean_mag'][i]):\n in_frame[i] = False\n elif catalogdata['phot_g_mean_mag'][i] >= 25:\n in_frame[i] = False\n elif -4 < x_gaia[i] < self.size + 3 and -4 < y_gaia[i] < self.size + 3:\n dif = catalogdata['phot_bp_mean_mag'][i] - catalogdata['phot_rp_mean_mag'][i]\n tess_mag[i] = catalogdata['phot_g_mean_mag'][\n i] - 0.00522555 * dif ** 3 + 0.0891337 * dif ** 2 - 0.633923 * dif + 0.0324473\n if np.isnan(tess_mag[i]):\n tess_mag[i] = catalogdata['phot_g_mean_mag'][i] - 0.430\n if np.isnan(tess_mag[i]):\n in_frame[i] = False\n else:\n in_frame[i] = False\n\n tess_flux = 10 ** (- tess_mag / 2.5)\n t = Table()\n t[f'tess_mag'] = tess_mag[in_frame]\n t[f'tess_flux'] = tess_flux[in_frame]\n t[f'tess_flux_ratio'] = tess_flux[in_frame] / np.nanmax(tess_flux[in_frame])\n t[f'sector_{self.sector}_x'] = x_gaia[in_frame]\n t[f'sector_{self.sector}_y'] = y_gaia[in_frame]\n catalogdata = hstack([catalogdata[in_frame], t]) # TODO: sorting not sorting all columns\n catalogdata.sort('tess_mag')\n self.gaia = catalogdata",
"def get_lat_offsets(self):\n\n startlat = self.parameters['startlatitude']\n stoplat = self.parameters['stoplatitude']\n\n #Given the start and stops,\n startidx, startvalue = utils.getnearest(self.latitudes, startlat)\n stopidx, stopvalue = utils.getnearest(self.latitudes, stoplat)\n startidx -= 2\n stopidx += 2\n latslice = np.arange(startidx, stopidx + 1)\n if utils.checkmonotonic(latslice):\n latslice = latslice\n else:\n #TODO: Support pole crossing images\n logger.error('Image is pole crossing, not currently supported.')\n '''\n print \"NOT MONOTONIC\"\n #Handle wraps around the poles\n latslice = np.arange(start_idx, stop_idx + 1)\n nlats = self.startlookup.shape[1]\n greatermask = np.where(latslice >= nlats)\n latslice[greatermask] -= nlats\n lessmask = np.where(latslice < 0)\n latslice[lessmask] += self.startlookup.shape[1]\n\n self.latsort = np.argsort(latslice)\n self.latslice = latslice[self.latsort]\n self.latsort = np.argsort(self.latsort)\n '''\n latslice = None\n logger.debug('Start latitude node is {}. Nearest lookup node is {}.'.format(startlat, startidx))\n logger.debug('Stop latitude node is {}. Nearest lookup node is {}.'.format(stoplat, stopidx))\n return latslice",
"def test_linear_buckling_iso_CCSS(plot_static=False, plot_lb=False):\n # number of nodes\n nx = 5 # along x\n ny = 5 # along y\n\n # getting integration points\n nint = 4\n points, weights = get_points_weights(nint=nint)\n\n # geometry\n a = 3 # along x\n b = 3 # along y\n\n # material properties\n E = 200e9\n nu = 0.3\n laminaprop = (E, E, nu)\n stack = [0]\n h = 0.001\n lam = read_stack(stack=stack, plyt=h, laminaprop=laminaprop)\n\n # creating mesh\n x = np.linspace(0, a, nx)\n y = np.linspace(0, b, ny)\n xmesh, ymesh = np.meshgrid(x, y)\n\n # node coordinates and position in the global matrix\n ncoords = np.vstack((xmesh.T.flatten(), ymesh.T.flatten())).T\n nids = 1 + np.arange(ncoords.shape[0])\n nid_pos = dict(zip(nids, np.arange(len(nids))))\n\n # identifying nodal connectivity for plate elements\n # similar than Nastran's CQUAD4\n #\n # ^ y\n # |\n #\n # 4 ________ 3\n # | |\n # | | --> x\n # | |\n # |_______|\n # 1 2\n\n\n nids_mesh = nids.reshape(nx, ny)\n n1s = nids_mesh[:-1, :-1].flatten()\n n2s = nids_mesh[1:, :-1].flatten()\n n3s = nids_mesh[1:, 1:].flatten()\n n4s = nids_mesh[:-1, 1:].flatten()\n\n num_elements = len(n1s)\n print('num_elements', num_elements)\n\n N = DOF*nx*ny\n Kr = np.zeros(KC0_SPARSE_SIZE*num_elements, dtype=INT)\n Kc = np.zeros(KC0_SPARSE_SIZE*num_elements, dtype=INT)\n Kv = np.zeros(KC0_SPARSE_SIZE*num_elements, dtype=DOUBLE)\n KGr = np.zeros(KG_SPARSE_SIZE*num_elements, dtype=INT)\n KGc = np.zeros(KG_SPARSE_SIZE*num_elements, dtype=INT)\n KGv = np.zeros(KG_SPARSE_SIZE*num_elements, dtype=DOUBLE)\n init_k_KC0 = 0\n init_k_KG = 0\n\n plates = []\n for n1, n2, n3, n4 in zip(n1s, n2s, n3s, n4s):\n plate = BFSPlate2D()\n plate.n1 = n1\n plate.n2 = n2\n plate.n3 = n3\n plate.n4 = n4\n plate.c1 = DOF*nid_pos[n1]\n plate.c2 = DOF*nid_pos[n2]\n plate.c3 = DOF*nid_pos[n3]\n plate.c4 = DOF*nid_pos[n4]\n plate.ABD = lam.ABD\n plate.lex = a/(nx - 1)\n plate.ley = b/(ny - 1)\n plate.init_k_KC0 = init_k_KC0\n plate.init_k_KG = init_k_KG\n update_KC0(plate, points, weights, Kr, Kc, Kv)\n init_k_KC0 += KC0_SPARSE_SIZE\n init_k_KG += KG_SPARSE_SIZE\n plates.append(plate)\n\n KC0 = coo_matrix((Kv, (Kr, Kc)), shape=(N, N)).tocsc()\n\n # applying boundary conditions\n\n # locating nodes\n bk = np.zeros(KC0.shape[0], dtype=bool) # constrained DOFs, can be used to prescribe displacements\n\n x = ncoords[:, 0]\n y = ncoords[:, 1]\n\n # applying boundary conditions\n # simply supported\n check = isclose(x, 0) | isclose(x, a) | isclose(y, 0) | isclose(y, b)\n bk[2::DOF] = check\n check = isclose(x, 0) | isclose(x, a)\n bk[3::DOF] = check\n # point supports\n check = isclose(x, a/2) & (isclose(y, 0) | isclose(y, b))\n bk[0::DOF] = check\n check = isclose(y, b/2) & (isclose(x, 0) | isclose(x, a))\n bk[1::DOF] = check\n\n # unconstrained nodes\n bu = ~bk # logical_not\n\n # defining external force vector\n fext = np.zeros(KC0.shape[0], dtype=float)\n\n # applying unitary load along u at x=a\n # nodes at vertices get 1/2 the force\n for plate in plates:\n pos1 = nid_pos[plate.n1]\n pos2 = nid_pos[plate.n2]\n pos3 = nid_pos[plate.n3]\n pos4 = nid_pos[plate.n4]\n if isclose(x[pos3], a):\n Nxx = -1\n xi = +1\n elif isclose(x[pos1], 0):\n Nxx = +1\n xi = -1\n else:\n continue\n lex = plate.lex\n ley = plate.ley\n indices = []\n c1 = DOF*pos1\n c2 = DOF*pos2\n c3 = DOF*pos3\n c4 = DOF*pos4\n cs = [c1, c2, c3, c4]\n for ci in cs:\n for i in range(DOF):\n indices.append(ci + i)\n fe = np.zeros(4*DOF, dtype=float)\n for j in range(nint):\n eta = points[j]\n plate.update_Nu(xi, eta)\n Nu = 
np.asarray(plate.Nu)\n fe += ley/2*weights[j]*Nu*Nxx\n fext[indices] += fe\n\n Kuu = KC0[bu, :][:, bu]\n fextu = fext[bu]\n\n # static solver\n uu = spsolve(Kuu, fextu)\n u = np.zeros(KC0.shape[0], dtype=float)\n u[bu] = uu\n\n if plot_static:\n import matplotlib\n matplotlib.use('TkAgg')\n import matplotlib.pyplot as plt\n plt.gca().set_aspect('equal')\n uplot = u[0::DOF].reshape(nx, ny).T\n vplot = u[1::DOF].reshape(nx, ny).T\n print('u extremes', uplot.min(), uplot.max())\n print('v extremes', vplot.min(), vplot.max())\n levels = np.linspace(uplot.min(), uplot.max(), 300)\n plt.contourf(xmesh, ymesh, uplot, levels=levels)\n plt.colorbar()\n plt.show()\n\n # eigenvalue solver\n\n # getting integration points\n for plate in plates:\n update_KG(u, plate, points, weights, KGr, KGc, KGv)\n KG = coo_matrix((KGv, (KGr, KGc)), shape=(N, N)).tocsc()\n KGuu = KG[bu, :][:, bu]\n\n # solving modified generalized eigenvalue problem\n # Original: (KC0 + lambda*KG)*v = 0\n # Modified: (-1/lambda)*KC0*v = KG*v #NOTE here we find (-1/lambda)\n num_eigenvalues = 5\n eigvals, eigvecsu = eigsh(A=KGuu, k=num_eigenvalues, which='SM', M=Kuu,\n tol=1e-6, sigma=1., mode='cayley')\n eigvals = -1./eigvals\n eigvecs = np.zeros((KC0.shape[0], num_eigenvalues), dtype=float)\n eigvecs[bu, :] = eigvecsu\n\n if plot_lb:\n import matplotlib\n matplotlib.use('TkAgg')\n import matplotlib.pyplot as plt\n plt.gca().set_aspect('equal')\n mode = 0\n wplot = eigvecs[2::DOF, mode].reshape(nx, ny).T\n levels = np.linspace(wplot.min(), wplot.max(), 300)\n plt.contourf(xmesh, ymesh, wplot, levels=levels)\n plt.colorbar()\n plt.show()\n\n kc = eigvals[0]/(E*np.pi**2*(h/b)**2/(12*(1 - nu**2))*h)\n assert isclose(kc, 6.6, rtol=0.05)",
"def line_moved(self):\n\n # The line is supposed to be moved by hand to the beginning of first wrinkle.\n # The optimal spot is local maximum (not always visible)\n ext_index = self.index_of_drop + int(self.line.value() * 10000)\n ext_value = self.data[ext_index]\n\n p_i, p_f = toolbox_2.get_pressure_change(self.measurement)\n smallest_growing_particle = toolbox_2.minimum_particle_diameter(p_i, p_f, self.saturation_percentage / 100)\n\n n = toolbox_2.particle_count_2(ext_value)\n\n # measurement series 1\n if self.selected_data == 3 and 7 <= self.meas_selected_number <= 17 and self.meas_selected_series == 1:\n index = self.meas_selected_number - 7 # Assumes that first measurement is number 7\n self.smallest_particles[index] = smallest_growing_particle\n self.number_counts[index] = n\n\n self.update_distribution()\n # Update plot\n self.curve_distribution.setData(self.particle_distribution_x, self.particle_distribution_y*1e-10)\n self.curve_distribution_cumulative.setData(self.smallest_particles, self.number_counts*1e-10)\n\n # measurement series 2\n elif self.selected_data == 3 and self.meas_selected_series == 2:\n index = self.meas_selected_number - 1 # begins from 1, 0th measurement is just copy of 8th\n self.number_counts_2[index] = n\n\n self.curve_rotatometer.setData(np.array([4, 6, 8, 10, 12, 14, 16, 18]), self.number_counts_2*1e-10)\n x = np.linspace(3.5, 20, 100)\n self.curve_rotatometer_fit.setData(x, self.number_counts_2[0] * 4 * (1 / x) *1e-10)\n\n #print(\"N\", \"%.2e\"%n, \"dpres\", round(p_i - p_f))",
"def find_endpoints(batch_trajectories):\n # empty lists to fill\n site_lats = []\n site_lons = []\n last_lats = []\n last_lons = []\n lats_150 = []\n lons_150 = [] \n last_times = []\n times_150 = []\n last_sst = []\n sst_150 = []\n \n # temporary lists as placeholders\n temp_site_lats = []\n temp_site_lons = []\n temp_lats = []\n temp_lons = []\n temp_lats150 = []\n temp_lons150 = []\n temp_times = []\n temp_times150 = []\n temp_sst = []\n temp_sst150 = []\n\n for speed in range(len(batch_trajectories)):\n # working with one speed at a time means working with one nc file at\n # a time\n \n # reset temporary lists\n temp_site_lats = []\n temp_site_lons = []\n temp_lats = []\n temp_lons = []\n temp_lats150 = []\n temp_lons150 = []\n temp_times = []\n temp_times150 = []\n temp_sst = []\n temp_sst150 = []\n\n # extract variables into lists\n lats = batch_trajectories[speed].variables['lat'][:]\n lons = batch_trajectories[speed].variables['lon'][:]\n lats150 = batch_trajectories[speed].variables['lat150'][:]\n lons150 = batch_trajectories[speed].variables['lon150'][:]\n times = batch_trajectories[speed].variables['time'][:]\n ssts = batch_trajectories[speed].variables['temp'][:]\n ssts_150 = batch_trajectories[speed].variables['temp150'][:]\n\n # if a particle is deleted before time is up, values are masked. \n # We'd like to get the last valid number.\n for trajectory in range(len(lats)):\n i = -1 # index for the last value\n while np.ma.is_masked(lats[trajectory][i]) is True:\n i -= 1 # if the value is masked, go to one value sooner\n \n j = i # use j for the 150m values\n while lats150[trajectory][j] > 0:\n # we want the first index where the latitude is recorded.\n # j is actually the last one where it's not recorded, so we\n # extract the information at index j+1\n j -= 1\n\n # once i and j are determined for a trajectory, we can extract the\n # variables and append them to temporary lists.\n temp_site_lats.append(lats[trajectory][0])\n temp_site_lons.append(lons[trajectory][0])\n temp_lats.append(lats[trajectory][i])\n temp_lons.append(lons[trajectory][i])\n temp_lats150.append(lats150[trajectory][j+1])\n temp_lons150.append(lons150[trajectory][j+1])\n temp_times.append(times[trajectory][i])\n temp_sst.append(ssts[trajectory][i])\n temp_sst150.append(ssts_150[trajectory][j+1])\n temp_times150.append(times[trajectory][j+1])\n \n # after the temporary lists are appended by sinking speed, they\n # are appended to the big lists that are returned by the function.\n # this keeps the structure of being separated by sinking speed.\n site_lats.append(temp_site_lats)\n site_lons.append(temp_site_lons)\n last_lats.append(temp_lats)\n last_lons.append(temp_lons)\n lats_150.append(temp_lats150)\n lons_150.append(temp_lons150)\n last_times.append(temp_times)\n times_150.append(temp_times150)\n last_sst.append(temp_sst)\n sst_150.append(temp_sst150)\n \n return site_lats, site_lons, last_lats, last_lons, lats_150, lons_150,\\\n last_times, times_150, last_sst, sst_150",
"def get_probeLocs_calib_setup(dir, num_probes = 16):\n position_vectors = [[0] * 3 for i in range(num_probes)]\n\n #every x postion\n\n # Convert to meters\n x_pos = [-4.25*1e-3*25.4, -4.25*1e-3*25.4, 4.24*1e-3*25.4, 4.24*1e-3*25.4]\n y_pos = [-4.25*1e-3*25.4, 4.24*1e-3*25.4, 4.24*1e-3*25.4, -4.25*1e-3*25.4]\n z_pos = [-2.25*1e-3*25.4, -0.75*1e-3*25.4, 0.75*1e-3*25.4, 2.25*1e-3*25.4]\n x = 0\n for i in range(num_probes):\n if(i%4 ==0 and i>0):\n x+=1\n position_vectors[i][0] =x_pos[x]\n position_vectors[i][1] = y_pos[x]\n position_vectors[i][2] =z_pos[i%4]\n # print(position_vectors[i][0])\n\n \"\"\" Now take into account the direction\n r shots : x,y,z - > r,t,z\n t shots : x,y,z - > r,t,z\n z shots : x,y,z - > r,t,z\n \"\"\"\n if dir ==2 :#r\n # don't need to switch anything\n return position_vectors\n if dir == 0:#t\n # looks like -90 degree rotation about y-axis of probe coil orientation, so switch x and z\n position_vectors[:][0], position_vectors[:][2] = position_vectors[:][2], position_vectors[:][0]\n return position_vectors\n if dir ==1:#z\n # also like -90 degree rotation, switch x and z\n position_vectors[:][0], position_vectors[:][2] = position_vectors[:][2], position_vectors[:][0]\n return position_vectors\n\n return position_vectors",
"def _generate_coordinates(self):\n a0 = +0.2969\n a1 = -0.1260\n a2 = -0.3516\n a3 = +0.2843\n a4 = -0.1036 # zero thickness TE\n\n x = np.linspace(0.0, 1.0, num=self.n_points)\n\n if len(self.digits) == 4:\n # Returns n+1 points in [0 1] for the given 4-digits NACA string\n m = float(self.digits[0]) / 100.0\n p = float(self.digits[1]) / 10.0\n t = float(self.digits[2:]) / 100.0\n\n # half-thickness distribution\n yt = 5 * t * (a0 * np.sqrt(x) + a1 * x + a2 * np.power(x, 2) +\n a3 * np.power(x, 3) + a4 * np.power(x, 4))\n\n if p == 0:\n # Symmetric foil\n self.xup_coordinates = np.linspace(0.0, 1.0, num=self.n_points)\n self.yup_coordinates = yt\n self.xdown_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.ydown_coordinates = -yt\n else:\n # Cambered foil\n xc1 = np.asarray([xx for xx in x if xx <= p])\n xc2 = np.asarray([xx for xx in x if xx > p])\n yc1 = m / np.power(p, 2) * xc1 * (2 * p - xc1)\n yc2 = m / np.power(1 - p, 2) * (1 - 2 * p + xc2) * (1 - xc2)\n # Y-coordinates of camber line\n yc = np.append(yc1, yc2)\n\n if self.cosine_spacing:\n # points are generated according to cosine distribution of\n # the X-coordinates of the chord\n dyc1_dx = m / np.power(p, 2) * (2 * p - 2 * xc1)\n dyc2_dx = m / np.power(1 - p, 2) * (2 * p - 2 * xc2)\n dyc_dx = np.append(dyc1_dx, dyc2_dx)\n theta = np.arctan(dyc_dx)\n self.xup_coordinates = x - yt * np.sin(theta)\n self.yup_coordinates = yc + yt * np.cos(theta)\n self.xdown_coordinates = x + yt * np.sin(theta)\n self.ydown_coordinates = yc - yt * np.cos(theta)\n else:\n # Linear spacing distribution of the foil coordinates\n self.xup_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.xdown_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.yup_coordinates = yc + yt\n self.ydown_coordinates = yc - yt\n\n elif len(self.digits) == 5:\n # Returns n+1 points in [0 1] for the given 5-digits NACA string\n cld = float(self.digits[0]) * 0.15\n p = 5.0 * float(self.digits[1]) / 100.0\n s = float(self.digits[2])\n t = float(self.digits[3:]) / 100.0\n\n # half-thickness distribution\n yt = 5 * t * (a0 * np.sqrt(x) + a1 * x + a2 * np.power(x, 2) +\n a3 * np.power(x, 3) + a4 * np.power(x, 4))\n\n if s == 1:\n # Relfex camber\n P = np.array([0.1, 0.15, 0.2, 0.25])\n M = np.array([0.13, 0.2170, 0.318, 0.441])\n K = np.array([51.99, 15.793, 6.520, 3.191])\n elif s == 0:\n # Standard camber\n P = np.array([0.05, 0.1, 0.15, 0.2, 0.25])\n M = np.array([0.0580, 0.1260, 0.2025, 0.2900, 0.3910])\n K = np.array([361.4, 51.64, 15.957, 6.643, 3.230])\n else:\n raise ValueError(\n 'For NACA \"LPSTT\" the value of \"S\" can be either 0 or 1.')\n\n if p == 0:\n # Symmetric foil\n self.xup_coordinates = np.linspace(0.0, 1.0, num=self.n_points)\n self.yup_coordinates = yt\n self.xdown_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.ydown_coordinates = -yt\n else:\n # Cambered foil\n spl_m = splrep(P, M)\n spl_k = splrep(M, K)\n m = splev(p, spl_m)\n k1 = splev(m, spl_k)\n xc1 = np.asarray([xx for xx in x if xx <= m])\n xc2 = np.asarray([xx for xx in x if xx > m])\n yc1 = k1 / 6.0 * (np.power(xc1, 3) - 3 * m * np.power(xc1, 2) +\n np.power(m, 2) * (3 - m) * xc1)\n yc2 = k1 / 6.0 * np.power(m, 3) * (1 - xc2)\n yc = np.append(yc1, yc2)\n\n if self.cosine_spacing:\n # points are generated according to cosine distribution of\n # the X-coordinates of the chord\n zc = cld / 0.3 * yc\n dyc1_dx = 1.0 / 6.0 * k1 * (\n 3 * np.power(xc1, 2) - 6 * m * xc1 + np.power(m, 2) *\n (3 - m))\n dyc2_dx = np.tile(-1.0 / 6.0 * k1 * 
np.power(m, 3),\n len(xc2))\n dyc_dx = np.append(dyc1_dx, dyc2_dx)\n theta = np.arctan(dyc_dx)\n self.xup_coordinates = x - yt * np.sin(theta)\n self.yup_coordinates = zc + yt * np.cos(theta)\n self.xdown_coordinates = x + yt * np.sin(theta)\n self.ydown_coordinates = zc - yt * np.cos(theta)\n else:\n # Linear spacing distribution of the foil coordinates\n self.xup_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.xdown_coordinates = np.linspace(\n 0.0, 1.0, num=self.n_points)\n self.yup_coordinates = yc + yt\n self.ydown_coordinates = yc - yt\n\n else:\n raise Exception(\n 'Only 4- and 5-digit NACA strings are supported.')",
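"# Added sketch: the 4-digit half-thickness polynomial from _generate_coordinates,\n# evaluated standalone for a NACA 2412-style thickness (t = 0.12). Names here are\n# illustrative only.\nimport numpy as np\n\na0, a1, a2, a3, a4 = 0.2969, -0.1260, -0.3516, 0.2843, -0.1036\nt = 0.12\nx = np.linspace(0.0, 1.0, num=11)\nyt = 5 * t * (a0 * np.sqrt(x) + a1 * x + a2 * x**2 + a3 * x**3 + a4 * x**4)\nprint(yt.max()) # ~0.06 = t/2, reached near x = 0.3\nprint(yt[-1]) # 0.0 at the trailing edge (a4 = -0.1036 closes the TE)",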
"def externalEnergy(self, controlpoints):\n # compute the factor the energy of each control points get's weighed with\n external = 0.0\n if len(self.controlpoints) > 0:\n factor = float(1)/len(self.controlpoints)\n else:\n factor = 1\n \n # check if the given controlpoints are equal to the current ones\n if np.equal(controlpoints, self.controlpoints).all():\n # take the current normals\n normals = self.normals\n else:\n # otherwise calculate the according normals\n spline = Spline()\n spline.addControlPoints(*controlpoints)\n normals = spline.normals\n \n # ACHTUNG! hier müssen die Normalen zur Berechnung gedreht werden,\n # falls flip es vorgibt\n if self.flip:\n normals = map(lambda n: rotateVector(n, angle=pi), normals)\n \n # only remember each external control point energy if the given control points are\n # the snakes current control points\n memorize_energies = np.equal(controlpoints, self.controlpoints).all()\n # reset the controlpointenergy list if necessary\n if memorize_energies:\n self.ext_energies = []\n \n # sum up the energies at the single control points multiplied by the inverse\n # of the number of control points\n for i in range(len(controlpoints)):\n point = controlpoints[i]\n \n# if len(normals) > 0:\n# normal = normals[i]\n# else:\n# normal = None\n normal = normals[i]\n \n pointenergy = self.ExternalEnergy.getEnergy(point, iteration=self.iteration, normal=normal)\n # check wether to save the point energy\n if memorize_energies:\n #self.ext_energies.append(self.ExternalEnergy.getEnergy(point))\n self.ext_energies.append(pointenergy)\n external += pointenergy * factor\n return external",
"def find_cea_coord(header,phi_c,lambda_c,nx,ny,dx,dy):\n nx = int(nx)\n ny = int(ny)\n\n # Array of CEA coords\n x = []\n y = []\n\n for j in range(ny):\n col = []\n row = []\n for i in range(nx):\n col.append(np.radians((i-(nx-1)/2)*dx))\n row.append(np.radians((j-(ny-1)/2)*dy))\n x.append(col)\n y.append(row)\n\n x = np.array(x)\n y = np.array(y)\n\n # Relevant header values\n rSun = header['rsun_obs']/header['cdelt1'] #solar radius in pixels\n disk_latc = np.radians(header['CRLT_OBS'])\n disk_lonc = np.radians(header['CRLN_OBS'])\n disk_xc = header['CRPIX1'] - 1 #disk center wrt lower left of patch\n disk_yc = header['CRPIX2'] - 1\n pa = np.radians(header['CROTA2']*-1)\n\n latc = np.radians(lambda_c)\n lonc = np.radians(phi_c) - disk_lonc\n\n # Convert coordinates\n lat = []\n lon = []\n xi = []\n eta = []\n\n for j in range(ny):\n lat_col = []\n lon_col = []\n xi_col = []\n eta_col = []\n for i in range(nx):\n lat0,lon0 = plane2sphere(x[j,i],y[j,i],latc,lonc)\n lat_col.append(lat0)\n lon_col.append(lon0)\n\n xi0,eta0 = sphere2img(lat0,lon0,disk_latc,0.0,disk_xc,disk_yc,rSun,pa)\n xi_col.append(xi0)\n eta_col.append(eta0)\n lat.append(lat_col)\n lon.append(lon_col)\n xi.append(xi_col)\n eta.append(eta_col)\n\n lat = np.array(lat)\n lon = np.array(lon)\n xi = np.array(xi)\n eta = np.array(eta)\n\n return xi,eta,lat,lon",
"def integrate(coords,data,fault_pts,dshape_hex8,gll_weights,elmt):\n norm=0.0\n normx=0.0\n normy=0.0\n normz=0.0\n div=0.0 #normalizing factor to divide by\n divx=0.\n divy=0.\n divz=0.\n\n eps=1.0*g.mesh_spacing/(g.ngllx-1.)\n print 'eps=', eps\n f=open('eliminated_coords.vtk','w')\n\n #create integer versions of arrays to use in pulling out gll pts for each element\n data_round=np.rint(data)\n dati=data_round.astype(int)\n coord_round=np.rint(coords)\n coordi=coord_round.astype(int)\n\n #remove duplicates from data array\n dat_struc=np.ascontiguousarray(dati).view(np.dtype((np.void,dati.dtype.itemsize *dati.shape[1])))\n _,idx=np.unique(dat_struc,return_index=True)\n datu=dati[idx]\n data_unique=data[idx]\n\n for i_elmt in range(g.nelmt):\n #pull out geometric coordinates for this element\n elmt_coord_id=[j-1 for j in elmt[i_elmt]]\n elmt_coord=coordi[elmt_coord_id]\n\n #find corresponding gll pts for this element\n xmin=min(elmt_coord[:,0]);xmax=max(elmt_coord[:,0])\n ymin=min(elmt_coord[:,1]);ymax=max(elmt_coord[:,1])\n zmin=min(elmt_coord[:,2]);zmax=max(elmt_coord[:,2])\n gll_coord_id=np.nonzero((datu[:,0]>=xmin) & (datu[:,0]<=xmax) & (datu[:,1]>=ymin) & (datu[:,1]<=ymax) & (datu[:,2]>=zmin) & (datu[:,2]<=zmax))\n elmt_data=data_unique[gll_coord_id]\n if len(gll_coord_id[0]) != g.ngll:\n print \"elmt=\", elmt_coord_id\n print xmin,xmax,ymin,ymax,zmin,zmax\n print 'elmt_data=', elmt_data\n print \"gll pts found=\", len(gll_coord_id[0])\n raise ValueError(\"incorrect number of gll points found in element!\")\n exit\n\n #sort the gll coords so they correspond the order of the arrays giving the weights and shape function\n dat_sorted=elmt_data[npi.argsort((elmt_data[:,0], elmt_data[:,1],elmt_data[:,2]))]\n func=dat_sorted[:,3:]\n\n #if any gll pt is too close to fault, remove the element from the integration\n dist=distance.cdist(fault_pts,dat_sorted[:,0:3],'euclidean')\n if (dist<eps).any():\n print \"eliminated element #\", i_elmt\n np.savetxt(f,dat_sorted[:,0:3],fmt='%3.3f')\n continue\n\n for i_gll in range(g.ngll):\n\n #compute jacobian, its derivative and inverse\n jac=np.matmul(dshape_hex8[:,:,i_gll],elmt_coord)\n det_jac=np.linalg.det(jac)\n\n #perform the integration\n norm=norm+det_jac*gll_weights[i_gll]*np.dot((func[i_gll,3:6]-func[i_gll,0:3]),(func[i_gll,3:6]-func[i_gll,0:3]))\n div=div+det_jac*gll_weights[i_gll]*np.dot(func[i_gll,3:6],func[i_gll,3:6])\n normx=normx+det_jac*gll_weights[i_gll]*(func[i_gll,3]-func[i_gll,0])**2\n divx=divx+det_jac*gll_weights[i_gll]*(func[i_gll,3])**2\n normy=normy+det_jac*gll_weights[i_gll]*(func[i_gll,4]-func[i_gll,1])**2\n divy=divy+det_jac*gll_weights[i_gll]*(func[i_gll,4])**2\n normz=normz+det_jac*gll_weights[i_gll]*(func[i_gll,5]-func[i_gll,2])**2\n divz=divz+det_jac*gll_weights[i_gll]*(func[i_gll,5])**2\n\n norm_finalx=sqrt(normx/divx)\n norm_finaly=sqrt(normy/divy)\n norm_finalz=sqrt(normz/divz)\n norm_final=sqrt(norm/div)\n\n f.close()\n\n return norm_finalx, norm_finaly, norm_finalz,norm_final",
"def add_bitline_contacts(self):\n\n stack=(\"metal1\", \"via1\", \"metal2\")\n pos = self.lower_pmos_inst.get_pin(\"S\").center()\n self.add_contact_center(layers=stack,\n offset=pos)\n pos = self.lower_pmos_inst.get_pin(\"D\").center()\n self.add_contact_center(layers=stack,\n offset=pos)\n pos = self.upper_pmos1_inst.get_pin(\"S\").center()\n self.add_contact_center(layers=stack,\n offset=pos)\n pos = self.upper_pmos2_inst.get_pin(\"D\").center()\n self.add_contact_center(layers=stack,\n offset=pos)",
"def bcL(self, rng=None):\n if rng is None:\n rng = random.PRNGKey(1)\n n = self.n\n x = onp.sin(self.bcmesh * np.pi)\n n_y = (np.floor((n + 1) / 2) - 1).astype(int)\n if rng is not None:\n coeffs = random.multivariate_normal(rng, np.zeros(16),\n np.diag(np.ones(16)))\n else:\n key = random.randint(random.PRNGKey(1), (1,), 1, 1000)\n coeffs = random.multivariate_normal(\n random.PRNGKey(key[0]), np.zeros(16), np.diag(np.ones(16)))\n left = coeffs[0] * x**3 + coeffs[1] * x**2 + coeffs[2] * x #+ coeffs[3]\n right = coeffs[4] * x**3 + coeffs[5] * x**2 + coeffs[6] * x #+ coeffs[7]\n lower = coeffs[8] * x**3 + coeffs[9] * x**2 + coeffs[10] * x #+ coeffs[11]\n upper = coeffs[12] * x**3 + coeffs[13] * x**2 + coeffs[14] * x #+ coeffs[15]\n shape = 2 * x.shape\n source = onp.zeros(shape)\n source[0, :] = upper\n source[n_y - 1, n_y - 1:] = lower[:n - n_y + 1]\n source[n_y - 1:, n_y - 1] = right[:n - n_y + 1]\n source[:, 0] = left\n source[-1, :n_y - 1] = right[n:n - n_y:-1]\n source[:n_y - 1, -1] = lower[n:n - n_y:-1]\n # because this makes the correct order of boundary conditions\n return source * (n + 1)**2",
"def sensordaten_einlesen(self):\n self.caldata = []\n self.caldata_raw = np.genfromtxt(self.sensorfile, usecols = np.asarray(self.sensorspalte), skip_header = 1)\n for ele in self.caldata_raw:\n self.caldata.append(int(ele))\n self.Sensordata = Channel()",
"def get_probeLocs_calib_setup_cm(dir, num_probes = 16):\n position_vectors = [[0] * 3 for i in range(num_probes)]\n\n #every x postion\n\n # Convert to meters\n x_pos = [-4.25*2.54, -4.25*2.54, 4.24*2.54, 4.24*2.54]\n y_pos = [-4.25*2.54, 4.24*2.54, 4.24*2.54, -4.25*2.54]\n z_pos = [-2.25*2.54, -0.75*2.54, 0.75*2.54, 2.25*2.54]\n x = 0\n for i in range(num_probes):\n if(i%4 ==0 and i>0):\n x+=1\n position_vectors[i][0] =x_pos[x]\n position_vectors[i][1] = y_pos[x]\n position_vectors[i][2] =z_pos[i%4]\n # print(position_vectors[i][0])\n\n \"\"\" Now take into account the direction\n r shots : x,y,z - > r,t,z\n t shots : x,y,z - > r,t,z\n z shots : x,y,z - > r,t,z\n \"\"\"\n if dir ==2 :#r\n # don't need to switch anything\n return position_vectors\n if dir == 0:#t\n # looks like -90 degree rotation about y-axis of probe coil orientation, so switch x and z\n position_vectors[:][0], position_vectors[:][2] = position_vectors[:][2], position_vectors[:][0]\n return position_vectors\n if dir ==1:#z\n # also like -90 degree rotation, switch x and z\n position_vectors[:][0], position_vectors[:][2] = position_vectors[:][2], position_vectors[:][0]\n return position_vectors\n\n return position_vectors",
"def offset_all(x, y, beck_bed, t):\n length = x.size\n xyz1 = np.zeros((length, 3))\n xyz2 = np.zeros((length, 3))\n xyz1[:, 0] = np.copy(x)\n xyz1[:, 1] = np.copy(y)\n xyz1[:, 2] = np.copy(beck_bed[:, NUM])\n allxyz = np.copy(xyz1)\n offsetx = np.zeros((length, 2))\n offsety = np.zeros((length, 2))\n for i in range(NUM-1, -1, -1):\n \"\"\"Offset distance L is looping from INTERVAL to B.\"\"\"\n if np.mod(t, LPRINT) == 0:\n if i == NUM - 1:\n extr = '...(innermost)'\n elif i == 0:\n extr = '...(outermost)'\n else:\n extr = '...'\n print('+> Offsetting Polyline #'\n + str(i+1) + ' & #' + str(2*NUM+1-i) + extr, end='')\n offsetx, offsety = offset(x, y, WIDTH/2-i*INTERVAL)\n if i == 0 and SAVEBOUND and t == 0:\n t1 = np.copy(offsetx)\n t2 = np.copy(offsetx)\n t1[:,0] = np.copy(offsetx[:, 0])\n t1[:,1] = np.copy(offsety[:, 0])\n t2[:,0] = np.copy(offsetx[:, 1])\n t2[:,1] = np.copy(offsety[:, 1])\n t3 = np.concatenate((t1, t2[::-1], np.array([t1[0, :]])), axis=0)\n np.savetxt(FNAME.rsplit('.', 1)[0] + '_boundary.i2s', t3, fmt='%.6e')\n xyz1[:, 0] = offsetx[:, 0]\n xyz1[:, 1] = offsety[:, 0]\n xyz1[:, 2] = beck_bed[:, -1-i]\n xyz2[:, 0] = offsetx[:, 1]\n xyz2[:, 1] = offsety[:, 1]\n xyz2[:, 2] = beck_bed[:, i]\n allxyz = np.concatenate((allxyz, xyz1, xyz2), axis=0)\n if np.mod(t, LPRINT) == 0:\n print(' [done]')\n if i == 0 and np.mod(t, LPRINT) == 0:\n print(' * Note: Polyline #' + str(NUM + 1) + ' is centerline')\n return allxyz",
"def _compute(self, w_beg, w_end, signal, station_availability):\n\n avail_idx = np.where(station_availability == 1)[0]\n sige = signal[0]\n sign = signal[1]\n sigz = signal[2]\n\n p_onset_raw, p_onset = self._compute_p_onset(sigz,\n self.sampling_rate)\n s_onset_raw, s_onset = self._compute_s_onset(sige, sign,\n self.sampling_rate)\n self.data.p_onset = p_onset\n self.data.s_onset = s_onset\n self.data.p_onset_raw = p_onset_raw\n self.data.s_onset_raw = s_onset_raw\n\n ps_onset = np.concatenate((self.data.p_onset, self.data.s_onset))\n ps_onset[np.isnan(ps_onset)] = 0\n\n p_ttime = self.lut.fetch_index(\"TIME_P\", self.sampling_rate)\n s_ttime = self.lut.fetch_index(\"TIME_S\", self.sampling_rate)\n ttime = np.c_[p_ttime, s_ttime]\n del p_ttime, s_ttime\n\n nchan, tsamp = ps_onset.shape\n\n pre_smp = int(round(self.pre_pad * int(self.sampling_rate)))\n pos_smp = int(round(self.post_pad * int(self.sampling_rate)))\n nsamp = tsamp - pre_smp - pos_smp\n\n # Prep empty 4-D coalescence map and run C-compiled ilib.migrate()\n ncell = tuple(self.lut.cell_count)\n map_4d = np.zeros(ncell + (nsamp,), dtype=np.float64)\n ilib.migrate(ps_onset, ttime, pre_smp, pos_smp, nsamp, map_4d,\n self.n_cores)\n\n # Prep empty coa and loc arrays and run C-compiled ilib.find_max_coa()\n max_coa = np.zeros(nsamp, np.double)\n grid_index = np.zeros(nsamp, np.int64)\n ilib.find_max_coa(map_4d, max_coa, grid_index, 0, nsamp, self.n_cores)\n\n # Get max_coa_norm\n sum_coa = np.sum(map_4d, axis=(0, 1, 2))\n max_coa_norm = max_coa / sum_coa\n max_coa_norm = max_coa_norm * map_4d.shape[0] * map_4d.shape[1] * \\\n map_4d.shape[2]\n\n tmp = np.arange(w_beg + self.pre_pad,\n w_end - self.post_pad + (1 / self.sampling_rate),\n 1 / self.sampling_rate)\n daten = [x.datetime for x in tmp]\n\n # Calculate max_coa (with correction for number of stations)\n max_coa = np.exp((max_coa / (len(avail_idx) * 2)) - 1.0)\n\n loc = self.lut.xyz2index(grid_index, inverse=True)\n\n return daten, max_coa, max_coa_norm, loc, map_4d",
"def degree_elevation(order, control_net, print_logs=False):\n\n d, ntot = np.shape(control_net)\n old_order = order\n new_order = old_order + 1\n\n _, U = u_bar(ntab=old_order)\n\n tri, _, partitioned_triangles = u_bar(ntab=old_order-1, triangles_partitions=True)\n\n _, N = np.shape(U)\n\n sandbox = np.zeros((d, N, ntot))\n\n augmented_control_net = np.empty((d, int((old_order+1)*(old_order+2)/2)))\n\n def foreach_dimension(func):\n for di in range(d): func(di)\n\n def initialize_sandbox_with_control_points(di): \n sandbox[di,:,:] = np.ones((N,1)) * control_net[di,:] \n\n foreach_dimension(initialize_sandbox_with_control_points)\n \n covered_points = []\n\n def rotate_clockwise(triangle): \n a, b, c = triangle\n return a, c, b\n\n def log(*args):\n log = \"\"\"\ncontrol point position {} is under assignment \nfrom combination coordinate {} \nusing upside down triangle {} (rotated clockwise {})\n \"\"\"\n if print_logs: print(log.format(*args))\n\n def barycentric_combine(triangle):\n \n ind1, ind2, ind3 = triangle\n barycentric_comb = np.empty((d, N))\n\n def update_surface(di):\n first_row = np.multiply(U[0,:], sandbox[di, :, ind1])\n second_row = np.multiply(U[1,:], sandbox[di, :, ind2])\n third_row = np.multiply(U[2,:], sandbox[di, :, ind3])\n barycentric_comb[di,:] = first_row + second_row + third_row\n\n foreach_dimension(update_surface)\n\n return barycentric_comb\n\n def upside_down_triangles_handler(triangles):\n t = 0\n for top_most_vertex_in_right_diagonal_triangles, forward_offset in zip(\n np.cumsum([old_order + 2] + list(range(old_order,3,-1))),\n range(old_order-2, 0, -1)): \n \n if print_logs:\n print(\"start of diagonal iteration on upside down triangles for \\\n control point position {}, with forward offset {}:\".\n format(top_most_vertex_in_right_diagonal_triangles, forward_offset))\n\n for k, comb in zip([top_most_vertex_in_right_diagonal_triangles + i \n for i in range(forward_offset)],\n np.cumsum([old_order+1+forward_offset] + list(range(old_order-1,old_order-forward_offset,-1)))): \n\n log(k, comb, triangles[t], rotate_clockwise(triangles[t]))\n \n barycentric_combination = barycentric_combine(rotate_clockwise(triangles[t]))\n augmented_control_net[:, k] = barycentric_combination[:, comb]\n\n covered_points.append(k)\n t += 1\n \n def on_left_diagonals_triangles_handler(triangles): \n if print_logs: print(\"on LEFT diagonal:\") \n\n for l, triangle in zip(range(1, old_order), triangles):\n comb = old_order - l \n log(l, comb, triangle, rotate_clockwise(triangle))\n barycentric_combination = barycentric_combine(triangle)\n augmented_control_net[:, l] = barycentric_combination[:, comb]\n covered_points.append(l)\n\n def on_right_diagonals_triangles_handler(triangles):\n if print_logs: print(\"on RIGHT diagonal:\") \n\n right_diagonal = [r for r in np.cumsum([old_order+1] + list(range(old_order,2,-1)))]\n for (ri, r), triangle in zip(enumerate(right_diagonal), triangles):\n comb = right_diagonal[-(ri+1)] \n log(r, comb, triangle, rotate_clockwise(triangle))\n barycentric_combination = barycentric_combine(triangle)\n augmented_control_net[:, r] = barycentric_combination[:, comb]\n covered_points.append(r)\n\n def on_bottom_diagonals_triangles_handler(triangles):\n if print_logs: print(\"on BOTTOM diagonal:\") \n\n bottom_diagonal = [b for b in np.cumsum([2*old_order] + list(range(old_order-1,1,-1)))]\n for (bi, b), triangle in zip(enumerate(bottom_diagonal), triangles):\n comb = bottom_diagonal[-(bi+1)]\n log(b, comb, triangle, rotate_clockwise(triangle))\n 
barycentric_combination = barycentric_combine(triangle)\n augmented_control_net[:, b] = barycentric_combination[:, comb]\n covered_points.append(b)\n\n upside_down_triangles_handler(partitioned_triangles['upside_down']) \n on_left_diagonals_triangles_handler(partitioned_triangles['on_left_inv_diagonal'])\n on_right_diagonals_triangles_handler(partitioned_triangles['on_right_diagonal'])\n on_bottom_diagonals_triangles_handler(partitioned_triangles['on_bottom_diagonal'])\n\n augmented_control_net[:, 0] = control_net[:, 0]\n augmented_control_net[:, old_order] = control_net[:, old_order - 1]\n augmented_control_net[:, -1] = control_net[:, -1] \n\n covered_points.append(0)\n covered_points.append(old_order)\n covered_points.append(int((old_order+1)*(old_order+2)/2)-1)\n \n assert sorted(covered_points) == list(range(int((old_order+1)*(old_order+2)/2))), (\n \"order: {}\\ncovered_points: {}\\nexpected points: {}\".format(\n order, sorted(covered_points), list(range(int((old_order+1)*(old_order+2)/2)))))\n\n return new_order, augmented_control_net",
"def prep_data(filename):\n column_name = ['time', 'x_accel', 'y_accel', 'z_accel', 'total_accel']\n raw_data = pd.read_csv(filename, names=column_name, header=None) \n raw_data = raw_data.drop(0)\n\n raw_data['time'] = raw_data['time'].apply(to_float)\n raw_data['total_accel'] = raw_data['total_accel'].apply(to_float)\n raw_data['x_accel'] = raw_data['x_accel'].apply(to_float)\n raw_data['y_accel'] = raw_data['y_accel'].apply(to_float)\n raw_data['z_accel'] = raw_data['z_accel'].apply(to_float)\n \n accel = raw_data['total_accel'].tolist()\n time = raw_data['time'].tolist()\n x = raw_data['x_accel'].tolist()\n y = raw_data['y_accel'].tolist()\n z = raw_data['z_accel'].tolist()\n\n # Lowess accelerations \n x_lowess = lowess(x, time, frac=0.09)\n y_lowess = lowess(y, time, frac=0.09)\n z_lowess = lowess(z, time, frac=0.09)\n\n x_vel = np.trapz(x_lowess[:,1], time)\n y_vel = np.trapz(y_lowess[:,1], time)\n z_vel = np.trapz(z_lowess[:,1], time)\n \n print(x_vel, y_vel, z_vel)\n print(calc_distance((0,0), (x_vel, y_vel))) \n lowess_columns = {'time':time,\n 'x_lowess':x_lowess[:,1],\n 'y_lowess':y_lowess[:,1],\n 'z_lowess':z_lowess[:,1]\n }\n data_lowess = pd.DataFrame(lowess_columns)\n plt.plot(time, x_lowess[:,1], \"r-\", linewidth=2, alpha=0.2)\n plt.plot(time, y_lowess[:,1], \"g-\", linewidth=2, alpha=0.2)\n plt.plot(time, z_lowess[:,1], \"b-\", linewidth=2, alpha=0.2)\n plt.title('LOWESS smoothed acceleration')\n plt.xlabel('time')\n plt.ylabel('acceleration')\n plt.legend(['x', 'y', 'z'])\n plt.show()\n \n # Kalman Filter accelerations\n \n \"\"\"\n # Kalman Filter\n std = np.std(accel)\n print(std)\n initial_guess = [0]\n observation_covariance = std**2#np.diag([std, std]) ** 2\n kf = KalmanFilter(\n initial_state_mean= initial_guess,\n initial_state_covariance=observation_covariance,\n observation_covariance=observation_covariance,\n )\n pred_state, state_cov = kf.smooth(accel)\n \n plt.plot(time, pred_state[:,0], 'g-', linewidth=2, alpha=0.5)\n plt.plot(time, accel, 'b.', alpha = 0.1)\n plt.show()\n \"\"\"",
"def construct_linear_system(self):\n N=self.grid.Ncells()\n Nbc = len(self.dirichlet_bcs)\n self.Ncalc=Ncalc = N - Nbc\n\n # map cells to forced values\n dirichlet = dict( [ (c,v) for c,v,xy in self.dirichlet_bcs])\n\n self.is_calc_c = is_calc_c = np.ones(N,np.bool8)\n for c,v,xy in self.dirichlet_bcs:\n is_calc_c[c] = False\n\n # is_calc_c[self.c_mask] = False\n\n # c_map is indexed by real cell indices, and returns the matrix index\n c_map = self.c_map = np.zeros(N,np.int32)\n self.c_map[is_calc_c] = np.arange(Ncalc)\n\n dzc=self.dzc\n dzf=self.dzf\n area_c=self.area_c\n\n meth='coo' # 'dok'\n if meth == 'dok':\n A=sparse.dok_matrix((Ncalc,Ncalc),np.float64)\n else:\n # construct the matrix from a sequence of indices and values\n ij=[]\n values=[] # successive value for the same i.j will be summed\n \n b = np.zeros(Ncalc,np.float64)\n flux_per_gradient_j = -self.K_j * self.l_j * dzf / self.d_j * self.dt\n\n self.grid.edge_to_cells() # makes sure that edges['cells'] exists.\n \n for j in range(self.grid.Nedges()):\n e = self.grid.edges[j]\n ic1,ic2 = e['cells']\n \n if ic1<0 or ic2<0 or e['deleted']:\n continue # boundary edge, or deleted edge\n \n flux_per_gradient=flux_per_gradient_j[j]\n \n # this is the desired operation:\n # Cdiff[ic1] -= flux_per_gradient / (An[ic1]*dzc) * (C[ic2] - C[ic1])\n # Cdiff[ic2] += flux_per_gradient / (An[ic2]*dzc) * (C[ic2] - C[ic1])\n # Where Cdiff is row, C is col\n\n if is_calc_c[ic1] and is_calc_c[ic2]:\n mic2 = c_map[ic2]\n mic1 = c_map[ic1]\n v1=flux_per_gradient / (area_c[ic1]*dzc[ic1])\n v2=flux_per_gradient / (area_c[ic2]*dzc[ic2])\n \n if meth == 'dok':\n A[mic1,mic2] -= v1\n A[mic1,mic1] += v1\n A[mic2,mic2] += v2\n A[mic2,mic1] -= v2\n else:\n ij.append( (mic1,mic2) ) ; values.append(-v1)\n ij.append( (mic1,mic1) ) ; values.append(v1)\n ij.append( (mic2,mic2) ) ; values.append(v1)\n ij.append( (mic2,mic1) ) ; values.append(-v1)\n \n elif not ( is_calc_c[ic1] or is_calc_c[ic2] ):\n # both are dirichlet, so nothing to do\n pass\n elif not is_calc_c[ic2]:\n mic1 = c_map[ic1]\n v=flux_per_gradient / (self.area_c[ic1]*dzc[ic1])\n if meth == 'dok':\n A[mic1,mic1] += v\n else:\n ij.append( (mic1,mic1) )\n values.append(v)\n\n # roughly\n # A[1,1]*x[1] + A[1,2]*x[2] + ... = b[1]\n # but we already know x[2],\n # A[1,1]*x[1] + ... 
= b[1] - A[1,2]*x[2]\n # so flip the sign, multiply by known dirichlet value, and\n # add to the RHS\n b[mic1] += flux_per_gradient / (area_c[ic1]*dzc[ic1]) * dirichlet[ic2]\n else: # not is_calc_c[c1]\n mic2 = c_map[ic2]\n # A[mic2,mic2] += flux_per_gradient / (area_c[ic2]*dzc[ic2])\n # A[mic2,mic1] -= flux_per_gradient / (area_c[ic2]*dzc[ic2])\n\n # A[mic2,mic2]*x[2] + A[mic2,mic1]*x[1] = b[2]\n # ...\n # A[mic2,mic2]*x[2] - flux_per_gradient / (area_c[ic2]*dzc[ic2])*x[1] = b[2]\n # ...\n # A[mic2,mic2]*x[2] = b[2] + flux_per_gradient / (area_c[ic2]*dzc[ic2])*x[1]\n v=flux_per_gradient / (area_c[ic2]*dzc[ic2])\n if meth == 'dok':\n A[mic2,mic2] += v\n else:\n ij.append( (mic2,mic2) )\n values.append(v)\n b[mic2] += flux_per_gradient / (area_c[ic2]*dzc[ic2]) * dirichlet[ic1]\n\n # Used to test 'is not 0:' but modern python complains\n if isinstance(self.alpha,np.ndarray): \n for c in range(N):\n if self.is_calc_c[c]:\n mic=self.c_map[c]\n v=self.alpha[c]*self.dt\n if meth == 'dok':\n A[mic,mic] -= v\n else:\n ij.append( (mic,mic) )\n values.append(-v)\n\n # Flux boundary conditions:\n for ic,value,xy in self.neumann_bcs:\n mic=c_map[ic]\n # make mass/time into concentration/step\n # arrived at minus sign by trial and error.\n # 2023-08-04: there was a bug here that used ic2 instead of ic.\n b[mic] -= value/(area_c[ic]*dzc[ic]) * self.dt\n\n if meth == 'dok':\n self.A = sparse.coo_matrix(A)\n else:\n ijs=np.array(ij,dtype=np.int32)\n data=np.array(values,dtype=np.float64)\n A=sparse.coo_matrix( (data, (ijs[:,0],ijs[:,1]) ), shape=(Ncalc,Ncalc) )\n self.A=A\n \n # report scale to get a sense of whether dt is too large\n Ascale = A.diagonal().min()\n log.debug(\"Ascale is %s\"%Ascale)\n\n self.b = b",
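"# Added sketch: the COO-assembly behavior construct_linear_system relies on --\n# scipy's coo_matrix sums duplicate (i, j) entries, so per-edge stencil\n# contributions can simply be appended and are summed on conversion.\nimport numpy as np\nfrom scipy import sparse\n\nij = [(0, 0), (0, 1), (0, 0)] # (0, 0) appears twice\nvalues = [1.0, 2.0, 3.0]\nijs = np.array(ij, dtype=np.int32)\ndata = np.array(values, dtype=np.float64)\nA = sparse.coo_matrix((data, (ijs[:, 0], ijs[:, 1])), shape=(2, 2))\nprint(A.toarray()) # [[4. 2.], [0. 0.]] -- the duplicates were summed",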
"def lagrangePoints(mu):\n \n # define l = 1-mu\n l = 1 - mu\n \n # collinear points\n def eqL1(x):\n fval = x**5 + 2*(mu-l)*x**4 + (l**2-4*l*mu+mu**2)*x**3 + (2*mu*l*(l-mu)+mu-l)*x**2 + (mu**2*l**2+2*(l**2+mu**2))*x + mu**3-l**3\n #fval = gamma**5 - (3-mu)*gamma**4 + (3-2*mu)*gamma**3 - mu*gamma**2 + 2*mu*gamma - mu\n return fval\n sol_l1 = optimize.root(eqL1, 0.5, method='hybr')\n l1 = np.array([sol_l1.x[0] , 0, 0])\n \n def eqL2(x):\n fval = x**5 + 2*(mu-l)*x**4 + (l**2-4*l*mu+mu**2)*x**3 + (2*mu*l*(l-mu)-(mu+l))*x**2 + (mu**2*l**2+2*(l**2-mu**2))*x - (mu**3+l**3)\n #fval = gamma**5 + (3-mu)*gamma**4 + (3-2*mu)*gamma**3 - mu*gamma**2 - 2*mu*gamma - mu\n return fval\n sol_l2 = optimize.root(eqL2, 1.5, method='hybr')\n l2 = np.array([sol_l2.x[0] , 0, 0])\n \n def eqL3(x):\n fval = x**5 + 2*(mu-l)*x**4 + (l**2-4*mu*l+mu**2)*x**3 + (2*mu*l*(l-mu)+(l+mu))*x**2 + (mu**2*l**2+2*(mu**2-l**2))*x + l**3+mu**3\n return fval\n sol_l3 = optimize.root(eqL3, -1, method='hybr')\n l3 = np.array([sol_l3.x[0] , 0, 0])\n \n # equilateral points\n # L4\n l4 = np.array([np.cos(np.pi/3) - mu , np.sin(np.pi/3), 0])\n # L5\n l5 = np.array([np.cos(np.pi/3) - mu , -np.sin(np.pi/3), 0])\n \n return _lagrangePointsReturn(l1,l2,l3,l4,l5)",
"def makeSlopeMap():\n a=numpy.zeros((ncents/2,2),numpy.int32)\n subFlag=makeSubapMap()#subapFlag.copy()\n # for i in range(7):#ngs 1-3, truth, lgs, lofs, hofs\n # tmp=subFlag[nsub[:i].sum():nsub[:i+1].sum()]\n # tmp.shape=nsuby[i],nsubx[i]\n # if i==5:#lofs\n # tmp[:]=sfNoObs*(i+1)\n # elif i==6:#hofs\n # tmp[:]=sf14NoObs*(i+1)\n # else:\n # tmp[:]=individualSubapFlag*(i+1)\n pos=0\n for i in range(subFlag.size):\n if subFlag[i]!=0:\n a[pos]=subFlag[i]\n pos+=1\n return a",
"def calcAllIntensities(self, xc, yc):\n\n tp = 0.0\n ix = 0\n iy = 0\n h = 0\n ints = np.zeros([5, 5])\n ints_inner = np.zeros([5, 5])\n # ints = [[0.0] * 5] * 5\n # ints_inner = [[0.0] * 5] * 5\n x = 0.0\n y = 0.0\n xc1 = 0.0\n yc1 = 0.0\n xc1 = xc\n yc1 = yc\n \n for h in np.arange(1,5,1):\n for k in np.arange(1,5,1):\n ints[h][k] = 0.0\n ints_inner[h][k] = 0.0\n\n for k in np.arange(0, 2, 1):\n for h in np.arange(0, 2, 1):\n for ix in np.arange(0, self.stepp + 1, 1):\n for iy in np.arange(0, self.stepp + 1, 1):\n #print(k, h, ix, iy)\n if self.qc_format == 0 :\n x = -(1 + self.G) + h * (1 + 2 * self.G) + (ix * (1.0 / self.stepp))\n y = -(1 + self.G) + k * (1 + 2 * self.G) + (iy * (1.0 / self.stepp))\n if self.spot_radius == 0 or math.sqrt(math.pow((x - xc1),2) + math.pow((y - yc1),2)) == 0 :\n tp = 0.0\n else :\n tp = (math.sin((1 / self.spot_radius) * math.sqrt(math.pow((x - xc1),2) + math.pow((y - yc1),2)))) / ((1 / self.spot_radius) * math.sqrt(math.pow((x - xc1),2) + math.pow((y - yc1),2)))\n tp = math.pow(tp,2)\n #print(tp)\n elif self.qc_format == 1 :\n x = -1 + h + (ix * (1 / self.stepp))\n y = -1 + k + (iy * (1 / self.stepp))\n ints[h + 1][k + 1] += math.pow(math.exp((math.pow((x - xc1),2) + math.pow((y - yc1),2) ) / math.pow(self.spot_radius,2)), -1)\n if (self.spot_radius * self.spot_radius) == 0 or ((x - xc1) * (y - yc1) * np.pi * np.pi) == 0 :\n tp = 0.0\n else :\n tp = (math.sin((x - xc1) * np.pi / self.spot_radius) * math.sin((y - yc1) * np.pi / self.spot_radius)) / (((x - xc1) * (y - yc1) * np.pi * np.pi) / (self.spot_radius * self.spot_radius))\n\n if (math.pow(x,2) + math.pow(y,2)) <= math.pow(self.radius_inner,2):\n ints_inner[h + 1][k + 1] += tp\n else :\n if self.qc_format == 1 :\n if (math.pow(x,2) + math.pow(y,2)) <= math.pow(self.cell_qc, 2):\n ints[h + 1][k + 1] += tp\n if (math.pow(x,2) + math.pow(y,2)) <= 1 :\n #print(math.pow(x,2) + math.pow(y,2))\n ints[h + 1][k + 1] += tp\n # print(ints[h + 1][k + 1])\t\t\t\t\t\t\n tp = 0.0\n\n # print(ints)\n\n Aq = 0.0\n Bq = 0.0\n Cq = 0.0\n Dq = 0.0\n Ac_inner = 0.0\n Bc_inner = 0.0\n Cc_inner = 0.0\n Dc_inner = 0.0\n Ac = 0.0\n Bc = 0.0\n Cc = 0.0\n Dc = 0.0\n Ac = ints[1][2]\n Bc = ints[2][2]\n Cc = ints[2][1]\n Dc = ints[1][1]\n\n Ac_inner = ints_inner[1][2]\n Bc_inner = ints_inner[2][2]\n Cc_inner = ints_inner[2][1]\n Dc_inner = ints_inner[1][1]\n Ac *= self.QE\n Bc *= self.QE\n Cc *= self.QE\n Dc *= self.QE\n\n Ac_inner *= self.QE_inner\n Bc_inner *= self.QE_inner\n Cc_inner *= self.QE_inner\n Dc_inner *= self.QE_inner\n Ac += Ac_inner\n Bc += Bc_inner\n Cc += Cc_inner\n Dc += Dc_inner\n\n Aq = Ac\n Bq = Bc\n Cq = Cc\n Dq = Dc\n\n #tp/TP = cotribution percentage of the spot with respect to max (spot center)\n if self.smooth == 0 :\n if (Config.hplk_c0_e * self.TP) == 0 :\n cnst = 0\n else :\n cnst = ((Parameters.TPS / (self.n_ml * self.n_ml)) * self.lamb) / (Config.hplk_c0_e * self.TP) #Número de fótons efeticos\n if Config.flag_spice == 1 :\n Ac *= Parameters.TPS / (self.n_ml * self.n_ml * self.TP) #W\n Bc *= Parameters.TPS / (self.n_ml * self.n_ml * self.TP)\n Cc *= Parameters.TPS / (self.n_ml * self.n_ml * self.TP)\n Dc *= Parameters.TPS / (self.n_ml * self.n_ml * self.TP)\n Ac *= 1 / (math.pow(self.cell_qc * 1e-6,2)) #W/(m^2)\n Bc *= 1 / (math.pow(self.cell_qc * 1e-6,2))\n Cc *= 1 / (math.pow(self.cell_qc * 1e-6,2))\n Dc *= 1 / (math.pow(self.cell_qc * 1e-6,2))\n #Ac *= 1 / (self.lamb * 1e6); #Adequação da irradiância para a unidade W/m2micm conforme necessário no SPICE\n #Bc *= 1 / (self.lamb * 1e6);\n #Cc *= 
1 / (self.lamb * 1e6);\n #Dc *= 1 / (self.lamb * 1e6);\n \n ############################## DOUBLE CHECK ##############################\n # self.grava_arquivos = 1\n # self.flag_V_QC = 0\n # grava_le_arquivos(0) # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n # self.flag_V_QC = 1\n # self.grava_arquivos = 0\n ############################## DOUBLE CHECK ##############################\n Aq *= cnst * 1e9\n Bq *= cnst * 1e9\n Cq *= cnst * 1e9\n Dq *= cnst * 1e9\n else :\n Aq *= cnst * 1e9\n Bq *= cnst * 1e9\n Cq *= cnst * 1e9\n Dq *= cnst * 1e9\n\n # 'returns' all the intensities\n self.A_intensity = Aq\n self.B_intensity = Bq\n self.C_intensity = Cq\n self.D_intensity = Dq",
"def xy(self):\n ...",
"def pontos(self):\n \n self.sc = 1. \n self.x = self.sc*np.array([-155., -139.4, -124., -108.5, -93., -77.5, -62., -46.5, -31., -15.5, 0, 15.5, 31., 46.5, 62., 77.5, 93., 108.5, 124., 139.5, 155.])\n self.y = self.sc*np.array([ 9.23, 14.37, 18.98, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 23.6, 21.55, 14.37, 3.59])\n self.px_index = len(self.x)\n #self.py_index = len(self.x)/2\n\n self.coord = np.array([self.x,self.y,np.full(len(self.x),self.z)])\n \n self.x = self.x[::-1]\n self.y = -self.y[::-1] \n self.new = np.array([self.x,self.y,np.full(len(self.x),self.z)])\n self.coord = np.array([np.append(self.coord[0],self.new[0]),np.append(self.coord[1],self.new[1]),np.append(self.coord[2],self.new[2])])\n self.coord = np.array([np.append(self.coord[0],self.coord[0,0]),np.append(self.coord[1],self.coord[1,0]),np.append(self.coord[2],self.coord[2,0])])\n\n self.coord[0] = self.coord[0] - (np.amax(self.coord[0])+np.amin(self.coord[0]))/2\n self.coord[1] = self.coord[1] + (np.amax(self.coord[1])-np.amin(self.coord[1]))/2 \n \n self.coordi = np.array(self.coord)\n \n self.cg = np.array([0 + self.dx, self.H/2 + self.dy, self.z]) \n self.cgi = np.array(self.cg)\n \n self.thi = 0. + self.dth \n self.th = float(self.thi) \n \n self.coordnav(self.dx,self.dy,self.dth)",
"def alignChain(entry,prec=1E-4,seed_index=0,supercell=2,\n c_mag=50,dist_from_line=0):\n\n new_struct = copy.deepcopy(entry[0])\n\n new_latt = getNewLattice(entry,1,prec,seed_index,supercell,c_mag)\n\n v1,v2,perp=new_latt\n new_latt=np.array(new_latt)\n line1 = Line3D(new_latt[0],[0,0,0])\n line = line1.parallel_line(new_struct.sites[0].coords)\n trans = list(itertools.product([1,-1,0],repeat=3))\n\n lat = np.array(new_struct.lattice.as_dict()['matrix'])\n final_sites = []\n i=0\n i=-1\n for site in [x.coords for x in new_struct.sites]:\n\n i+=1\n point = Point3D(site)\n if line.distance(point)<dist_from_line:\n final_sites.append(site)\n else:\n news = []\n for t in trans:\n point = Point3D(site+np.dot(lat.T,t))\n news.append([float(line.distance(point)),t])\n news.sort(key = lambda x: x[0])\n final_sites.append(site+np.dot(lat.T,news[0][1]))\n\n\n\n new_fracs = np.linalg.solve(new_latt.T,np.array(final_sites).T).T\n species = new_struct.species\n return([species,new_fracs,new_latt])",
"def __init__(\n self,\n reciprocal_lattice: Lattice,\n original_points: np.ndarray,\n original_dim: np.ndarray,\n extra_points: np.ndarray,\n ir_to_full_idx: Optional[np.ndarray] = None,\n extra_ir_points_idx: Optional[np.ndarray] = None,\n nworkers: int = pdefaults[\"nworkers\"],\n ):\n self._nworkers = nworkers if nworkers != -1 else cpu_count()\n self._final_points = np.concatenate([original_points, extra_points])\n self._reciprocal_lattice = reciprocal_lattice\n\n if ir_to_full_idx is None:\n ir_to_full_idx = np.arange(len(original_points) + len(extra_points))\n\n if extra_ir_points_idx is None:\n extra_ir_points_idx = np.arange(len(extra_points))\n\n logger.debug(\"Initializing periodic Voronoi calculator\")\n all_points = np.concatenate((original_points, extra_points))\n\n logger.debug(\" ├── getting supercell k-points\")\n supercell_points = get_supercell_points(all_points)\n supercell_idxs = np.arange(supercell_points.shape[0])\n\n # filter points far from the zone boundary, this will lead to errors for\n # very small meshes < 5x5x5 but we are not interested in those\n mask = ((supercell_points > -0.75) & (supercell_points < 0.75)).all(axis=1)\n supercell_points = supercell_points[mask]\n supercell_idxs = supercell_idxs[mask]\n\n # want points in cartesian space so we can define a regular spherical\n # cutoff even if reciprocal lattice is not cubic. If we used a\n # fractional cutoff, the cutoff regions would not be spherical\n logger.debug(\" ├── getting cartesian points\")\n cart_points = reciprocal_lattice.get_cartesian_coords(supercell_points)\n cart_extra_points = reciprocal_lattice.get_cartesian_coords(\n extra_points[extra_ir_points_idx])\n\n # small cutoff is slightly larger than the max regular grid spacing\n # means at least 1 neighbour point will always be included in each\n # direction, need to find cartesian length which covers the longest direction\n # of the mesh\n spacing = 1 / original_dim\n body_diagonal = reciprocal_lattice.get_cartesian_coords(spacing)\n xy = reciprocal_lattice.get_cartesian_coords([spacing[0], spacing[1], 0])\n xz = reciprocal_lattice.get_cartesian_coords([spacing[0], 0, spacing[2]])\n yz = reciprocal_lattice.get_cartesian_coords([0, spacing[1], spacing[2]])\n\n len_diagonal = np.linalg.norm(body_diagonal)\n len_xy = np.linalg.norm(xy)\n len_xz = np.linalg.norm(xz)\n len_yz = np.linalg.norm(yz)\n\n small_cutoff = (np.max([len_diagonal, len_xy, len_xz, len_yz]) * 1.6)\n big_cutoff = (small_cutoff * 1.77)\n\n logger.debug(\" ├── initializing ball tree\")\n\n # use BallTree for quickly evaluating which points are within cutoffs\n tree = BallTree(cart_points)\n\n n_supercell_points = len(supercell_points)\n\n # big points are those which surround the extra points within the big cutoff\n # (including the extra points themselves)\n logger.debug(\" ├── calculating points in big radius\")\n big_points_idx = _query_radius_iteratively(\n tree, n_supercell_points, cart_extra_points, big_cutoff)\n\n # Voronoi points are those we actually include in the Voronoi diagram\n self._voronoi_points = cart_points[big_points_idx]\n\n # small points are the points in all_points (i.e., original + extra points) for\n # which we want to calculate the Voronoi volumes. 
Outside the small cutoff, the\n # weights will just be the regular grid weight.\n logger.debug(\" └── calculating points in small radius\")\n small_points_idx = _query_radius_iteratively(\n tree, n_supercell_points, cart_extra_points, small_cutoff)\n\n # get the irreducible small points\n small_points_in_all_points = supercell_idxs[small_points_idx] % len(all_points)\n mapping = ir_to_full_idx[small_points_in_all_points]\n unique_mappings, ir_idx = np.unique(mapping, return_index=True)\n small_points_idx = small_points_idx[ir_idx]\n\n # get a mapping to go from the ir small points to the full BZ.\n groups = groupby(np.arange(len(all_points)), ir_to_full_idx)\n grouped_ir = groups[unique_mappings]\n counts = [len(g) for g in grouped_ir]\n self._expand_ir = np.repeat(np.arange(len(ir_idx)), counts)\n\n # get the indices of the expanded ir_small_points in all_points\n self._volume_in_final_idx = np.concatenate(grouped_ir)\n\n # get the indices of ir_small_points_idx (i.e., the points for which we will\n # calculate the volume) in voronoi_points\n self._volume_points_idx = _get_loc(big_points_idx, small_points_idx)\n\n # Prepopulate the final volumes array. By default, each point has the\n # volume of the original mesh. Note: at this point, the extra points\n # will have zero volume. This array will be updated by\n # compute_volumes\n self._volume = reciprocal_lattice.volume\n self._final_volumes = np.full(len(all_points), 1 / len(original_points))\n self._final_volumes[len(original_points):] = 0\n self._final_volumes[self._volume_in_final_idx] = 0\n\n # from pymatgen import Structure\n # s = Structure(\n # reciprocal_lattice.matrix * 10,\n # ['H'] * len(self._volume_points_idx),\n # reciprocal_lattice.get_fractional_coords(self._voronoi_points[self._volume_points_idx]) / 3 + 0.5,\n # )\n # s.to(filename=\"volume-points.cif\")\n #\n # s = Structure(\n # reciprocal_lattice.matrix * 10,\n # ['H'] * len(self._voronoi_points),\n # reciprocal_lattice.get_fractional_coords(self._voronoi_points) / 3 + 0.5,\n # )\n # s.to(filename=\"voronoi-points.cif\")",
"def expandcal(self):\n ind=np.zeros(self.spec.shape[0]).astype(int)\n for k in range(self.nscan):\n ind[self.getscanind(k)]=k\n ind[self.getcalind(k)]=k\n return ind",
"def calc_points_harbor(self):\n points = 0\n if self.cnt_1 + self.cnt_2 + self.cnt_3 + self.cnt_4 + self.cnt_5 >= 2:\n hor = 0\n for i in range(4):\n j = 0\n while j < 5 and ord(self.b[i * 5 + j]) >= 54:\n j += 1\n if j < 4:\n start = j\n j += 1\n while j < 5 and ord(self.b[i * 5 + j]) < 54:\n j += 1\n length = j - start\n if length > hor:\n hor = length\n vptab_harbor = (0, 0, 3, 7, 12, 18)\n points += vptab_harbor[hor]\n ver = 0\n for j in range(5):\n i = 0\n while i < 4 and ord(self.b[i * 5 + j]) >= 54:\n i += 1\n if i < 3:\n start = i\n i += 1\n while i < 4 and ord(self.b[i * 5 + j]) < 54:\n i += 1\n length = i - start\n if length > ver:\n ver = length\n points += vptab_harbor[ver]\n if 'cust' in args.exp:\n if ver == 4 or hor == 5:\n points += 5\n points += 2 * self.cnt_2 + 3 * self.cnt_3\n return points",
"def img2heliovec(bxImg,byImg,bzImg,lon,lat,lonc,latc,pAng):\n a11 = -np.sin(latc)*np.sin(pAng)*np.sin(lon - lonc) + np.cos(pAng)*np.cos(lon - lonc)\n a12 = np.sin(latc)*np.cos(pAng)*np.sin(lon - lonc) + np.sin(pAng)*np.cos(lon - lonc)\n a13 = -np.cos(latc)*np.sin(lon - lonc)\n a21 = -np.sin(lat)*(np.sin(latc)*np.sin(pAng)*np.cos(lon - lonc) + np.cos(pAng)*np.sin(lon - lonc)) - np.cos(lat)*np.cos(latc)*np.sin(pAng)\n a22 = np.sin(lat)*(np.sin(latc)*np.cos(pAng)*np.cos(lon - lonc) - np.sin(pAng)*np.sin(lon - lonc)) + np.cos(lat)*np.cos(latc)*np.cos(pAng)\n a23 = -np.cos(latc)*np.sin(lat)*np.cos(lon - lonc) + np.sin(latc)*np.cos(lat)\n a31 = np.cos(lat)*(np.sin(latc)*np.sin(pAng)*np.cos(lon - lonc) + np.cos(pAng)*np.sin(lon - lonc)) - np.sin(lat)*np.cos(latc)*np.sin(pAng)\n a32 = -np.cos(lat)*(np.sin(latc)*np.cos(pAng)*np.cos(lon - lonc) - np.sin(pAng)*np.sin(lon - lonc)) + np.sin(lat)*np.cos(latc)*np.cos(pAng)\n a33 = np.cos(lat)*np.cos(latc)*np.cos(lon - lonc) + np.sin(lat)*np.sin(latc)\n\n bxHelio = a11 * bxImg + a12 * byImg + a13 * bzImg\n byHelio = a21 * bxImg + a22 * byImg + a23 * bzImg\n bzHelio = a31 * bxImg + a32 * byImg + a33 * bzImg\n\n return bxHelio,byHelio,bzHelio",
"def direction_coordinates(self, gc_lines):\n lins = [(_line[0][mid], _line[0][mid + 1], _line[1][mid], _line[1][mid + 1])\n for _line, mid in zip(gc_lines, [len(_line[0]) // 2 for _line in gc_lines])\n if len(_line[0]) > 2]\n lens = [np.hypot(_line[0][0] - _line[0][-1], _line[0][0] - _line[0][-1]) * 110.\n for _line in gc_lines\n if len(_line[0]) > 2]\n lins = [(x0 * np.cos(np.deg2rad(np.mean([y0, y1]))), x1 * np.cos(np.deg2rad(np.mean([y0, y1]))), y0, y1)\n for x0, x1, y0, y1 in lins]\n lins = [_x for _x, _l in zip(lins, lens) if _l > 10]\n\n direction = [(0.5 * (x0 + x1), 0.5 * (y0 + y1), x1 - x0, y1 - y0) for x0, x1, y0, y1 in lins]\n direction = [(_u, _v, _x / np.hypot(_x, _y), _y / np.hypot(_x, _y))\n for _u, _v, _x, _y in direction]\n los = [rotate_point(point[2:], -self.dsbObsAngleAzimuth.value()) for point in direction]\n\n dist = 1.\n tp_dir = (np.array(los).T * dist).T\n\n tps = [(x0, y0, x0 + tp_x, y0 + tp_y) for\n ((x0, y0, _, _), (tp_x, tp_y)) in zip(direction, tp_dir)]\n tps = [[(x0 / np.cos(np.deg2rad(y0)), y0), (x1 / np.cos(np.deg2rad(y0)), y1)] for (x0, y0, x1, y1) in tps]\n return tps",
"def get_xyz(self, H, K, L):\n v7 = vec(H, K, L)\n v6 = self.Bmat * v7\n v5 = self.Umat * v6\n\n def ewald_condition(phi): return (\n norm(self.Evec)**2 - norm(self.Gmat(phi)*v5 + self.Evec)**2)\n\n phis = []\n if H == 0 and K == 0 and L == 0:\n pass\n elif optimize.fsolve(ewald_condition, 45.0, full_output=1)[2] == 1:\n phis = list(\n np.unique(\n np.around(\n [optimize.fsolve(ewald_condition, phi) % 360\n for phi in np.arange(30, 390, 15)],\n decimals=4)))\n\n def get_ij(phi):\n v4 = self.Gmat(phi) * v5\n p = norm_vec(v4 + self.Evec)\n v3 = -(self.Dvec[0, 0] / p[0, 0]) * p\n v2 = self.Dmat * (v3 + self.Dvec)\n v1 = (self.Omat * v2 / self.pixel_size) + self.Cvec\n return v1[0, 0], v1[1, 0]\n\n peaks = []\n for phi in phis:\n x, y = get_ij(phi)\n z = ((phi - self.phi_start) / self.phi_step) % 3600\n if z < 25:\n z = z + 3600\n elif z > 3625:\n z = z - 3600\n if x > 0 and x < self.shape[1] and y > 0 and y < self.shape[0]:\n peaks.append(NXPeak(x, y, z, H=H, K=K, L=L, parent=self))\n\n peaks = [peak for peak in peaks if peak.z > 0 and peak.z < 3648]\n\n return peaks",
"def store_horiz_points_clean(self, ring_points, display_opt):\n plane_colors = [(0, 255, 0), (0, 0, 255), (0.9100, 0.4100, 0.1700)] #4,2,3\n\n num_ring_pts = len(ring_points)\n\n # declare arrays for storage ..\n landmarks = []\n landmarks_2chs = []\n landmarks_4chs = []\n\n y = []\n y_2ch = []\n y_4ch = []\n\n # find top and low pts for the 2ch and 4ch views at the ideal angles\n _, _, highest_2ch_orig, _ = self.find_top_low_pts3([self.orig_cut_poly_array[1][0],self.orig_cut_poly_array[1][1]])\n _, _, highest_4ch_orig, _ = self.find_top_low_pts3([self.orig_cut_poly_array[0][0],self.orig_cut_poly_array[0][1]])\n\n for (i, offset_2ch) in enumerate(ring_points):\n offset_pt_2ch = np.asarray(offset_2ch)\n\n two_ch_view_plane_normal = find_plane_eq(offset_pt_2ch, highest_2ch_orig[0], highest_2ch_orig[1])\n\n cutPoly_2ch, _ = self.get_edges_strips(two_ch_view_plane_normal, offset_pt_2ch,\n \"2ch ring_pt index = \" + str(i), self.plane_colors[1])\n\n lowest_point_2ch, lowest_point_2ch_idx, highest_points_2ch, sorted_pts_endo_epi = self.find_top_low_pts3([cutPoly_2ch[0], cutPoly_2ch[1]])\n\n horiz_2ch_a, horiz_2ch_b = self.get_landmarks(sorted_pts_endo_epi[0], lowest_point_2ch_idx, 0)\n\n landmarks_3D = np.concatenate((horiz_2ch_a, np.flip(horiz_2ch_b, 0)))\n\n # remember to add the lowest point\n current_np = len(landmarks_3D)\n idx = int(np.floor(current_np/2.0)) # find middle index\n landmarks_3D = np.insert(landmarks_3D, idx, lowest_point_2ch, axis=0)\n landmarks_2D = project_onto_xy_plane(landmarks_3D)\n landmarks_2chs.append(landmarks_2D)\n\n # now assess volume for labels\n horiz_2ch_dists, horiz_4ch_dists = self.compute_horizontal_distances(horiz_2ch_a, horiz_2ch_b,\n self.ideal_horiz_4ch_a,\n self.ideal_horiz_4ch_b)\n\n # set L = length of LV cavity (choose longest from 2ch or 4ch)\n L = get_length_of_LV_cavity(horiz_2ch_a, horiz_2ch_b,\n self.ideal_horiz_4ch_a, self.ideal_horiz_4ch_b,\n lowest_point_2ch, self.ideal_lowest_point_4ch)\n\n # compute percentage error\n vol = self.Simpson_bp(horiz_2ch_dists, horiz_4ch_dists, L)\n abs_error_vol = np.abs(vol - self.ideal_vol) # make all errors positive\n perc_err = 100.0*(abs_error_vol/self.ideal_vol)\n\n if perc_err > 5.0:\n y_2ch.append(int(1))\n else:\n y_2ch.append(int(0))\n\n landmarks.append(landmarks_2chs)\n y.append(y_2ch)\n\n for (i, offset_4ch) in enumerate(ring_points):\n offset_pt_4ch = np.asarray(offset_4ch)\n\n four_ch_view_plane_normal = find_plane_eq(offset_pt_4ch, highest_4ch_orig[0], highest_4ch_orig[1])\n\n cutPoly_4ch, _ = self.get_edges_strips(four_ch_view_plane_normal, offset_pt_4ch,\n \"4ch = \" + str(i), plane_colors[0])\n\n lowest_point_4ch, lowest_point_4ch_idx, highest_points_4ch, sorted_pts_endo_epi = self.find_top_low_pts3([cutPoly_4ch[0], cutPoly_4ch[1]])\n\n horiz_4ch_a, horiz_4ch_b = self.get_landmarks(sorted_pts_endo_epi[0], lowest_point_4ch_idx, 0)\n\n landmarks_3D = np.concatenate((horiz_4ch_a, np.flip(horiz_4ch_b, 0)))\n\n # dont forget to add the type I landmarks (lowest points)\n current_np = len(landmarks_3D)\n idx = int(np.floor(current_np/2.0))\n landmarks_3D = np.insert(landmarks_3D, idx, lowest_point_4ch, axis=0)\n landmarks_2D = project_onto_xy_plane(landmarks_3D)\n landmarks_4chs.append(landmarks_2D)\n\n # now asssess volume for labels\n horiz_2ch_dists, horiz_4ch_dists = self.compute_horizontal_distances(self.ideal_horiz_2ch_a,\n self.ideal_horiz_2ch_b,\n horiz_4ch_a,\n horiz_4ch_b)\n\n # set L = length of LV cavity (choose longest from 2ch or 4ch)\n L = 
get_length_of_LV_cavity(self.ideal_horiz_2ch_a, self.ideal_horiz_2ch_b,\n horiz_4ch_a, horiz_4ch_b,\n self.ideal_lowest_point_2ch, lowest_point_4ch)\n\n # compute percentage error\n vol = self.Simpson_bp(horiz_2ch_dists, horiz_4ch_dists, L)\n abs_error_vol = np.abs(vol - self.ideal_vol) # make all errors positive\n perc_err = 100.0*(abs_error_vol/self.ideal_vol)\n\n if perc_err > 5.0:\n y_4ch.append(int(1))\n else:\n y_4ch.append(int(0))\n\n landmarks.append(landmarks_4chs)\n y.append(y_4ch)\n\n return landmarks, y",
"def calculate_module_offsets(self):\n \n # These aren't for instantiating, but we use them to get the dimensions\n self.poly_contact_offset = vector(0.5*contact.poly.width,0.5*contact.poly.height)\n\n # M1/M2 routing pitch is based on contacted pitch\n self.m1_pitch = max(contact.m1m2.width,contact.m1m2.height) + max(self.m1_space,self.m2_space)\n self.m2_pitch = max(contact.m2m3.width,contact.m2m3.height) + max(self.m2_space,self.m3_space)\n \n # This corrects the offset pitch difference between M2 and M1\n self.offset_fix = vector(0.5*(self.m2_width-self.m1_width),0)\n\n # delay chain will be rotated 90, so move it over a width\n # we move it up a inv height just for some routing room\n self.rbl_inv_offset = vector(self.delay_chain.height, self.inv.width)\n # access TX goes right on top of inverter, leave space for an inverter which is\n # about the same as a TX. We'll need to add rails though.\n self.access_tx_offset = vector(1.25*self.inv.height,self.rbl_inv_offset.y) + vector(0,2.5*self.inv.width)\n self.delay_chain_offset = self.rbl_inv_offset + vector(0,4*self.inv.width)\n\n # Replica bitline and such are not rotated, but they must be placed far enough\n # away from the delay chain/inverter with space for three M2 tracks\n self.bitcell_offset = self.rbl_inv_offset + vector(2*self.m2_pitch, 0) + vector(0, self.bitcell.height + self.inv.width)\n\n self.rbl_offset = self.bitcell_offset\n\n \n self.height = self.rbl_offset.y + self.rbl.height + self.m2_pitch\n self.width = self.rbl_offset.x + self.bitcell.width",
"def __hinterpolate(self):\n \n # Temp. Data holders\n upperint = []\n lowerint = []\n \n # Dont like this, because here we insert points into the rawdata\n # But it creates consisitent results in the interpolation results\n if self.__upper[0][0] != 0: self.__upper.insert(0,(0.,0.))\n if self.__lower[0][0] != 0: self.__lower.insert(0,(0.,0.))\n \n # Create points\n if self.__interpolation_method == \"l\":\n xpointsU = list(map(lambda x:x/float(self.__procPointsCount),range(0,self.__procPointsCount+1)))\n xpointsL = list(map(lambda x:x/float(self.__procPointsCount),range(0,self.__procPointsCount+1)))\n elif self.__interpolation_method == \"p\":\n xpointsU = [x**2/float(self.__procPointsCount)**2 for x in range(self.__procPointsCount+1)]\n xpointsL = [x**2/float(self.__procPointsCount)**2 for x in range(self.__procPointsCount+1)]\n \n # Calculate secants\n uppersec = [(self.__upper[i+1][1]-self.__upper[i][1])/(self.__upper[i+1][0]-self.__upper[i][0]) for i in range(len(self.__upper)-1)]\n lowersec = [(self.__lower[i+1][1]-self.__lower[i][1])/(self.__lower[i+1][0]-self.__lower[i][0]) for i in range(len(self.__lower)-1)]\n \n # Calculate tangents\n uppertan = [(uppersec[k-1]+uppersec[k])/2 for k in range(1,len(uppersec))]\n uppertan.insert(0,uppersec[0])\n uppertan.append(uppersec[-1])\n\n lowertan = [(lowersec[k-1]+lowersec[k])/2 for k in range(1,len(lowersec))]\n lowertan.insert(0,lowersec[0])\n lowertan.append(lowersec[-1])\n \n # Hermite blending functions\n p0 = lambda t: 2*t**3 - 3*t**2 + 1\n m0 = lambda t: t**3 - 2*t**2 + t\n p1 = lambda t: -2*t**3 + 3*t**2\n m1 = lambda t: t**3 - t**2\n \n # Find matching points to improve accuarcy\n matchU = [(i,j) for i in range(len(xpointsU)) for j in range(len(self.__upper)) if xpointsU[i] == self.__upper[j][0]]\n matchL = [(i,j) for i in range(len(xpointsL)) for j in range(len(self.__lower)) if xpointsL[i] == self.__lower[j][0]]\n \n # Reverse match pairs to insure no index errors\n matchU.reverse()\n matchL.reverse()\n\n# print(self.__lower)\n# print(xpointsL)\n # Pop xpoints that dont require interpolation and append the point into the upperint list\n for i in matchU:\n xpointsU.pop(i[0])\n upperint.append(self.__upper[i[1]])\n \n# print(matchL)\n \n # Same process as above but for lower airfoil\n for i in matchL:\n xpointsL.pop(i[0])\n lowerint.append(self.__lower[i[1]])\n \n # Interpolate upper points\n for xp in xpointsU:\n for i in range(len(self.__upper)-1):\n if self.__upper[i][0] < xp < self.__upper[i+1][0]:\n h = self.__upper[i+1][0]-self.__upper[i][0]\n t = (xp - self.__upper[i][0]) / h\n solution = ( p0(t)*self.__upper[i][1] + h*m0(t)*uppertan[i] + p1(t)*self.__upper[i+1][1] + h*m1(t)*uppertan[i+1] )\n upperint.append((xp,solution))\n \n # Interpolate lower points\n for xp in xpointsL:\n for i in range(len(self.__lower)-1):\n if self.__lower[i][0] < xp < self.__lower[i+1][0]:\n h = self.__lower[i+1][0]-self.__lower[i][0]\n t = (xp - self.__lower[i][0]) / h\n solution = ( p0(t)*self.__lower[i][1] + h*m0(t)*lowertan[i] + p1(t)*self.__lower[i+1][1] + h*m1(t)*lowertan[i+1] )\n lowerint.append((xp,solution))\n \n # Sort the points to keep the correct sequence\n upperint.sort(key=lambda x:x[0], reverse=True)\n lowerint.sort(key=lambda x:x[0])\n \n # Do checks to insure no duplicates\n if upperint[0][0] != 1.0: upperint.insert(0,(1.0,0.0))\n if upperint[-1][0] != 0.0: upperint.append((0.0,0.0))\n if lowerint[0][0] == 0.0: lowerint.pop(0)\n if lowerint[-1][0] != 1.0: lowerint.append((1.0,0.0))\n\n self.__ProcPoints = upperint + lowerint",
"def test_cspad_xy_at_z():\n ## 'CxiDs1.0:Cspad.0)' or 'DscCsPad'\n basedir = '/reg/g/psdm/detector/alignment/cspad/calib-cxi-camera1-2014-09-24/'\n fname_geometry = basedir + '2016-06-03-geometry-cxi06216-r25-camera1-z175mm.txt'\n fname_data = basedir + '2016-06-03-chun-cxi06216-0025-DscCsPad-max.txt'\n\n geometry = GeometryAccess(fname_geometry, pbits=0o377)\n\n # get pixel coordinate index arrays:\n xyc = xc, yc = 1000, 1000\n #rows, cols = geometry.get_pixel_coord_indexes(xy0_off_pix=xyc)\n #rows, cols = geometry.get_pixel_coord_indexes(do_tilt=True)\n #rows, cols = geometry.get_pixel_xy_inds_at_z(zplane=None, xy0_off_pix=xyc)\n rows, cols = geometry.get_pixel_xy_inds_at_z(zplane=150000)\n\n root, ext = os.path.splitext(fname_data)\n arr = np.load(fname_data) if ext == '.npy' else np.loadtxt(fname_data, dtype=np.float)\n\n #logger.info('arr.shape=', arr.shape\n arr.shape= (32,185,388)\n\n #ave, rms = arr.mean(), arr.std()\n #amp_range = (ave-rms, ave+3*rms)\n amp_range = (0, 1000)\n logger.info('amp_range:' + str(amp_range))\n\n logger.info('shapes rows: %s cols: %s weight: %s' % (str(rows.shape), str(cols.shape), str(arr.shape)))\n img = img_from_pixel_arrays(rows,cols,W=arr)\n\n axim = gg.plotImageLarge(img,amp_range=amp_range)\n gg.move(500,10)\n gg.show()",
"def get_cape(temp,pres,dewpt,hght,startp,startt,startdp,totalcape=False): \n\n # Check units\n # Init temp is startt in C, Init dew point is stwrtdp,\n # pressure levels are in hPa \n temp = temp - 273.15 # convert temperature to celsius\n dewpt = dewpt - 273.15 # convert dewpoint to celsius\n pres = pres/100 # convert pressure to hPa\n \n \n inds = np.where( (pres < startp) ) \n tmp = pres[inds]\n del pres\n #pres = tmp[::-1]\n pres = tmp[:]\n del tmp \n startp = startp/100\n \n tmp = temp[inds]\n del temp\n #temp = tmp[::-1]\n temp = tmp[:]\n del tmp \n\n tmp = dewpt[inds]\n del dewpt\n #dewpt = tmp[::-1]\n dewpt = tmp[:]\n del tmp \n\n tmp = hght[inds]\n del hght\n #hght = tmp[::-1]\n hght = tmp[:]\n del tmp \n\n \n # Get Sub-LCL traces \n presdry,tempdry,tempiso=dry_ascent(startp,startt-degCtoK,startdp-degCtoK) \n \n\n # make lcl variables explicit\n P_lcl=presdry[-1]\n T_lcl=tempdry[-1]\n\n # Now lift a wet parcel from the intersection point\n # preswet=linspace(P_lcl,100,101)\n preswet,tempwet=moist_ascent(P_lcl,T_lcl)\n\n # tparcel is the concatenation of tempdry and \n # tempwet, and so on.\n \n tparcel=np.concatenate((tempdry,tempwet[1:]))\n pparcel=np.concatenate((presdry,preswet[1:]))\n\n # Interpolating the environmental profile onto the \n # parcel pressure coordinate\n # tempenv=interp(preswet,pres[::-1],temp[::-1])\n ## NEW, for total column:\n tempenv=interp(pparcel,pres[::-1],temp[::-1])\n\n\n # now solve for the equlibrium levels above LCL\n # (all of them, including unstable ones)\n # eqlev,stab=solve_eq(preswet[::-1],(tempwet-tempenv)[::-1])\n # NEW, for total column:\n # On second thought, we don't really want/need\n # any equilibrium levels below LCL\n # eqlev,stab=solve_eq(pparcel[::-1],(tparcel-tempenv)[::-1])\n # This is equivalent to the old statement :\n eqlev,stab=solve_eq(pparcel[pparcel<=P_lcl][::-1],\\\n (tparcel-tempenv)[pparcel<=P_lcl][::-1])\n\n aa = tparcel-tempenv\n\n # Sorting index by decreasing pressure\n I=np.argsort(eqlev)[::-1]\n eqlev=eqlev[I]; stab=stab[I]\n\n # temperatures at the equilibrium level\n # tempeq=interp(eqlev,preswet[::-1],tempenv[::-1])\n ## NEW, for total column:\n tempeq=interp(eqlev,pparcel[::-1],tparcel[::-1])\n\n # This helps with debugging\n # for ii,eq in enumerate(eqlev):\n # print \"%5.2f %5.2f %2d\"%(eq,tempeq[ii],stab[ii])\n\n # need environmental temperature at LCL\n tenv_lcl=interp(P_lcl,pparcel[::-1],tempenv[::-1])\n\n isstab=np.where(stab==1.,True,False)\n unstab=np.where(stab==1.,False,True) \n\n if eqlev.shape[0]==0:\n # no unstable layers in entire profile\n # because the parcel never crosses the tenv\n P_lfc=float('NaN')\n P_el=float('NaN')\n elif T_lcl>tenv_lcl:\n # check LCL to see if this is unstable\n P_lfc=P_lcl\n if totalcape:\n P_el=eqlev[isstab][-1]\n else:\n P_el=eqlev[isstab][0]\n elif eqlev.shape[0]>1:\n # Parcel is stable at LCL so LFC is the \n # first unstable equilibrium level and \n # \"EQ\" level is the first stable equilibrium \n # level\n P_lfc=eqlev[unstab][0]\n if totalcape:\n P_el=eqlev[isstab][-1]\n else:\n P_el=eqlev[isstab][0]\n else:\n # catch a problem... if there is only\n # one eqlev and it's stable (this is \n # unphysical), then it could be a vertical\n # resolution thing. This is a kind of \n # \"null\" option\n try:\n\t P_el=eqlev[isstab][0]\n P_lfc=eqlev[isstab][0]\n except:\n\t P_el=eqlev[unstab][0]\n P_lfc=eqlev[unstab][0]\t\n\t\n if np.isnan(P_lfc):\n return P_lcl,P_lfc,P_el,0,0\n\n # need to handle case where dwpt is not available \n # above a certain level for any reason. 
Most simplest \n # thing to do is set it to a reasonably low value; \n # this should be a conservative approach!\n \n #dwpt=dewpt.copy().soften_mask()\n [inds] = np.where(np.isnan(dewpt))\n dwpt = dewpt\n dwpt[inds] = dwpt.min()\n \n # raise ValueError\n #if dwpt[(pres>=P_el).data*(pres<P_lfc).data].mask.any():\n # print \"WARNING: substituting dwpt.min() for masked values of DWPT in this sounding\"\n #dwpt[dwpt.mask]=dwpt.min()\n # dwptenv=interp(preswet,pres[::-1],dwpt[::-1])\n # NEW:\n\n dwptenv=interp(pparcel,pres[::-1],dwpt[::-1])\n\n\n \n #if hght[(pres>=P_el).data].mask.any():\n # raise NotImplementedError, \"TODO: Implement standard atmosphere to substitute missing heights\"\n # hghtenv=interp(preswet,pres[::-1],self.soundingdata['hght'][::-1])\n # NEW:\n hghtenv=interp(pparcel,pres[::-1],hght[::-1])\n \n\n # Areas of POSITIVE Bouyancy\n # cond1=(tempwet>=tempenv)*(preswet<=P_lfc)*(preswet>P_el)\n # NEW:\n cond1=(tparcel>=tempenv)*(pparcel<=P_lfc)*(pparcel>P_el)\n # Areas of NEGATIVE Bouyancy\n # cond2=(tempwet<tempenv)*(preswet<=P_lcl)*(preswet>P_el)\n # NEW:\n if totalcape:\n cond2=(tparcel<tempenv)*(pparcel>P_el)\n else:\n cond2=(tparcel<tempenv)*(pparcel>P_lfc)\n # Do CAPE calculation\n # 1. Virtual temperature of parcel... remember it's saturated above LCL.\n # e_parcel=SatVap(tempwet)\n # Tv_parcel=VirtualTemp(tempwet+degCtoK,preswet*100.,e_parcel)\n # e_env=SatVap(dwptenv)\n # Tv_env=VirtualTemp(tempenv+degCtoK,preswet*100.,e_env)\n # NEW:\n e_parcel=SatVap(tparcel)\n Tv_parcel=VirtualTemp(tparcel+degCtoK,pparcel*100.,e_parcel)\n e_env=SatVap(dwptenv)\n Tv_env=VirtualTemp(tempenv+degCtoK,pparcel*100.,e_env)\n\n CAPE=trapz(9.81*(Tv_parcel[cond1]-Tv_env[cond1])/Tv_env[cond1],hghtenv[cond1])\n CIN=trapz(9.81*(Tv_parcel[cond2]-Tv_env[cond2])/Tv_env[cond2],hghtenv[cond2])\n\n return P_lcl,P_lfc,P_el,CAPE,CIN",
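"# Added sketch: the final CAPE integral from get_cape, isolated -- trapezoidal\n# integration of parcel buoyancy over height for a synthetic profile (all values\n# here are made up for illustration).\nimport numpy as np\n\ng = 9.81\nhght = np.linspace(1000.0, 9000.0, 50) # m\nTv_env = 290.0 - 6.5e-3 * (hght - 1000.0) # K, constant lapse rate\nTv_parcel = Tv_env + 2.0 # K, parcel uniformly 2 K warmer\ncape = np.trapz(g * (Tv_parcel - Tv_env) / Tv_env, hght)\nprint(cape) # ~600 J/kg",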
"def light_source_directions():\n L = np.array([[-0.06059872, -0.44839055, 0.8917812],\n [-0.05939919, -0.33739538, 0.93948714],\n [-0.05710194, -0.21230722, 0.97553319],\n [-0.05360061, -0.07800089, 0.99551134],\n [-0.04919816, 0.05869781, 0.99706274],\n [-0.04399823, 0.19019233, 0.98076044],\n [-0.03839991, 0.31049925, 0.9497977],\n [-0.03280081, 0.41611025, 0.90872238],\n [-0.18449839, -0.43989616, 0.87889232],\n [-0.18870114, -0.32950199, 0.92510557],\n [-0.1901994, -0.20549935, 0.95999698],\n [-0.18849605, -0.07269848, 0.97937948],\n [-0.18329657, 0.06229884, 0.98108166],\n [-0.17500445, 0.19220488, 0.96562453],\n [-0.16449474, 0.31129005, 0.93597008],\n [-0.15270716, 0.4160195, 0.89644202],\n [-0.30139786, -0.42509698, 0.85349393],\n [-0.31020115, -0.31660118, 0.89640333],\n [-0.31489186, -0.19549495, 0.92877599],\n [-0.31450962, -0.06640203, 0.94692897],\n [-0.30880699, 0.06470146, 0.94892147],\n [-0.2981084, 0.19100538, 0.93522635],\n [-0.28359251, 0.30729189, 0.90837601],\n [-0.26670649, 0.41020998, 0.87212122],\n [-0.40709586, -0.40559588, 0.81839168],\n [-0.41919869, -0.29999906, 0.85689732],\n [-0.42618633, -0.18329412, 0.88587159],\n [-0.42691512, -0.05950211, 0.90233197],\n [-0.42090385, 0.0659006, 0.90470827],\n [-0.40860354, 0.18720162, 0.89330773],\n [-0.39141794, 0.29941372, 0.87013988],\n [-0.3707838, 0.39958255, 0.83836338],\n [-0.499596, -0.38319693, 0.77689378],\n [-0.51360334, -0.28130183, 0.81060526],\n [-0.52190667, -0.16990217, 0.83591069],\n [-0.52326874, -0.05249686, 0.85054918],\n [-0.51720021, 0.06620003, 0.85330035],\n [-0.50428312, 0.18139393, 0.84427174],\n [-0.48561334, 0.28870793, 0.82512267],\n [-0.46289771, 0.38549809, 0.79819605],\n [-0.57853599, -0.35932235, 0.73224555],\n [-0.59329349, -0.26189713, 0.76119165],\n [-0.60202327, -0.15630604, 0.78303027],\n [-0.6037003, -0.04570002, 0.7959004],\n [-0.59781529, 0.06590169, 0.79892043],\n [-0.58486953, 0.17439091, 0.79215873],\n [-0.56588359, 0.27639198, 0.77677747],\n [-0.54241965, 0.36921337, 0.75462733],\n [0.05220076, -0.43870637, 0.89711304],\n [0.05199786, -0.33138635, 0.9420612],\n [0.05109826, -0.20999284, 0.97636672],\n [0.04919919, -0.07869871, 0.99568366],\n [0.04640163, 0.05630197, 0.99733494],\n [0.04279892, 0.18779527, 0.98127529],\n [0.03870043, 0.30950341, 0.95011048],\n [0.03440055, 0.41730662, 0.90811441],\n [0.17290651, -0.43181626, 0.88523333],\n [0.17839998, -0.32509996, 0.92869988],\n [0.18160174, -0.20480196, 0.96180921],\n [0.18200745, -0.07490306, 0.98044012],\n [0.17919505, 0.05849838, 0.98207285],\n [0.17329685, 0.18839658, 0.96668244],\n [0.1649036, 0.30880674, 0.93672045],\n [0.1549931, 0.41578148, 0.89616009],\n [0.28720483, -0.41910705, 0.8613145],\n [0.29740177, -0.31410186, 0.90160535],\n [0.30420604, -0.1965039, 0.9321185],\n [0.30640529, -0.07010121, 0.94931639],\n [0.30361153, 0.05950226, 0.95093613],\n [0.29588748, 0.18589214, 0.93696036],\n [0.28409783, 0.30349768, 0.90949304],\n [0.26939905, 0.40849857, 0.87209694],\n [0.39120402, -0.40190413, 0.8279085],\n [0.40481085, -0.29960803, 0.86392315],\n [0.41411685, -0.18590756, 0.89103626],\n [0.41769724, -0.06449957, 0.906294],\n [0.41498764, 0.05959822, 0.90787296],\n [0.40607977, 0.18089099, 0.89575537],\n [0.39179226, 0.29439419, 0.87168279],\n [0.37379609, 0.39649585, 0.83849122],\n [0.48278794, -0.38169046, 0.78818031],\n [0.49848546, -0.28279175, 0.8194761],\n [0.50918069, -0.1740934, 0.84286803],\n [0.51360856, -0.05870098, 0.85601427],\n [0.51097962, 0.05899765, 0.8575658],\n [0.50151639, 0.17420569, 
0.84742769],\n [0.48600297, 0.28260173, 0.82700506],\n [0.46600106, 0.38110087, 0.79850181],\n [0.56150442, -0.35990283, 0.74510586],\n [0.57807114, -0.26498677, 0.77176147],\n [0.58933134, -0.1617086, 0.7915421],\n [0.59407609, -0.05289787, 0.80266769],\n [0.59157958, 0.057798, 0.80417224],\n [0.58198189, 0.16649482, 0.79597523],\n [0.56620006, 0.26940003, 0.77900008],\n [0.54551481, 0.36380988, 0.7550205]], dtype=float)\n return L",
"def hexapodZernikeLinearModel():\n Tfile='/home/jghao/research/decamFocus/psf_withseeing/finerGrid_coeff_matrix/zernike_coeff_finerGrid_training.cp'\n b=p.load(open(Tfile))\n nobs = len(b)\n x = b[:,0]\n y = b[:,1]\n z = b[:,2]\n theta = b[:,3]\n phi = b[:,4]\n fwhm = b[:,5]\n e1 = b[:,6]\n e2 = b[:,7]\n thetax = theta*np.cos(np.deg2rad(phi))\n thetay = theta*np.sin(np.deg2rad(phi))\n \n M22realTrefoil2 = b[:,37] # for x decenter\n M22imagTrefoil1 = b[:,54] \n M22TrefoilXshift = 0.5*(M22realTrefoil2+M22imagTrefoil1)\n\n M22realTrefoil1 = b[:,34] # for y decenter\n M22imagTrefoil2 = b[:,57] \n M22TrefoilYshift = 0.5*(M22realTrefoil1 - M22imagTrefoil2)\n\n M20defocus = b[:,12] # for defocus\n\n M22realComa2 = b[:,36] # for x-tilt\n M22imagComa1 = b[:,55]\n M22ComaXtilt = 0.5*(M22realComa2+M22imagComa1)\n\n M22realComa1 = b[:,35] # for y-tilt\n M22imagComa2 = b[:,56]\n M22ComaYtilt = 0.5*(M22realComa1 - M22imagComa2)\n \n pl.figure(figsize=(21,12))\n pl.subplot(2,3,1)\n t=bp.bin_scatter(M22TrefoilXshift,x,nbins=20,fmt='bo',scatter=True)\n res = linefit(M22TrefoilXshift,x)\n pl.plot(M22TrefoilXshift,M22TrefoilXshift*res[1]+res[0],'r,')\n pl.ylabel('x-decenter')\n pl.xlabel('(M22realTrefoil2+M22imagTrefoil1)/2')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.subplot(2,3,2)\n t=bp.bin_scatter(M22TrefoilYshift,y,nbins=20,fmt='bo',scatter=True)\n res = linefit(M22TrefoilYshift,y)\n pl.plot(M22TrefoilYshift,M22TrefoilYshift*res[1]+res[0],'r,')\n pl.ylabel('y-decenter')\n pl.xlabel('(M22realTrefoil1 - M22imagTrefoil2)/2')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.subplot(2,3,3)\n t=bp.bin_scatter(M20defocus,z,nbins=20,fmt='bo',scatter=True)\n res = linefit(M20defocus,z)\n pl.plot(M20defocus,M20defocus*res[1]+res[0],'r,')\n pl.ylabel('z-defocus')\n pl.xlabel('M20defocus')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.subplot(2,3,4)\n t=bp.bin_scatter(M22ComaXtilt,thetax,nbins=20,fmt='bo',scatter=True)\n res = linefit(M22ComaXtilt,thetax)\n pl.plot(M22ComaXtilt,M22ComaXtilt*res[1]+res[0],'r,')\n pl.ylabel('x-tilt')\n pl.xlabel('(M22realComa2+M22imagComa1)/2')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n pl.subplot(2,3,5)\n t=bp.bin_scatter(M22ComaYtilt,thetay,nbins=20,fmt='bo',scatter=True)\n res = linefit(M22ComaYtilt,thetay)\n pl.plot(M22ComaYtilt,M22ComaYtilt*res[1]+res[0],'r,')\n pl.ylabel('y-tilt')\n pl.xlabel('(M22realComa1 - M22imagComa2)/2')\n pl.title('slope: '+str(round(res[1],4))+' Intercept: '+str(round(res[0],4)))\n\n pl.close()",
"def monolayer_4band():\n a = 0.222\n ax = 0.438\n ay = 0.332\n theta = 96.79 * (pi / 180)\n phi = 103.69 * (pi / 180)\n\n lat = pb.Lattice(a1=[ax, 0], a2=[0, ay])\n\n h = a * sin(phi - pi / 2)\n s = 0.5 * ax - a * cos(theta / 2)\n lat.add_sublattices(\n ('A', [0, 0, h], 0),\n ('B', [s, 0, 0], 0),\n ('C', [ax/2, ay/2, 0], 0),\n ('D', [ax/2 + s, ay/2, h], 0)\n )\n\n lat.register_hopping_energies({\n 't1': -1.22,\n 't2': 3.665,\n 't3': -0.205,\n 't4': -0.105,\n 't5': -0.055\n })\n\n lat.add_hoppings(\n # t1\n ([-1, 0], 'A', 'D', 't1'),\n ([-1, -1], 'A', 'D', 't1'),\n ([ 0, 0], 'B', 'C', 't1'),\n ([ 0, -1], 'B', 'C', 't1'),\n # t2\n ([ 0, 0], 'A', 'B', 't2'),\n ([ 0, 0], 'C', 'D', 't2'),\n # t3\n ([ 0, 0], 'A', 'D', 't3'),\n ([ 0, -1], 'A', 'D', 't3'),\n ([ 1, 1], 'C', 'B', 't3'),\n ([ 1, 0], 'C', 'B', 't3'),\n # t4\n ([ 0, 0], 'A', 'C', 't4'),\n ([ 0, -1], 'A', 'C', 't4'),\n ([-1, 0], 'A', 'C', 't4'),\n ([-1, -1], 'A', 'C', 't4'),\n ([ 0, 0], 'B', 'D', 't4'),\n ([ 0, -1], 'B', 'D', 't4'),\n ([-1, 0], 'B', 'D', 't4'),\n ([-1, -1], 'B', 'D', 't4'),\n # t5\n ([-1, 0], 'A', 'B', 't5'),\n ([ 0, 1], 'A', 'B', 't5'),\n ([ 0, -1], 'A', 'B', 't5'),\n ([-1, 0], 'C', 'D', 't5'),\n ([ 0, 1], 'C', 'D', 't5'),\n ([ 0, -1], 'C', 'D', 't5'),\n )\n\n return lat",
"def _process(self, X):\n # 周波数毎に実施する\n ones = np.ones(self.L.shape[1])\n\n spire_cost = np.zeros(self.grid.n_points)\n\n # 初期のポジションベクトル\n n_channels = np.shape(X)[0]\n n_freq_bins = np.shape(X)[1]\n n_frames = np.shape(X)[2]\n\n d = None\n n_mic_pair = 0\n # for m1 in range(1):\n\n step = 2\n\n mic_pairs = self.mic_pairs\n # mic_pairs=[[m1,m2] for m1 in range(n_channels-1) for m2 in range(m1+1,np.minimum(m1+step+1,n_channels)) ]\n mic_pairs = np.array(mic_pairs)\n\n n_mic_pair = np.shape(mic_pairs)[0]\n d = np.array(self.mic_positions[mic_pairs[:, 1]]) - np.array(\n self.mic_positions[mic_pairs[:, 0]]\n )\n # d: n_mic_pair,dim\n\n # 時間周波数毎の初期のポジションベクトル\n position_vector = np.zeros(shape=(n_freq_bins, n_frames, self.dim))\n\n X_temp = X[:, self.freq_bins, :]\n\n sigma = np.angle(X_temp[mic_pairs[:, 1], ...] / X_temp[mic_pairs[:, 0], ...])\n sigma = np.transpose(sigma, (1, 2, 0))\n\n sigma = np.where(np.abs(sigma) < 1.0e-18, np.zeros_like(sigma) + 1.0e-18, sigma)\n z = np.zeros(shape=(n_freq_bins, n_frames, n_mic_pair), dtype=np.int)\n x = np.random.normal(size=n_freq_bins * n_frames * n_mic_pair)\n x = np.reshape(x, newshape=(n_freq_bins, n_frames, n_mic_pair))\n # 初期化\n mode_vec = self.rough_mode_vec[self.freq_bins, :, :]\n mode_vec = np.conjugate(mode_vec)\n prod = np.einsum(\"fmi,mft->fti\", mode_vec, X[:, self.freq_bins, :])\n # prod=np.einsum(\"mi,mt->ti\",mode_vec,X[:,k,:])\n amp = np.abs(prod)\n # ft\n index = np.argmax(amp, axis=-1)\n org_shape = np.shape(index)\n index = np.reshape(index, [-1])\n\n # indexに相当する方向を取る\n if self.dim == 2:\n rough_azimuth_recon = self.rough_grid.azimuth[index]\n # ダミー\n rough_colatitude_recon = np.zeros_like(rough_azimuth_recon) + np.pi\n elif self.dim == 3:\n rough_azimuth_recon = self.rough_grid.azimuth[index]\n rough_colatitude_recon = self.rough_grid.colatitude[index]\n\n doas = np.concatenate(\n (\n rough_colatitude_recon[:, None], # colatitude [0, pi]\n rough_azimuth_recon[:, None], # azimuth [0, 2 pi]\n ),\n axis=-1,\n )\n distance = 3.0\n\n # source_locations: 3, n_frames\n source_locations = geom.spherical_to_cartesian(doa=doas, distance=distance)\n source_locations = np.reshape(source_locations, (3, org_shape[0], org_shape[1]))\n\n position_vector[self.freq_bins, :, :] = np.transpose(\n source_locations[: self.dim, :, :], (1, 2, 0)\n )\n\n size = np.einsum(\"fti,fti->ft\", np.conjugate(position_vector), position_vector)\n size = np.sqrt(size)[..., np.newaxis]\n position_vector = position_vector / np.maximum(size, 1.0e-18)\n\n est_p = position_vector[self.freq_bins, ...]\n z = z[self.freq_bins, ...]\n x = x[self.freq_bins, ...]\n freqs = self.freq_hz\n cluster_index = cluster_index[self.freq_bins, ...]\n\n silent_mode = True\n freqs_d = np.einsum(\"f,pi->fpi\", freqs, d)\n x_non_const_power_vector = np.zeros(shape=(n_freq_bins, n_frames))\n\n for i in range(self.n_mm_itertaions):\n (\n org_cost_0,\n org_cost_1,\n org_cost_2,\n org_cost_3,\n cost_0,\n cost_1,\n cost_2,\n cost_3,\n est_p,\n z,\n x,\n x_non_const_power,\n ) = coplaner_doa_estimation_one_iteration(\n freqs_d,\n est_p,\n sigma,\n z,\n x,\n use_clustering=use_clustering,\n cluster_index=cluster_index,\n cluster_center=cluster_center,\n iter_num2=self.rooting_n_iter,\n silent_mode=silent_mode,\n zero_feature_index=2,\n )\n\n if silent_mode == False:\n print(cost_0, cost_1, cost_2, cost_3)\n\n # est_pから\n # fti\n position_vector[self.freq_bins, ...] 
= est_p\n\n x_non_const_power_vector[self.freq_bins, :] = x_non_const_power[:, :, 0]\n\n size = np.einsum(\"fti,fti->ft\", np.conjugate(position_vector), position_vector)\n size = np.sqrt(size)[..., np.newaxis]\n position_vector = position_vector / np.maximum(size, 1.0e-18)\n\n # gridを探す\n\n # position_vectorに相当する方向を取る\n if self.dim == 2:\n azimuth_recon = self.grid.azimuth\n # ダミー\n colatitude_recon = np.zeros_like(azimuth_recon) + np.pi\n elif self.dim == 3:\n azimuth_recon = self.grid.azimuth\n colatitude_recon = self.grid.colatitude\n\n doas = np.concatenate(\n (\n colatitude_recon[:, None], # colatitude [0, pi]\n azimuth_recon[:, None], # azimuth [0, 2 pi]\n ),\n axis=-1,\n )\n distance = 3.0\n # source_locations: 3, n_grid_num\n grid_locations = geom.spherical_to_cartesian(doa=doas, distance=distance)\n size = np.einsum(\"in,in->n\", np.conjugate(grid_locations), grid_locations)\n size = np.sqrt(size)[np.newaxis, ...]\n grid_locations = grid_locations / np.maximum(size, 1.0e-18)\n\n grid_index_buf = []\n\n # 制約なし解のパワーが1を大幅に超えて居たらReject\n print(np.average(x_non_const_power_vector))\n valid_index = x_non_const_power_vector < self.reject_th\n for k in self.freq_bins:\n prod = np.einsum(\"in,ti->tn\", grid_locations, position_vector[k, ...])\n grid_index = np.argmax(prod, axis=-1)\n\n grid_index = grid_index[valid_index[k, :]]\n\n grid_index_buf.append(grid_index)\n grid_index_buf = np.array(grid_index_buf)\n\n for n in range(self.grid.n_points):\n spire_cost[n] = spire_cost[n] + np.count_nonzero(grid_index_buf == n)\n\n self.grid.set_values(spire_cost)",
"def __init__(self,clump,ds,Gsize_threshold=50000,clump_2d_id=-1,subclump_id=-1):\n self.clump_2d_id = clump_2d_id\n self.subclump_id = subclump_id\n\n #For gravitational energy calculation.\n truncate = False\n\n if hasattr(clump,'data'):\n G = clump.data.ds['GravitationalConstant']/(4*np.pi)\n #Scalar quantites for ease\n #the mean density is almost always 1 in code units. The extra 4 pi is for the definition of G in Enzo\n self.Time = clump.data.ds['InitialTime']\n self.TimePerFreeFall = self.Time/np.sqrt( 3*np.pi*(4*np.pi)/(32*clump.data.ds['GravitationalConstant']*1))\n data = clump.data\n elif hasattr(clump,'ds'):\n G = clump.ds['GravitationalConstant']/(4*np.pi)\n #Scalar quantites for ease\n self.Time = clump.ds['InitialTime']\n #the mean density is almost always 1 in code units. The extra 4 pi is for the definition of G in Enzo\n data=clump\n else:\n print \"nope\"\n raise\n self.Time = data.ds['InitialTime']\n\n\n #set up vectors and dictionaries\n axes = np.array(['x','y','z'])\n daxes = np.array(['dx','dy','dz'])\n self.AvgColumnDensity = np.zeros(3)\n self.AvgBlos = np.zeros(3)\n truncate = False\n self.Energies = {}\n self.CenterOfMass = []\n\n #Check for, and move relevant zones to the right.\n self.shift = shift(data)\n\n #size info\n self.LeftEdge = np.array([data['x'].min(),data['y'].min(),data['z'].min()])\n self.RightEdge= np.array([data['x'].max(),data['y'].max(),data['z'].max()])\n d = self.RightEdge - self.LeftEdge\n self.MaxWidthStupid = np.sqrt(np.dot(d,d))\n self.MinWidthStupid = d.min()\n self.R = d.prod()**(1./3)*0.5\n self.R2 = 0.5*(data['cell_volume'].sum())**(1./3)\n\n #Projected Quantities\n for d,x in enumerate(axes):\n tx = trans(daxes,d)\n A = data[tx[0]]*data[tx[1]]\n Z = data[daxes[d]]\n self.AvgColumnDensity[d] = (A*Z*data['Density']).sum()/A.sum()\n BName = 'B%s'%('xyz'[d])\n self.AvgBlos[d] = (data['cell_mass']*data[BName]).sum()/data['cell_mass'].sum()\n\n #mass quantites\n self.Mass = data['cell_mass'].sum()\n for ax in ['x','y','z']:\n self.CenterOfMass.append( (data[ax]*data['cell_mass']).sum()/\n self.Mass)\n self.CenterOfMass = np.array(self.CenterOfMass)\n data.set_field_parameter(\"center\",self.CenterOfMass) \n self.mean_density = self.Mass/(data['cell_volume']).sum()\n\n\n #velocity properties \n self.AvgVelocity = data.ds.arr([(data['cell_mass']*data[v]).sum()/data['cell_mass'].sum()\n for v in ['x-velocity','y-velocity','z-velocity']])\n self.VelocityUnitVector = self.AvgVelocity/np.sqrt((self.AvgVelocity**2).sum())\n data.set_field_parameter(\"bulk_velocity\",self.AvgVelocity)\n self.VelocityDispersion =np.sqrt((data['VelocityDispersionSquared']*data['cell_mass']).sum()/self.Mass)\n\n #rotational properties. 
Require 'center' = center of mass, 'bulk velocity', both set above.\n self.AvgAngularMomentum = np.array([(data['cell_mass']*data['angular_momentum_%s'%ax]).sum()/self.Mass for ax in 'xyz'] )\n self.AvgAngularVelocity = np.array([(data['angular_momentum_%s'%ax]).sum()/self.Mass for ax in 'xyz'] )\n self.MeanOmega = np.sqrt( (self.AvgAngularVelocity**2).sum() )\n self.RotBeta = (self.MeanOmega**2*(self.R)**3)/(3*G*self.Mass)\n\n\n #test\n #self.Energies['Rotational'] = (data['cell_mass']*data['RotationalEnergy']).sum()/self.Mass\n #magnetic properties\n self.b_vol = np.zeros(3)\n self.b_mass =np.zeros(3)\n self.B_vol = np.zeros(3)\n self.B_mass= np.zeros(3)\n M = data['cell_mass']\n Mtotal = M.sum()\n V = data['cell_volume']\n Vtotal = V.sum()\n B = [data['B%s'%ax] for ax in 'xyz']\n for d in range(3):\n self.B_mass[d]= self.AvgBlos[d] #because we already did this one...\n self.B_vol[d] = (V*B[d]).sum()/Vtotal\n self.b_vol[d] =np.sqrt((V*(self.B_vol[d]-B[d].v)**2).sum()/Vtotal) \n self.b_mass[d]=np.sqrt( (M*(self.B_mass[d]-B[d].v)**2).sum()/Mtotal)\n for f in ['cell_mass','cell_volume', 'Bx','By','Bz']:\n del data[f]\n\n #angles\n \n #energetic properties\n ke = (data[\"cell_volume\"]*data[\"rel_kinetic_energy\"]).sum()\n ke = ke.in_units('code_velocity**2*code_mass')\n ke=ke.v\n self.Energies[\"Kinetic\"] = ke\n be = (data[\"cell_volume\"]*data[\"magnetic_energy\"]).sum()\n be = be.in_units('code_velocity**2*code_mass')\n be=be.v\n self.Energies[\"Magnetic\"] = be\n #pdb.set_trace()\n #if data[\"cell_volume\"].size < Gsize_threshold or Gsize_threshold < 0:\n # ge = G*FindBindingEnergy(data[\"cell_mass\"], data['x'],data['y'],data['z'],\n # truncate, self.Energies[\"Kinetic\"]/(G))\n #else:\n # ge = 0\n mw_temp_name = \"./MW_Grav_Temp_%s\"%(uuid.uuid1())\n ge = -G*mw_stuff.run_mw_grav(mw_temp_name,data)\n \n self.Energies[\"Gravitational\"] = ge\n\n #3/2nkT for monatomic gas, PV=nkt, P=cs^2 rho.\n #Assumes cs = 1\n thermal = 1.5*(data[\"cell_volume\"]*data['Density']).sum().in_units('code_mass').v\n self.Energies[\"Thermal\"] = thermal\n\n #energy ratios\n\n\n self.Ratios = {}\n if ke+be+thermal > 0.0:\n self.Ratios['ge/all']=ge/(ke+be+thermal)\n if ke+be > 0.0:\n self.Ratios['ge/ke+be']=ge/(ke+be)\n if ke > 0.0:\n self.Ratios['ge/ke']=ge/ke\n if be > 0.0:\n self.Ratios['ge/be']=ge/be\n self.Ratios['ke/be']=ke/be\n self.Ratios['te/be'] = thermal/be\n if thermal > 0.0:\n self.Ratios[ 'ge/thermal']=ge/thermal\n if ke+ge > 0.0:\n self.Ratios['ge/(ke+ge)'] = ge/(ke+ge)\n\n if 1: #get these later\n #if self.Energies['Rotational'] > 0.0:\n # self.Ratios['ge/re'] = self.Energies['Gravitational']/self.Energies['Rotational']\n self.Alpha = 5*(self.VelocityDispersion**2)/3.0*(self.R)/( self.Mass*G)\n self.Alpha2 = 5*(self.VelocityDispersion**2)/3.0*(self.R2)/( self.Mass*G)\n #self.MassToFlux = (data['MassToFluxNonCritical']*data['cell_volume']).sum()/data['cell_volume'].sum()*2*np.pi*G\n\n self.bound = self.Ratios['ge/all'] > 0.5\n \n\n #at least 2 zones wide.\n self.Valid = True\n nX = np.unique(data['x']).size\n nY = np.unique(data['y']).size\n nZ = np.unique(data['z']).size\n if (np.array([nX,nY,nZ]) <= 2).any():\n self.Valid=False\n if self.R == 0:\n self.Valid=False \n\n #Need to clean up all data so i don't crash the computer.\n for k in data.keys():\n del data[k]\n\n mw_temp_file_list = sorted(glob.glob(mw_temp_name+\"*\"), reverse=True)\n for fptr in mw_temp_file_list:\n if os.path.isfile(fptr):\n os.remove(fptr)",
"def get_coords(self,data,ifeed,mask):\n sky_data_flag = mask\n \n az = data['level1/spectrometer/pixel_pointing/pixel_az'][ifeed,:]\n el = data['level1/spectrometer/pixel_pointing/pixel_el'][ifeed,:]\n ra = data['level1/spectrometer/pixel_pointing/pixel_ra'][ifeed,:]\n dec = data['level1/spectrometer/pixel_pointing/pixel_dec'][ifeed,:]\n\n N = az.shape[0]//2 * 2\n daz = np.gradient(az[:])*50.\n daz = daz[sky_data_flag]\n az = az[sky_data_flag]\n el = el[sky_data_flag]\n ra = ra[sky_data_flag]\n dec=dec[sky_data_flag]\n cw = daz > 1e-2\n ccw = daz < 1e-2\n\n return {'az':az,\n 'el':el,\n 'ccw':ccw,\n 'cw':cw,\n 'ra':ra,\n 'dec':dec,\n 'sky_data_flag':sky_data_flag}",
"def linecut_points( **kwargs ):\n npoints = kwargs.get('npoints', 320)\n extents = kwargs.get('extents',None)\n lims = kwargs.get('lims', (-80.,80.))\n direc = kwargs.get('direc', (np.pi/2, 0.))\n origin = kwargs.get('origin', vec3(0.,0.,0.))\n\n if extents is not None:\n lims = (-extents, extents)\n\n # Prepare set of points for plot \n t = np.linspace( lims[0], lims[1], npoints )\n unit = vec3()\n th = direc[0]\n ph = direc[1] \n unit.set_spherical(1, th, ph) \n # Convert vec3s to ndarray\n unit = np.array(unit)\n origin = np.array(origin) \n #\n XYZ = origin + np.outer(t, unit)\n X = XYZ[:,0]\n Y = XYZ[:,1]\n Z = XYZ[:,2]\n \n return t, X, Y, Z, lims",
"def point_list(self,res,llc,urc,direction):\n\t\tif direction == 2:\n\t\t\tZdist=urc[2]-llc[2]\n\t\t\tnumPoints=int(numpy.ceil(Zdist/res))\n\t\t\tdeltaZ=Zdist/numPoints\n\t\t\tpoints=[llc+numpy.array([0,0,deltaZ*i]) for i in range(numPoints)]\n\t\t\treturn points, points[0], points[-1]\n\t\tif direction == 1:\n\t\t\tZdist=urc[1]-llc[1]\n\t\t\tnumPoints=int(numpy.ceil(Zdist/res))\n\t\t\tdeltaZ=Zdist/numPoints\n\t\t\tpoints=[llc+numpy.array([0,deltaZ*i,0]) for i in range(numPoints)]\n\t\t\treturn points, points[0], points[-1]\n\t\tif direction == 0:\n\t\t\tZdist=urc[0]-llc[0]\n\t\t\tnumPoints=int(numpy.ceil(Zdist/res))\n\t\t\tdeltaZ=Zdist/numPoints\n\t\t\tpoints=[llc+numpy.array([deltaZ*i,0,0]) for i in range(numPoints)]\n\t\t\treturn points, points[0], points[-1]",
"def calc_ell_list(chain) :\n ell_list = np.zeros(len(chain.bridges_dict))\n \n for b in chain.bridges_dict.keys() :\n i, j = chain.bridges_dict[b].lumen1, chain.bridges_dict[b].lumen2\n L_i, pos_i = chain.lumens_dict[i].length, chain.lumens_dict[i].pos\n L_j, pos_j = chain.lumens_dict[j].length, chain.lumens_dict[j].pos\n \n chain.bridges_dict[b].length = np.abs(pos_j - pos_i) - (L_i + L_j)",
"def __init__(self, roi_warped_points):\n\n # was the line detected in the last iteration?\n self.detected = False\n # x values of the last n fits of the line\n self.recent_xfitted = []\n #average x values of the fitted line over the last n iterations\n self.bestx = None\n #polynomial coefficients averaged over the last n iterations\n self.best_fit = [np.array([False])]\n #polinomial coefficients for the last n fits of the lane\n self.recent_fit = []\n #polynomial coefficients for the most recent fit\n self.current_fit = [np.array([False])]\n #radius of curvature of the line in some units\n self.radius_of_curvature = 0\n #distance in meters of vehicle center from the line\n self.line_base_pos = 0\n #difference in fit coefficients between last and new fits\n self.diffs = np.array([0,0,0], dtype='float')\n #x values for detected line pixels\n self.allx = None\n #maximum number of iterations to average\n self.max_n = 10 #25\n\n # roi image points in bird's view space\n self.roi_warped_points = roi_warped_points\n\n #y values for detected line pixels\n self.ally = np.linspace(0, self.roi_warped_points[2][1] - 1, self.roi_warped_points[2][1])\n\n # line base pos is calculated through the roi information\n # the used four point ROI has two points at the bottom that are straight\n # with respect to the bottom - as this points are right next to the lines,\n # they can be translated from pixels into meters with the knowledge of\n # a U.S. highway standard lane - this is an apprximation, but should be\n # good enough for this project\n # U.S. regulations minimum lane width: 3.7m\n self.xm_per_pix = 3.7 / (self.roi_warped_points[1][0] - self.roi_warped_points[0][0])\n\n # each dashed line is 3m long --> about 33m for warped image\n self.ym_per_pix = 33 / (self.roi_warped_points[2][1] - self.roi_warped_points[0][1])",
"def eval(self, sample):\n '''\n jv = sample.get(JOINT_VELOCITIES)\n eepv = sample.get(END_EFFECTOR_POINT_VELOCITIES)\n\n boxpos = jv[:, 2:5]\n fingerpos = eepv[:, 7:10]\n tgtpos = np.zeros((100,3))\n for i in range(100):\n tgtpos[i] = [0.6, 0.2, 0.1]\n \n fetchdist = np.sum((boxpos - fingerpos) ** 2, axis=1)\n liftdist = np.sum((boxpos - tgtpos) ** 2, axis=1)\n \n l = fetchdist + liftdist\n '''\n\n eept = sample.get(END_EFFECTOR_POINTS)\n eepv = sample.get(END_EFFECTOR_POINT_VELOCITIES)\n sample_u = sample.get_U()\n cfrc_ext = np.concatenate((eept[:, 13:56], eepv[:, 0:41]), axis = 1)\n # vec = eepv[:, 64:66] \n # dist = np.sum(np.square(vec), axis=1) / 5\n forward_reward = eepv[:, 53]\n scaling = 150\n ctrl_cost = 0.5 * 1e-2 * np.sum(np.square(sample_u / scaling), axis = 1)\n # contact_cost = 0.5 * 1e-3 * np.sum(np.square(cfrc_ext), axis = 1)\n # survive_reward = 0.5\n \n l = -forward_reward + ctrl_cost\n\n prefix=''\n logger.record_tabular('PolReturn', -sum(l))\n\n ave_vel = np.mean(forward_reward)\n min_vel = np.min(forward_reward)\n max_vel = np.max(forward_reward)\n std_vel = np.std(forward_reward)\n logger.record_tabular(prefix+'PolAverageVelocity', ave_vel)\n logger.record_tabular(prefix+'PolMinVelocity', min_vel)\n logger.record_tabular(prefix+'PolMaxVelocity', max_vel)\n logger.record_tabular(prefix+'PolStdVelocity', std_vel)\n logger.dump_tabular(with_prefix=False)\n \n lx, lu, lxx, luu, lux = 0, 0, 0, 0, 0\n\n '''\n # Compute weighted sum of each cost value and derivatives.\n weight = self._weights[0]\n l = l * weight\n lx = lx * weight\n lu = lu * weight\n lxx = lxx * weight\n luu = luu * weight\n lux = lux * weight\n for i in range(1, len(self._costs)):\n pl, plx, plu, plxx, pluu, plux = self._costs[i].eval(sample)\n weight = self._weights[i]\n l = l + pl * weight\n lx = lx + plx * weight\n lu = lu + plu * weight\n lxx = lxx + plxx * weight\n luu = luu + pluu * weight\n lux = lux + plux * weight\n '''\n \n return l, lx, lu, lxx, luu, lux",
"def calculate(self, points, offset=False, interp=None, normals=False):\n if isinstance(points,list):\n if not normals: \n points = np.array(points).reshape(int(len(points)/3) ,3)\n else:\n points = np.array(points).reshape(int(len(points)/6) ,6)\n\n self.minx = np.min(points[:,[0]]) # X\n self.maxx = np.max(points[:,[0]]) # X\n \n self.miny = np.min(points[:,[1]]) # Y\n self.maxy = np.max(points[:,[1]]) # Y\n \n self.minz = np.min(points[:,[2]]) # Z\n self.maxz = np.max(points[:,[2]]) # Z\n\n # offset the data to 0 (top, left)\n if offset:\n if normals:\n x,y,z = self.center\n points = points - [ x, y, z, 0.0, 0.0, 0.0]\n else:\n points = points - self.center\n\n self.minx_o = self.minx\n self.maxx_o = self.maxx\n self.miny_o = self.miny\n self.maxy_o = self.maxy\n self.minz_o = self.minz\n self.maxz_o = self.maxz\n\n # recompute new values\n self.minx = np.min(points[:,[0]]) # X\n self.maxx = np.max(points[:,[0]]) # X\n \n self.miny = np.min(points[:,[1]]) # Y\n self.maxy = np.max(points[:,[1]]) # Y\n \n self.minz = np.min(points[:,[2]]) # Z\n self.maxz = np.max(points[:,[2]]) # Z\n \n # interpolate the data to the new space\n if interp is not None:\n #\n # sighly open the range to avoid bad interpolation of decimals\n # due precision \n #\n #print(points)\n D = 0.1\n x = np.interp(points[:,[0]], (self.minx-D, self.maxx+D), interp[0])\n y = np.interp(points[:,[1]], (self.miny-D, self.maxy+D), interp[1])\n z = np.interp(points[:,[2]], (self.minz-D, self.maxz+D), interp[2]) \n if normals:\n a = points[:,[3]]\n b = points[:,[4]]\n c = points[:,[5]]\n points = np.column_stack((x,y,z,a,b,c))\n else:\n points = np.column_stack((x,y,z))\n \n \n\n return points.flatten()",
"def pwlELE(site_residuals, azSpacing=0.5,zenSpacing=0.5,store=False,site=\"site\"):\n tdata = res.reject_absVal(site_residuals,100.)\n del site_residuals \n data = res.reject_outliers_elevation(tdata,5,0.5)\n del tdata\n\n numd = np.shape(data)[0]\n numZD = int(90.0/zenSpacing) + 1\n\n Neq = np.eye(numZD,dtype=float) * 0.01\n #print(\"Neq\",np.shape(Neq))\n Apart = np.zeros((numd,numZD))\n #print(\"Apart:\",np.shape(Apart))\n sd = np.zeros(numd)\n\n for i in range(0,numd):\n iz = np.floor(data[i,2]/zenSpacing)\n sd[i] = np.sin(data[i,2]/180.*np.pi)\n Apart[i,iz] = (1.-(data[i,2]-iz*zenSpacing)/zenSpacing)\n Apart[i,iz+1] = (data[i,2]-iz*zenSpacing)/zenSpacing\n\n prechi = np.dot(data[:,3].T,data[:,3])\n #print(\"prechi:\",prechi,numd,np.sqrt(prechi/numd))\n\n Neq = np.add(Neq, np.dot(Apart.T,Apart) )\n #print(\"Neq:\",np.shape(Neq))\n\n Bvec = np.dot(Apart.T,data[:,3])\n #print(\"Bvec:\",np.shape(Bvec))\n \n Cov = np.linalg.pinv(Neq)\n #print(\"Cov\",np.shape(Cov))\n \n Sol = np.dot(Cov,Bvec)\n #print(\"Sol\",np.shape(Sol))\n \n postchi = prechi - np.dot(Bvec.T,Sol)\n #print(\"postchi:\",postchi)\n \n pwl = Sol\n #print(\"pwl:\",np.shape(pwl))\n \n pwlsig = np.sqrt(np.diag(Cov) *postchi/numd)\n #print(\"pwlsig\",np.shape(pwlsig))\n\n model = np.dot(Apart,Sol)\n f = loglikelihood(data[:,3],model)\n dof = numd - np.shape(Sol)[0]\n aic = calcAIC(f,dof)\n bic = calcBIC(f,dof,numd)\n #print(\"My loglikelihood:\",f,aic,bic,dof,numd)\n #print(\"STATS:\",numd,np.sqrt(prechi/numd),np.sqrt(postchi/numd),np.sqrt((prechi-postchi)/numd),aic,bic)\n stats = {}\n stats['prechi'] = np.sqrt(prechi/numd)\n stats['postchi'] = np.sqrt(postchi/numd)\n stats['chi_inc'] = np.sqrt((prechi-postchi)/numd)\n stats['aic'] = aic\n stats['bic'] = bic\n\n # Check to see if wee store the partials as a numpy array\n if store:\n np.savez(site+'_pwlELE.npz',neq=Neq,atwb=Bvec)\n\n return pwl,pwlsig,stats",
"def process_laneOffset(self):\n center_line = np.poly1d(np.mean([self.line_l.get_LinePoly().coeffs, self.line_r.get_LinePoly().coeffs], axis=0))\n # store the center line polynomial\n self.center_poly = center_line\n center_point = IMAGE_WIDTH/2 - center_line(709)\n offset_from_center =center_point* self.line_l.x_pxm\n self.lane_offset = offset_from_center\n return center_point",
"def Refine(self, level=2):\n\n from scipy.spatial import Delaunay\n try:\n from Florence.QuadratureRules.EquallySpacedPoints import EquallySpacedPoints, EquallySpacedPointsTri, EquallySpacedPointsTet\n from Florence.FunctionSpace import Line, Tri, Quad, Tet, Hex\n from Florence.FunctionSpace.OneDimensional.Line import Lagrange\n from Florence.Tensor import remove_duplicates_2D\n except ImportError:\n raise ImportError(\"This functionality requires florence's support\")\n\n\n # WE NEED AN ACTUAL NDIM\n # ndim = self.InferSpatialDimension()\n if self.element_type == \"line\":\n ndim = 1\n if self.element_type == \"tri\" or self.element_type == \"quad\":\n ndim = 2\n elif self.element_type == \"tet\" or self.element_type == \"hex\":\n ndim = 3\n\n mesh = deepcopy(self)\n if mesh.InferPolynomialDegree() > 1:\n mesh = mesh.GetLinearMesh(remap=True)\n\n C = level - 1\n p = C+1\n # CActual = self.InferPolynomialDegree() - 1\n CActual = 0 # MUST BE ALWAYS ZERO\n if self.element_type == \"line\":\n nsize = int(C+2)\n nsize_2 = int(CActual+2)\n elif self.element_type == \"tri\":\n nsize = int((p+1)*(p+2)/2.)\n nsize_2 = int((CActual+2)*(CActual+3)/2.)\n elif self.element_type == \"quad\":\n nsize = int((C+2)**2)\n nsize_2 = int((CActual+2)**2)\n elif self.element_type == \"tet\":\n nsize = int((p+1)*(p+2)*(p+3)/6.)\n nsize_2 = int((CActual+2)*(CActual+3)*(CActual+4)/6.)\n elif self.element_type == \"hex\":\n nsize = int((C+2)**3)\n nsize_2 = int((CActual+2)**3)\n else:\n raise ValueError(\"Element type not undersood\")\n\n if self.element_type == \"line\":\n SingleElementPoints = EquallySpacedPoints(ndim+1,C).ravel()\n\n elif self.element_type == \"quad\" or self.element_type == \"hex\":\n SingleElementPoints = EquallySpacedPoints(ndim+1,C)\n # RE-ARANGE NODES PROVIDED BY EquallySpacedPoints\n if ndim == 2:\n node_aranger = np.lexsort((SingleElementPoints[:,0],SingleElementPoints[:,1]))\n elif ndim == 3:\n node_aranger = np.lexsort((SingleElementPoints[:,0],SingleElementPoints[:,1],SingleElementPoints[:,2]))\n SingleElementPoints = SingleElementPoints[node_aranger,:]\n\n elif self.element_type == \"tri\":\n SingleElementPoints = EquallySpacedPointsTri(C)\n simplices = Delaunay(SingleElementPoints).simplices.copy()\n nsimplices = simplices.shape[0]\n\n elif self.element_type == \"tet\":\n SingleElementPoints = EquallySpacedPointsTet(C)\n simplices = Delaunay(SingleElementPoints).simplices.copy()\n nsimplices = simplices.shape[0]\n\n\n Bases = np.zeros((nsize_2,SingleElementPoints.shape[0]),dtype=np.float64)\n\n if mesh.element_type == \"line\":\n smesh = Mesh()\n smesh.Line(n=level)\n simplices = smesh.elements\n nsimplices = smesh.nelem\n\n hpBases = Line.Lagrange\n for i in range(SingleElementPoints.shape[0]):\n Bases[:,i] = hpBases(CActual,SingleElementPoints[i])[0]\n\n elif mesh.element_type == \"tri\":\n hpBases = Tri.hpNodal.hpBases\n for i in range(SingleElementPoints.shape[0]):\n Bases[:,i] = hpBases(CActual,SingleElementPoints[i,0],SingleElementPoints[i,1],\n EvalOpt=1,equally_spaced=True,Transform=1)[0]\n\n elif mesh.element_type == \"quad\":\n smesh = Mesh()\n smesh.Rectangle(element_type=\"quad\", nx=level, ny=level)\n simplices = smesh.elements\n nsimplices = smesh.nelem\n\n hpBases = Quad.LagrangeGaussLobatto\n for i in range(SingleElementPoints.shape[0]):\n Bases[:,i] = hpBases(CActual,SingleElementPoints[i,0],SingleElementPoints[i,1])[:,0]\n\n elif mesh.element_type == \"tet\":\n hpBases = Tet.hpNodal.hpBases\n for i in range(SingleElementPoints.shape[0]):\n Bases[:,i] = 
hpBases(CActual,SingleElementPoints[i,0],SingleElementPoints[i,1],\n SingleElementPoints[i,2],EvalOpt=1,equally_spaced=True,Transform=1)[0]\n\n elif mesh.element_type == \"hex\":\n smesh = Mesh()\n smesh.Parallelepiped(element_type=\"hex\", nx=level, ny=level, nz=level)\n simplices = smesh.elements\n nsimplices = smesh.nelem\n\n hpBases = Hex.LagrangeGaussLobatto\n for i in range(SingleElementPoints.shape[0]):\n Bases[:,i] = hpBases(CActual,SingleElementPoints[i,0],SingleElementPoints[i,1],SingleElementPoints[i,2])[:,0]\n\n\n nnode = nsize*mesh.nelem\n nelem = nsimplices*mesh.nelem\n X = np.zeros((nnode,mesh.points.shape[1]),dtype=np.float64)\n T = np.zeros((nelem,mesh.elements.shape[1]),dtype=np.int64)\n\n for ielem in range(mesh.nelem):\n X[ielem*nsize:(ielem+1)*nsize,:] = np.dot(Bases.T, mesh.points[mesh.elements[ielem,:],:])\n T[ielem*nsimplices:(ielem+1)*nsimplices,:] = simplices + ielem*nsize\n\n # REMOVE DUPLICATES\n repoints, idx_repoints, inv_repoints = remove_duplicates_2D(X, decimals=10)\n unique_reelements, inv_reelements = np.unique(T,return_inverse=True)\n unique_reelements = unique_reelements[inv_repoints]\n reelements = unique_reelements[inv_reelements]\n reelements = reelements.reshape(nelem,mesh.elements.shape[1])\n\n self.__reset__()\n self.elements = np.ascontiguousarray(reelements)\n self.points = np.ascontiguousarray(repoints)\n self.element_type = mesh.element_type\n self.nelem = self.elements.shape[0]\n self.nnode = self.points.shape[0]\n\n if self.element_type == \"tri\" or self.element_type == \"quad\":\n self.GetEdges()\n self.GetBoundaryEdges()\n elif self.element_type == \"tet\" or self.element_type == \"hex\":\n self.GetFaces()\n self.GetBoundaryFaces()\n self.GetBoundaryEdges()\n\n if CActual > 0:\n sys.stdout = open(os.devnull, \"w\")\n self.GetHighOrderMesh(p=CActual+1)\n # self.GetHighOrderMesh(p=CActual+1, equally_spaced=equally_spaced, check_duplicates=False)\n sys.stdout = sys.__stdout__",
"def printPolyCoeffs(lam) :\n ell = len(lam)\n useFormat = \"2.6e\"\n count = 0\n def printLine(s, count) :\n if lam[count] < 0 :\n s = s + 3 * \" \"\n else :\n s = s + 4 * \" \"\n s = s + \"{0:\" + useFormat + \"}\"\n print(s . format(lam[count]))\n count = count + 1\n return count\n if ell >= 1 :\n count = printLine(\"x0y0\", count)\n if ell >= 3 :\n count = printLine(\"x1y0\", count)\n count = printLine(\"x0y1\", count)\n if ell >= 6 :\n count = printLine(\"x2y0\", count)\n count = printLine(\"x1y1\", count)\n count = printLine(\"x0y2\", count)\n if ell >= 10 :\n count = printLine(\"x3y0\", count)\n count = printLine(\"x2y1\", count)\n count = printLine(\"x1y2\", count)\n count = printLine(\"x0y3\", count)\n if ell >= 15 :\n count = printLine(\"x4y0\", count)\n count = printLine(\"x3y1\", count)\n count = printLine(\"x2y2\", count)\n count = printLine(\"x1y3\", count)\n count = printLine(\"x0y4\", count)\n if ell >= 21 :\n count = printLine(\"x5y0\", count)\n count = printLine(\"x4y1\", count)\n count = printLine(\"x3y2\", count)\n count = printLine(\"x2y3\", count)\n count = printLine(\"x1y4\", count)\n count = printLine(\"x0y5\", count)\n if ell >= 28 :\n count = printLine(\"x6y0\", count)\n count = printLine(\"x5y1\", count)\n count = printLine(\"x4y2\", count)\n count = printLine(\"x3y3\", count)\n count = printLine(\"x2y4\", count)\n count = printLine(\"x1y5\", count)\n count = printLine(\"x0y6\", count)\n if ell >= 36 :\n count = printLine(\"x7y0\", count)\n count = printLine(\"x6y1\", count)\n count = printLine(\"x5y2\", count)\n count = printLine(\"x4y3\", count)\n count = printLine(\"x3y4\", count)\n count = printLine(\"x2y5\", count)\n count = printLine(\"x1y6\", count)\n count = printLine(\"x0y7\", count)\n if (ell > 36) or (ell < 1) :\n raise ValueError(\"Polynomial degree less than or equal to 7, please.\")",
"def buildxy(self):\n\n x_dim = float(self.metadata['XPTS'])\n xmin = float(self.metadata['XMIN'])\n xrange = float(self.metadata['XWID'])\n\n d_x = xrange/(x_dim-1)\n x_axis = (np.arange(xmin, xmin+x_dim*d_x, d_x))\n\n # y_dim = float(\"\".join(ProcessSpectra.get_from_dict('YPTS')))\n # ymin = list(map(float, get_from_dict('YMIN')))\n # yrange = float(\"\".join(ProcessSpectra.get_from_dict('YWID')))\n\n frwidth = 1000/(x_axis[0])\n frinc = frwidth/(len(self.zdata))\n freq = np.arange(-frwidth, frwidth, frinc*2)\n xdata = freq\n ydata = freq\n\n return xdata, ydata",
"def test_lifted_index():\n pressure = np.array([1014., 1000., 997., 981.2, 947.4, 925., 914.9, 911.,\n 902., 883., 850., 822.3, 816., 807., 793.2, 770.,\n 765.1, 753., 737.5, 737., 713., 700., 688., 685.,\n 680., 666., 659.8, 653., 643., 634., 615., 611.8,\n 566.2, 516., 500., 487., 484.2, 481., 475., 460.,\n 400.]) * units.hPa\n temperature = np.array([24.2, 24.2, 24., 23.1, 21., 19.6, 18.7, 18.4,\n 19.2, 19.4, 17.2, 15.3, 14.8, 14.4, 13.4, 11.6,\n 11.1, 10., 8.8, 8.8, 8.2, 7., 5.6, 5.6,\n 5.6, 4.4, 3.8, 3.2, 3., 3.2, 1.8, 1.5,\n -3.4, -9.3, -11.3, -13.1, -13.1, -13.1, -13.7, -15.1,\n -23.5]) * units.degC\n dewpoint = np.array([23.2, 23.1, 22.8, 22., 20.2, 19., 17.6, 17.,\n 16.8, 15.5, 14., 11.7, 11.2, 8.4, 7., 4.6,\n 5., 6., 4.2, 4.1, -1.8, -2., -1.4, -0.4,\n -3.4, -5.6, -4.3, -2.8, -7., -25.8, -31.2, -31.4,\n -34.1, -37.3, -32.3, -34.1, -37.3, -41.1, -37.7, -58.1,\n -57.5]) * units.degC\n parcel_prof = parcel_profile(pressure, temperature[0], dewpoint[0])\n li = lifted_index(pressure, temperature, parcel_prof)\n assert_almost_equal(li, -7.9115691 * units.delta_degree_Celsius, 2)",
"def superposition(self):\r\n superpos_array = [[0,0,0],[0,0,0],[0,0,0]]\r\n #check normalised:\r\n n = sum(self.block_weights)\r\n if n != 1:\r\n #normalise here if required\r\n self.block_weights = [x/n for x in self.block_weights]\r\n o = self.block_opts\r\n w = self.block_weights\r\n for i in range(TILE_SIZE):\r\n for j in range(TILE_SIZE):\r\n for k in range(len(o)):\r\n superpos_array[j][i] += 254*get_blocks(o[k])[j][i]*w[k] \r\n \r\n return superpos_array\r\n \r\n #propgate changes out\r",
"def m2_m3_make_lower_shape_points_list(dx, dy, m_info, SEN_info):\n \"\"\"\n 1 Get information from m_info & SEN_info.\n \"\"\"\n x_m = m_info[0]\n y_m = m_info[1]\n z_m = m_info[2]\n\n m_points = m_info[3]\n\n m_p0 = m_points[0]\n m_p1 = m_points[1]\n m_p2 = m_points[2]\n m_p3 = m_points[3]\n\n w_sen = SEN_info[0]\n n_w_sen = SEN_info[1]\n h_sen = SEN_info[2]\n t_sen = SEN_info[3]\n l_n = SEN_info[4]\n r_n = SEN_info[5]\n set = SEN_info[6]\n l_offset = SEN_info[7]\n r_offset = SEN_info[8]\n\n \"\"\"\n 2 Make lists.\n lower_shape_left_upper_row list\n lower_shape_left_lower_row list\n\n lower_shape_right_upper_row list\n lower_shape_right_lower_row list\n \"\"\"\n # Leftside\n lower_shape_left_upper_row = []\n lower_shape_left_lower_row = []\n\n for i in range(l_n):\n # upper row\n ix = i * l_offset + set\n iy = y_m - t_sen + dy\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = X_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n upper_points = [p1, p2, p3, p4, p5, p6, p7, p8]\n lower_shape_left_upper_row.extend((upper_points))\n\n for i in range(l_n - 1, -1, -1):\n # lower row\n ix = i * l_offset + set\n iy = t_sen + dy\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = X_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n lower_points = [p5, p6, p7, p8, p1, p2, p3, p4]\n lower_shape_left_lower_row.extend(lower_points)\n\n # Rightside\n lower_shape_right_upper_row = []\n lower_shape_right_lower_row = []\n\n for i in range(r_n):\n # upper row\n ix = x_m - i * r_offset - set\n iy = y_m - t_sen + dy\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = X_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n upper_points = [p8, p7, p6, p5, p4, p3, p2, p1]\n lower_shape_right_upper_row.extend((upper_points))\n\n for i in range(r_n - 1, -1, -1):\n # lower row\n ix = x_m - i * r_offset - set\n iy = t_sen + dy\n p0, p1, p2, p3, p4, p5, p6, p7, p8 = X_lower_shape_points(ix, iy, w_sen, t_sen, n_w_sen)\n lower_points = [p4, p3, p2, p1, p8, p7, p6, p5]\n lower_shape_right_lower_row.extend(lower_points)\n\n lower_shape_left = [lower_shape_left_upper_row, lower_shape_left_lower_row]\n lower_shape_right = [lower_shape_right_upper_row, lower_shape_right_lower_row]\n\n return lower_shape_left, lower_shape_right",
"def __init__(self, model, line, segments = None,head_target = 0,\r\n variables = [], priors=[]):\r\n\r\n import numpy as np\r\n from scipy.interpolate import interp1d\r\n import copy\r\n \r\n # Append this element to the specified model\r\n self.model = model\r\n model.elementlist.append(self)\r\n model.linear_solver = True\r\n\r\n # ---------------------------------------------------------------------\r\n # Subdivide the provided no flow boundary into segments pieces\r\n \r\n # Complexify the line, if it wasn't already complex\r\n line = self.complexify(line)\r\n \r\n # The subdivision algorith requires the line coordinates as a real N-by-2 matrix\r\n line = np.column_stack((\r\n np.real(line)[:,np.newaxis],\r\n np.imag(line)[:,np.newaxis]))\r\n \r\n self.line_raw = copy.copy(line)\r\n if segments is None:\r\n self.segments = line.shape[0]-1\r\n else:\r\n self.segments = segments\r\n \r\n if self.segments < self.line_raw.shape[0]-1:\r\n raise Exception('Prescribed number of line segments '+str(self.segments)+\" mustn't be smaller than base number of segments \"+str(line.shape[0]-1)+'.')\r\n \r\n if self.segments > self.line_raw.shape[0]-1:\r\n \r\n # Subdivide the line\r\n self.line = self.subdivide_line(line,self.segments)\r\n self.line_c = self.line[:,0] + 1j*self.line[:,1]\r\n \r\n else:\r\n \r\n self.line = self.line_raw.copy()\r\n self.line_c = self.line[:,0] + 1j*self.line[:,1]\r\n \r\n # Also get the normal vector components to each segment\r\n self.line_nvec = self.line[:,1] - 1j*self.line[:,0]\r\n self.line_nvec = self.line_nvec/np.abs(self.line_nvec)\r\n\r\n # ---------------------------------------------------------------------\r\n \r\n # Get strength parameters for each vertex\r\n self.strength = np.ones(self.segments)\r\n \r\n \r\n self.zc = []\r\n self.segment_nvec = []\r\n self.L = []\r\n \r\n for seg in range(self.segments):\r\n \r\n self.zc += [(self.line_c[seg]+self.line_c[seg+1])/2]\r\n \r\n # Calculate the normal vector to this segment\r\n self.segment_nvec += [(self.line_c[seg]-self.line_c[seg+1])]\r\n self.segment_nvec[-1]= [np.imag(self.segment_nvec[-1])-1j*np.real(self.segment_nvec[-1])]\r\n \r\n self.L += [np.abs(self.line_c[seg+1] - self.line_c[seg])]\r\n \r\n self.zc = np.asarray(self.zc)\r\n \r\n # Extract target variables\r\n self.variables = variables\r\n self.priors = priors\r\n \r\n self.L = np.asarray(self.L)\r\n \r\n \r\n # Check if the prior matches the number of parameters\r\n if len(self.priors) != len(self.variables):\r\n raise Exception('Number of priors must match number of unknown variables. Number of priors: '+str(self.priors)+' / Number of unknown variables: '+str(len(self.variables)))\r\n \r\n # Go through all elements\r\n if len(self.variables) > 0:\r\n # There are some model variables specified\r\n for idx,var in enumerate(self.variables):\r\n self.model.num_params += 1\r\n exec(\"self.model.params += [self.%s]\" % var)\r\n self.model.priors += [self.priors[idx]]\r\n self.model.variables += [var]\r\n if 'name' in list(self.priors[idx].keys()):\r\n self.model.param_names += [self.priors[idx]['name']] \r\n else: \r\n self.model.param_names += ['unknown']",
"def ghost_points(self):\n return self.central.loffset, self.central.roffset",
"def MINET(self):",
"def direction(self):\n import pylab\n i = 0\n j = 0\n vals = []\n vects = []\n kpx = self.keypoints.x\n kpy = self.keypoints.y\n sigma = self.keypoints.sigma\n img = self.raw\n pylab.figure()\n pylab.imshow(img, interpolation='nearest')\n\n for y, x, s in zip(kpy, kpx, sigma):\n s_patch = numpy.trunc(s * 2)\n\n if s_patch % 2 == 0 :\n s_patch += 1\n\n if s_patch < 3 : s_patch = 3\n\n if (x > s_patch / 2 and x < img.shape[1] - s_patch / 2 - 1 and y > s_patch / 2 and y < img.shape[0] - s_patch / 2):\n\n patch = img[y - (s_patch - 1) / 2:y + (s_patch - 1) / 2 + 1, x - (s_patch - 1) / 2:x + (s_patch - 1) / 2 + 1]\n x_patch = numpy.arange(s_patch)\n Gx = numpy.exp(-4 * numpy.log(2) * (x_patch - numpy.median(x_patch)) ** 2 / s)\n Gy = Gx[:, numpy.newaxis]\n dGx = -Gx * 4 * numpy.log(2) / s * 2 * (x_patch - numpy.median(x_patch))\n dGy = dGx[:, numpy.newaxis]\n d2Gx = -8 * numpy.log(2) / s * ((x_patch - numpy.median(x_patch)) * dGx + Gx)\n d2Gy = d2Gx[:, numpy.newaxis]\n\n Hxx = d2Gx * Gy\n Hyy = d2Gy * Gx\n Hxy = dGx * dGy\n\n d2x = (Hxx.ravel() * patch.ravel()).sum()\n d2y = (Hyy.ravel() * patch.ravel()).sum()\n dxy = (Hxy.ravel() * patch.ravel()).sum()\n H = numpy.array([[d2y, dxy], [dxy, d2x]])\n val, vect = numpy.linalg.eig(H)\n\n# print 'new point'\n# print x, y\n# print val\n# print vect\n# print numpy.dot(vect[0],vect[1])\n e = numpy.abs(val[0] - val[1]) / numpy.abs(val[0] + val[1])\n j += 1\n# print j\n# print e\n if numpy.abs(val[1]) < numpy.abs(val[0]): # reorganisation des valeurs propres et vecteurs propres\n val[0],val[1] = val[1],val[0]\n vect = vect[-1::-1,:]\n\n\n pylab.annotate(\"\", xy=(x + vect[0][0] * val[0], y + vect[0][1] * val[0]), xytext=(x, y),\n arrowprops=dict(facecolor='red', shrink=0.05),)\n\n pylab.annotate(\"\", xy=(x + vect[1][0] * val[1], y + vect[1][1] * val[1]), xytext=(x, y),\n arrowprops=dict(facecolor='red', shrink=0.05),)\n pylab.plot(x, y, 'og')\n vals.append(val)\n vects.append(vect)\n return vals, vects",
"def oss_stacked(block, cut, laser):\r\n\tx0_1, x1_1, z0_1, taper_x_1, taper_y_1, layers_1, pyramid_angle_1 = oss_helper(block, cut, laser, cut[\"final_dimension_x\"]/2)\r\n\tx0_2, x1_2, z0_2, taper_x_2, taper_y_2, layers_2, pyramid_angle_2 = oss_helper(block, cut, laser, cut[\"final_dimension_y\"]/2)\r\n\tangle = math.radians(laser[\"kerf_angle\"]/2)\r\n\tgap = math.tan(pyramid_angle_1) * (cut[\"final_dimension_x\"]/2) + cut[\"gap_size\"]\r\n\tunit_length = gap + cut[\"base_height\"]\r\n\tmax_slices = math.floor(block[\"thickness\"]/unit_length)\r\n\ttaper_straight = math.tan(angle)*(laser[\"z_spacing\"])\r\n\r\n\tif cut[\"core\"] == \"yes\":\r\n\t\tcutlist = json.loads(vertical_core(block,cut,laser))\r\n\t\tcutlist.pop()\r\n\t\tcutlist.pop(0)\r\n\telse:\r\n\t\tcutlist = []\r\n\r\n\ta0 = -(90 + math.degrees(angle))\r\n\r\n\tz_shift = (cut[\"base_height\"] + gap) * math.sin(angle)\r\n\tx_shift = (cut[\"base_height\"] + gap) * math.cos(angle)\r\n\r\n\tx_delta = math.sin(angle) * block[\"origin_x\"]\r\n\ty_delta = math.sin(angle) * block[\"origin_y\"]\r\n\tz1_delta = math.cos(angle) * block[\"origin_x\"]\r\n\tz2_delta = math.cos(angle) * block[\"origin_y\"]\r\n\r\n\tcutlist.append([\"a_abs\",f\"{a0:.6f}\"])\r\n\tcutlist.append([\"c_abs\",str(block[\"physical_rotation\"])])\r\n\tcutlist.append([\"z_abs\",str(z0_1 + z2_delta)])\r\n\r\n\tif pyramid_angle_1 >= angle and pyramid_angle_2 >= angle:\r\n\r\n\t\tif cut[\"num_of_seeds\"] == \"max\":\r\n\t\t\tnum_slices = max_slices\r\n\t\telse:\r\n\t\t\tnum_slices = cut[\"num_of_seeds\"] + 1\r\n\t\t\r\n\t\tfor i in range(num_slices):\r\n\t\t\tcutlist = (cutlist\r\n\t\t\t\t\t\t + pyramid_slice(cut[\"final_dimension_y\"]/2 - block[\"origin_x\"],x0_1 + y_delta,-cut[\"final_dimension_y\"]/2 - block[\"origin_x\"],x1_1 + y_delta,z0_1 + block[\"origin_y\"],laser[\"xy_spacing\"], laser[\"z_spacing\"], taper_x_1,taper_y_1,taper_straight,layers_1)\r\n\t\t\t\t\t\t + [[\"z_abs\",str(z0_2 + z1_delta)]] + [[\"c_abs\",\"90\"]]\r\n\t\t\t\t\t\t + pyramid_slice(cut[\"final_dimension_x\"]/2 + block[\"origin_y\"],x0_2 + x_delta,-cut[\"final_dimension_x\"]/2 + block[\"origin_y\"],x1_2 + x_delta,z0_2 + block[\"origin_x\"],laser[\"xy_spacing\"], laser[\"z_spacing\"], taper_x_2,taper_y_2,taper_straight,layers_2)\r\n\t\t\t\t\t\t + [[\"z_abs\",str(z0_1 - z2_delta)]] + [[\"c_abs\",\"180\"]]\r\n\t\t\t\t\t\t + pyramid_slice(cut[\"final_dimension_y\"]/2 + block[\"origin_x\"],x0_1 - y_delta,-cut[\"final_dimension_y\"]/2 + block[\"origin_x\"],x1_1 - y_delta,z0_1 - block[\"origin_y\"],laser[\"xy_spacing\"], laser[\"z_spacing\"], taper_x_1,taper_y_1,taper_straight,layers_1)\r\n\t\t\t\t\t\t + [[\"z_abs\",str(z0_2 - z1_delta)]] + [[\"c_abs\",\"270\"]]\r\n\t\t\t\t\t\t + pyramid_slice(cut[\"final_dimension_x\"]/2 - block[\"origin_y\"],x0_2 - x_delta,-cut[\"final_dimension_x\"]/2 - block[\"origin_y\"],x1_2 - x_delta,z0_2 - block[\"origin_x\"],laser[\"xy_spacing\"], laser[\"z_spacing\"], taper_x_2,taper_y_2,taper_straight,layers_2)\r\n\t\t\t\t\t\t )\r\n\t\t\tz0_1 = z0_1 + z_shift\r\n\t\t\tz0_2 = z0_2 + z_shift\r\n\t\t\tx0_1, x1_1, x0_2, x1_2 = x0_1 - x_shift, x1_1 - x_shift, x0_2 - x_shift, x1_2 - x_shift\r\n\t\t\tcutlist.append([\"c_abs\",str(block[\"physical_rotation\"])])\r\n\t\t\tcutlist.append([\"z_abs\",str(z0_1 + z2_delta)])\t\r\n\telse:\r\n\t\traise Exception(\"Pyramid angle too small\")\r\n\r\n\tcutlist.insert(0, [\"set_trigger4\", \"1\", \"0\", \"7\", \"8\", \"45\"])\r\n\tcutlist.append([\"stop_trigger\"])\r\n\treturn json.dumps(cutlist)",
"def F_subset_S5PCH4(self,path,if_trop_xch4=False,s5p_product='RPRO'): \n from scipy.interpolate import interp1d\n # find out list of l2 files to subset\n if os.path.isfile(path):\n self.F_update_popy_with_control_file(path)\n l2_list = self.l2_list\n l2_dir = self.l2_dir\n else:\n import glob\n l2_dir = path\n l2_list = []\n cwd = os.getcwd()\n os.chdir(l2_dir)\n start_date = self.start_python_datetime.date()\n end_date = self.end_python_datetime.date()\n days = (end_date-start_date).days+1\n DATES = [start_date + datetime.timedelta(days=d) for d in range(days)]\n for DATE in DATES:\n flist = glob.glob('S5P_'+s5p_product+'_L2__CH4____'+DATE.strftime(\"%Y%m%d\")+'T*.nc')\n l2_list = l2_list+flist\n \n os.chdir(cwd)\n self.l2_dir = l2_dir\n self.l2_list = l2_list\n \n #maxsza = self.maxsza \n #maxcf = self.maxcf\n west = self.west\n east = self.east\n south = self.south\n north = self.north\n min_qa_value = self.min_qa_value\n \n # absolute path of useful variables in the nc file\n data_fields = ['/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds',\\\n '/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds',\\\n '/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/solar_zenith_angle',\\\n '/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/viewing_zenith_angle',\\\n '/PRODUCT/latitude',\\\n '/PRODUCT/longitude',\\\n '/PRODUCT/qa_value',\\\n '/PRODUCT/time',\\\n '/PRODUCT/delta_time',\\\n '/PRODUCT/methane_mixing_ratio',\\\n '/PRODUCT/methane_mixing_ratio_bias_corrected',\\\n '/PRODUCT/methane_mixing_ratio_precision'] \n # standardized variable names in l2g file. should map one-on-one to data_fields\n data_fields_l2g = ['latitude_bounds','longitude_bounds','SolarZenithAngle',\\\n 'vza','latc','lonc','qa_value','time','delta_time',\\\n 'column_amount_no_bias_correction','column_amount','column_uncertainty']\n if if_trop_xch4:\n # absolute path of useful variables in the nc file\n data_fields = ['/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds',\\\n '/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds',\\\n '/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/solar_zenith_angle',\\\n '/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/viewing_zenith_angle',\\\n '/PRODUCT/SUPPORT_DATA/INPUT_DATA/dry_air_subcolumns',\\\n '/PRODUCT/SUPPORT_DATA/INPUT_DATA/surface_pressure',\\\n '/PRODUCT/SUPPORT_DATA/INPUT_DATA/pressure_interval',\\\n '/PRODUCT/SUPPORT_DATA/INPUT_DATA/methane_profile_apriori',\\\n '/PRODUCT/latitude',\\\n '/PRODUCT/longitude',\\\n '/PRODUCT/qa_value',\\\n '/PRODUCT/time',\\\n '/PRODUCT/delta_time',\\\n '/PRODUCT/methane_mixing_ratio',\\\n '/PRODUCT/methane_mixing_ratio_bias_corrected',\\\n '/PRODUCT/methane_mixing_ratio_precision'] \n # standardized variable names in l2g file. 
should map one-on-one to data_fields\n data_fields_l2g = ['latitude_bounds','longitude_bounds','SolarZenithAngle',\\\n 'vza','dry_air_subcolumns','surface_pressure','pressure_interval',\n 'methane_profile_apriori','latc','lonc','qa_value','time','delta_time',\\\n 'column_amount_no_bias_correction','column_amount','column_uncertainty']\n self.logger.info('Read, subset, and store level 2 data to l2g_data')\n self.logger.info('Level 2 data are located at '+l2_dir)\n l2g_data = {}\n for fn in l2_list:\n fn_dir = l2_dir+fn\n self.logger.info('Loading '+fn)\n outp_nc = self.F_read_S5P_nc(fn_dir,data_fields,data_fields_l2g)\n if if_trop_xch4:\n sounding_interp = F_interp_geos_mat(outp_nc['lonc'],outp_nc['latc'],outp_nc['UTC_matlab_datenum'],\\\n geos_dir='/mnt/Data2/GEOS/s5p_interp/',\\\n interp_fields=['TROPPT'])\n outp_nc['TROPPT'] = sounding_interp['TROPPT']\n #f1 = outp_nc['SolarZenithAngle'] <= maxsza\n #f2 = outp_nc['cloud_fraction'] <= maxcf\n # ridiculously, qa_value has a scale_factor of 0.01. so error-prone\n f3 = outp_nc['qa_value'] >= min_qa_value \n f4 = outp_nc['latc'] >= south\n f5 = outp_nc['latc'] <= north\n tmplon = outp_nc['lonc']-west\n tmplon[tmplon < 0] = tmplon[tmplon < 0]+360\n f6 = tmplon >= 0\n f7 = tmplon <= east-west\n f8 = outp_nc['UTC_matlab_datenum'] >= self.start_matlab_datenum\n f9 = outp_nc['UTC_matlab_datenum'] <= self.end_matlab_datenum\n validmask = f3 & f4 & f5 & f6 & f7 & f8 & f9\n self.logger.info('You have '+'%s'%np.sum(validmask)+' valid L2 pixels')\n l2g_data0 = {}\n if np.sum(validmask) == 0:\n continue\n # yep it's indeed messed up\n Lat_lowerleft = np.squeeze(outp_nc['latitude_bounds'][:,:,0])[validmask]\n Lat_upperleft = np.squeeze(outp_nc['latitude_bounds'][:,:,3])[validmask]\n Lat_lowerright = np.squeeze(outp_nc['latitude_bounds'][:,:,1])[validmask]\n Lat_upperright = np.squeeze(outp_nc['latitude_bounds'][:,:,2])[validmask]\n Lon_lowerleft = np.squeeze(outp_nc['longitude_bounds'][:,:,0])[validmask]\n Lon_upperleft = np.squeeze(outp_nc['longitude_bounds'][:,:,3])[validmask]\n Lon_lowerright = np.squeeze(outp_nc['longitude_bounds'][:,:,1])[validmask]\n Lon_upperright = np.squeeze(outp_nc['longitude_bounds'][:,:,2])[validmask]\n l2g_data0['latr'] = np.column_stack((Lat_lowerleft,Lat_upperleft,Lat_upperright,Lat_lowerright))\n l2g_data0['lonr'] = np.column_stack((Lon_lowerleft,Lon_upperleft,Lon_upperright,Lon_lowerright))\n for key in outp_nc.keys():\n if key not in {'latitude_bounds','longitude_bounds','time_utc','time','delta_time'}:\n l2g_data0[key] = outp_nc[key][validmask]\n if if_trop_xch4:\n # calculate trop xch4 using l2g_data0\n l2g_data0['air_column_strat'] = np.zeros(l2g_data0['latc'].shape)\n l2g_data0['air_column_total'] = np.zeros(l2g_data0['latc'].shape)\n l2g_data0['methane_ap_column_strat'] = np.zeros(l2g_data0['latc'].shape)\n for il2 in range(len(l2g_data0['latc'])):\n cum_air = np.concatenate(([0.],np.cumsum(l2g_data0['dry_air_subcolumns'][il2,].squeeze())))\n cum_methane = np.concatenate(([0.],np.cumsum(l2g_data0['methane_profile_apriori'][il2,].squeeze())))\n # model top is 10 Pa, 12 layers, 13 levels\n plevel = 10.+np.arange(0,13)*l2g_data0['pressure_interval'][il2]\n tropp = l2g_data0['TROPPT'][il2]\n l2g_data0['air_column_total'][il2] = np.sum(l2g_data0['dry_air_subcolumns'][il2,])\n f = interp1d(plevel,cum_air)\n l2g_data0['air_column_strat'][il2] = f(tropp)\n f = interp1d(plevel,cum_methane)\n l2g_data0['methane_ap_column_strat'][il2] = f(tropp)\n del l2g_data0['dry_air_subcolumns']\n del l2g_data0['methane_profile_apriori'] 
\n l2g_data = self.F_merge_l2g_data(l2g_data,l2g_data0)\n self.l2g_data = l2g_data\n if not l2g_data:\n self.nl2 = 0\n else:\n self.nl2 = len(l2g_data['latc'])",
"def IR():\n s = np.array(\n [2.40774137,2.287696084,2.203613927,2.048710132,1.899829585,1.591776247,\n 2.021218754,2.572949552,3.298381484,3.635993426,3.788266224,3.8307278,3.834208811]\n )\n\n TI = np.array([50, 75, 100, 150, 200, 300, 400, 500, 750, 1000, 1500, 2000, 3000])\n\n comp1 = s * np.array([-159.1,-134.2,-109.1,-64.7,25.0,40.1,88.6,126.8,187.6,219.4,245.4,253.6,256.1])\n comp2 = s * np.array([-368.3,-356.9,-343.8,-318.1,-292.0,-242.5,-199.3,-158.4,-68.8,14.2,131.9,219.5,333.5])\n comp3 = s * np.array([-77.5,-51.9,-29.8,9.9,40.2,85.7,115.4,135.1,160.1,167.6,172.3,171.7,171.8])\n comp4 = s * np.array([-265.0,-240.6,-216.7,-170.5,-128.2,-53.5,9.6,62.3,159.7,223.8,296.5,328.3,346.7])\n comp5 = s * np.array([-346.5,-328.9,-312.1,-278.5,-244.4,-182.3,-128.0,-80.0,30.8,109.3,225.1,299.5,372.2])\n\n comp = [comp1, comp2, comp3, comp4, comp5]\n MSE = []\n colors = [\"#1f77b4\", \"#ff7f0e\", \"#2ca02c\", \"#d62728\", \"#9467bd\"]\n x_new = np.linspace(0, 3000, 10000)\n for i, j, k in zip(comp, colors, np.arange(1, 6)):\n plt.scatter(TI, i, c=j)\n # popt, _ = curve_fit(MZ, TI, i, p0=np.array([200, 220, 300]))\n popt, _ = curve_fit(MZ, TI, i, p0=np.array([300, 220]))\n # M_z0, T1, M0 = popt\n M0, T1 = popt\n y_new = MZ(x_new, *popt)\n plt.plot(x_new, y_new, \"--\", c=j, label=f\"Fit Comp. {k:d} : $T_1$={T1:3.2f}\")\n MSE.append(mean_squared_error(i,y_new[TI]))\n print(MSE)\n print(np.mean(MSE))\n plt.grid()\n plt.legend(loc=\"best\")\n plt.xlabel(\"TI\")\n plt.ylabel(r\"Singal Intensity $M_z$\")\n plt.show()",
"def calc_line(tik_instance, grads, y, grads_h,\n loc_h, loc_w, n_index, start_c1, end_c1):\n in_w = grads.shape[3]\n out_w = y.shape[3]\n grads_ub = tik_instance.Tensor(\n \"float32\", [in_w, 16], name=\"grads_ub\", scope=tik.scope_ubuf)\n y_ub = tik_instance.Tensor(\n \"float32\", [out_w, 16], name=\"y_ub\", scope=tik.scope_ubuf)\n loc_reg = tik_instance.Scalar(dtype=\"int32\")\n c1_reg = tik_instance.Scalar(dtype=\"int32\")\n\n calc_c1_num = end_c1 - start_c1\n with tik_instance.for_range(0, calc_c1_num) as c1_index:\n # read one line grads\n c1_reg.set_as(start_c1 + c1_index)\n tik_instance.tensor_mov(grads_ub,\n grads[n_index, c1_reg, grads_h, 0, 0],\n '', 1, (in_w * 16 * 4 + 31) // 32, 0, 0)\n # clear out ub\n clear_ub(tik_instance, y_ub)\n\n with tik_instance.for_range(0, in_w) as i:\n loc_reg.set_as(loc_w[i])\n tik_instance.vadd(16, y_ub[loc_reg, 0], y_ub[loc_reg, 0],\n grads_ub[i, 0], 1, 1, 1, 1, 0, 0, 0)\n\n # move data out\n tik_instance.set_atomic_add(1)\n tik_instance.tensor_mov(y[n_index, start_c1 + c1_index, loc_h, 0, 0],\n y_ub, '', 1,\n (out_w * 16 * 4 + 31) // 32, 0, 0)\n tik_instance.set_atomic_add(0)",
"def lat_lons(self):",
"def loadData(fname='Unstra.out2.00008.athdf'):\n #data=ath.athdf(fname,quantities=['B1','B2','B3'])\n time,data=ath.athdf(fname,quantities=['Bcc1'])\n bx = data['Bcc1']\n time,data=ath.athdf(fname,quantities=['Bcc2'])\n by = data['Bcc2']\n time,data=ath.athdf(fname,quantities=['Bcc3'])\n bz = data['Bcc3']\n x = data['x1f']\n y = data['x2f']\n z = data['x3f']\n # refinement\n rfac = 1.0\n ##if bx.shape[0] < 512:\n ## nz,ny,nx = bx.shape\n ## rfac = int(512/bx.shape[0])\n ## bx = np.repeat(bx,rfac,axis=0)\n ## bx = np.repeat(bx,rfac,axis=1)\n ## bx = np.repeat(bx,rfac,axis=2)\n ## by = np.repeat(by,rfac,axis=0)\n ## by = np.repeat(by,rfac,axis=1)\n ## by = np.repeat(by,rfac,axis=2)\n ## bz = np.repeat(bz,rfac,axis=0)\n ## bz = np.repeat(bz,rfac,axis=1)\n ## bz = np.repeat(bz,rfac,axis=2)\n # ---\n def curl(vx,vy,vz,dx,dy,dz):\n [dzvx,dyvx,dxvx] = np.gradient(vx)\n [dzvy,dyvy,dxvy] = np.gradient(vy)\n [dzvz,dyvz,dxvz] = np.gradient(vz)\n cx = dyvz/dy-dzvy/dz\n cy = dzvx/dz-dxvz/dx\n cz = dxvy/dx-dyvx/dy\n # No need to del the reference by one manually\n # allow python to perform its own garbage collection\n # after the function return cxyz\n #del dzvx\n #del dzvy\n #del dzvz\n return cx,cy,cz\n # ---\n dx = dz = (x[1]-x[0])/rfac\n dy = (y[1]-y[0])/rfac\n jx,jy,jz = curl(bx,by,bz,dx,dy,dz)\n j2 = jx**2+jy**2+jz**2\n return j2",
"def __init__(self):\n self.lattices = []\n self.meshfns = []",
"def getCl(filename):\n powSpec = pf.getdata(filename,1)\n temps = powSpec.field('TEMPERATURE')\n ell = np.arange(temps.size)\n return ell,temps",
"def __init__(self, **kwargs):\n super(VeryCleverBeamsplitter, self).__init__(**kwargs)\n self.shader_source = IL_SHADER_SOURCE\n self.centre = [0.5, 0.5]\n self.blazing_function = np.linspace(0,1,32)\n self.zernike_coefficients = np.zeros(12)"
] | [
"0.57638854",
"0.5657205",
"0.56363416",
"0.56297696",
"0.55647045",
"0.54962194",
"0.5491665",
"0.5449147",
"0.5436916",
"0.543175",
"0.541064",
"0.5409078",
"0.54073215",
"0.5384243",
"0.5368848",
"0.5359744",
"0.5338027",
"0.53308344",
"0.53248966",
"0.53210336",
"0.5317864",
"0.5317378",
"0.53097457",
"0.5306673",
"0.52744174",
"0.5270277",
"0.52656543",
"0.5264646",
"0.5257003",
"0.5253003",
"0.52487034",
"0.52433264",
"0.52301234",
"0.5221312",
"0.52150756",
"0.52134806",
"0.5212303",
"0.5212255",
"0.52101684",
"0.5207466",
"0.52046144",
"0.51975715",
"0.5194387",
"0.5189362",
"0.5179987",
"0.5175997",
"0.51740354",
"0.5164965",
"0.5162868",
"0.5157629",
"0.5155251",
"0.51495653",
"0.51334304",
"0.51334274",
"0.51329017",
"0.5130825",
"0.51301605",
"0.5120995",
"0.5120274",
"0.51150393",
"0.5114226",
"0.5109395",
"0.51078904",
"0.5107364",
"0.51028305",
"0.5102354",
"0.51020724",
"0.5101551",
"0.50985986",
"0.5093038",
"0.5089062",
"0.5084511",
"0.50789887",
"0.50771743",
"0.50767756",
"0.5073803",
"0.5071874",
"0.50638473",
"0.5063318",
"0.5061446",
"0.5059666",
"0.5054223",
"0.5052023",
"0.5051174",
"0.5050999",
"0.50503397",
"0.5042965",
"0.50399774",
"0.50337434",
"0.5032237",
"0.5027485",
"0.5025207",
"0.5023924",
"0.5023065",
"0.5022317",
"0.5021935",
"0.5019727",
"0.5017652",
"0.50133723",
"0.5013027"
] | 0.5448095 | 8 |
Writes tao LEM lines to a .tao file. Requires epics (or proxy object). | def write_tao_BC_and_LEM_lines(filePath='LEM_settings.tao', epics=None, verbose=False):
lines = tao_BC_and_LEM_lines(epics)
with open(filePath, 'w') as f:
for l in lines:
f.write(l+'\n')
if verbose:
print('Written:', filePath)
return lines | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def format_tep_file_lines(otu_table_data, mapping_lines, tree_lines,\r\n prefs_dict):\r\n\r\n # write tree file lines\r\n lines = ['>>tre\\n']\r\n lines += [tree_lines.read()]\r\n lines += '\\n'\r\n\r\n # get otu table data\r\n if(otu_table_data.ObservationMetadata):\r\n lines += ['>>otm\\n#OTU ID\\tOTU Metadata\\n']\r\n for i in range(len(otu_table_data.ObservationIds)):\r\n new_string = otu_table_data.ObservationIds[i] + '\\t'\r\n for m in otu_table_data.ObservationMetadata[i]['taxonomy']:\r\n new_string += m + ';'\r\n lines += [new_string]\r\n lines += '\\n'\r\n\r\n # format and write otu table and taxonomy lines\r\n lines += ['>>osm\\n']\r\n if otu_table_data.ObservationMetadata is None:\r\n lines += [str(otu_table_data.delimitedSelf())]\r\n elif \"taxonomy\" in otu_table_data.ObservationMetadata[0]:\r\n lines += [str(otu_table_data.delimitedSelf(header_key=\"taxonomy\",\r\n header_value=\"Consensus Lineage\",\r\n metadata_formatter=lambda x: ';'.join(x)))]\r\n\r\n # write mapping file lines\r\n lines += ['\\n>>sam\\n']\r\n lines += mapping_lines.readlines()\r\n\r\n # if prefs file supplied, write pref lines\r\n if prefs_dict:\r\n te_prefs = format_te_prefs(prefs_dict)\r\n lines += ['\\n>>pre\\n']\r\n lines += te_prefs\r\n\r\n return lines",
"def createTOFin(En):\n ftemplate = open(\"TOFtemplate.in\", \"r\")\n lines = ftemplate.readlines()\n ftofin = open(\"TOF.in\", \"w\") \n energyline = lines[12].split()\n lines[12] = \"%s %g %s\\n\"%(energyline[0], En, energyline[2])\n ftofin.writelines(lines)\n ftemplate.close()\n ftofin.close()",
"def write_bmad_linac_phasing_lines(filePath='linac_settings.bmad', epics=None, verbose=False):\n lines = bmad_linac_phasing_lines(epics)\n with open(filePath, 'w') as f:\n for l in lines:\n f.write(l+'\\n')\n if verbose:\n print('Written:', filePath)",
"def tao_BC_and_LEM_lines(epics):\n bc1_e0=epics.caget('SIOC:SYS0:ML00:AO483')*1e6\n bc2_e0=epics.caget('SIOC:SYS0:ML00:AO489')*1e9\n l3_e0 =epics.caget('SIOC:SYS0:ML00:AO500')*1e9\n \n # Charge in LTU\n q_after_horn_cutting = epics.caget('SIOC:SYS0:ML00:CALC252')*1e-12 # pC -> C\n bc1_offset=epics.caget('BMLN:LI21:235:MOTR')*1e-3\n bc2_offset=epics.caget('BMLN:LI24:805:MOTR')*1e-3\n \n bc1_current=epics.caget('SIOC:SYS0:ML00:AO485')\n bc2_current=epics.caget('SIOC:SYS0:ML00:AO195')\n \n # Catch bad settings\n if bc1_current==0:\n print('Warning: BC1 current is zero!')\n bc1_sigma_z = 0\n else:\n # Assumes parabolic distribution\n bc1_sigma_z = q_after_horn_cutting*299792458 / sqrt(10) / bc1_current\n\n if bc2_current==0:\n print('Warning: BC1 current is zero!')\n bc2_sigma_z = 0\n else:\n # Assumes Gaussian distribution\n bc2_sigma_z = q_after_horn_cutting*299792458 / sqrt(12) / bc2_current \n \n lines = []\n lines.append('set dat BC1.energy[1]|meas = '+str(bc1_e0))\n lines.append('set dat BC2.energy[1]|meas = '+str(bc2_e0))\n lines.append('set dat L3.energy[2]|meas = '+str(l3_e0))\n lines.append('set dat BC1.offset[1]|meas = '+str(bc1_offset))\n lines.append('set dat BC2.offset[1]|meas = '+str(bc2_offset))\n \n lines.append(f'! Charge after horn cutting: {q_after_horn_cutting*1e12:10.4} pC')\n lines.append(f'! For BC1 current {bc1_current} A')\n lines.append('set dat BC1.beam[1]|meas = '+str( bc1_sigma_z))\n lines.append(f'! For BC2 current {bc2_current} A')\n lines.append('set dat BC2.beam[1]|meas = '+str( bc2_sigma_z)) \n\n return lines",
"def lines_to_file(file_name: str, write_dir: str, lines: Sequence[str]):\n with open(os.path.join(write_dir, file_name), \"w\", encoding=\"utf-8\") as f:\n for l in lines:\n f.write(f\"{l}\\n\")",
"def save_to_file(self, tojuliet):\n if self.lc.time[0] < 1e4:\n self.lc.time += 2457000\n ascii.write([self.lc.time, self.lc.flux, self.lc.flux_err], 'TIC%d.dat' % self.TIC,\n format='fixed_width_no_header', delimiter=' ', overwrite=True)\n if tojuliet:\n ascii.write([self.lc.time, self.lc.flux, self.lc.flux_err,\n ['TESS' for _ in self.lc.time]], 'TIC%d_juliet.dat' % self.TIC,\n format='fixed_width_no_header', delimiter=' ', overwrite=True)",
"def save_to_MTFIT_style_file(MTs, MTp, nlloc_hyp_filename, inversion_type, outdir, MTp_absolute=[], shift_idxs=[]):\n # Get uid and stations data:\n uid, stations = get_event_uid_and_station_data_MTFIT_FORMAT_from_nonlinloc_hyp_file(nlloc_hyp_filename)\n # Write all data to output dict:\n out_dict = {}\n out_dict[\"MTs\"] = MTs\n out_dict[\"MTp\"] = MTp\n out_dict[\"uid\"] = uid\n out_dict[\"stations\"] = stations\n if len(MTp_absolute)>0:\n out_dict[\"MTp_absolute\"] = MTp_absolute\n if len(shift_idxs)>0:\n out_dict[\"shift_idxs\"] = shift_idxs\n # And save to file:\n out_fname = outdir+\"/\"+uid+\"_FW_\"+inversion_type+\".pkl\"\n print(\"Saving FW inversion to file:\", out_fname)\n pickle.dump(out_dict, open(out_fname, \"wb\"))",
"def write_traces(obj, arc, outfile):\n tdict = dict(obj=obj, arc=arc)\n jdict = ltu.jsonify(tdict)\n # Write\n ltu.savejson(outfile, jdict, easy_to_read=True, overwrite=True)\n print(\"Wrote Traces to {:s}\",outfile)",
"def write_trajectory(self, environmnent, pdb_filename):\n # TODO\n pass",
"def writelines(self, lines):\n for line in lines:\n self.write(line)",
"def save_rollout_to_file(self, episode):\n # get save path\n save_path = os.path.join(self.save_dir, \"rollout_{}.h5\".format(self.counter))\n\n # save rollout to file\n f = h5py.File(save_path, \"w\")\n f.create_dataset(\"traj_per_file\", data=1)\n\n # store trajectory info in traj0 group\n traj_data = f.create_group(\"traj0\")\n traj_data.create_dataset(\"states\", data=np.array(episode.observation))\n traj_data.create_dataset(\"images\", data=np.array(episode.image, dtype=np.uint8))\n traj_data.create_dataset(\"actions\", data=np.array(episode.action))\n\n terminals = np.array(episode.done)\n if np.sum(terminals) == 0:\n terminals[-1] = True\n\n # build pad-mask that indicates how long sequence is\n is_terminal_idxs = np.nonzero(terminals)[0]\n pad_mask = np.zeros((len(terminals),))\n pad_mask[:is_terminal_idxs[0]] = 1.\n traj_data.create_dataset(\"pad_mask\", data=pad_mask)\n\n f.close()\n\n self.counter += 1",
"def write(self, outputFile):\n \n try: \n f = open(outputFile + '.py', 'w')\n for trail in self.trails: \n f.write(\"[\")\n for index in trail:\n f.write(\"({0}, {1}), \".format(*index)) \n f.write(\"]\\n\")\n \n except IOError, e:\n msg = \"Exception encountered when attempting \" + \\\n \"to write data to file: {0}.\" + \\\n \"\\n\\t -- Exception was: {1}\" + \\\n \"\\n\\t For help use --help\".format(outputFile, e)\n raise Usage(e)",
"def write_telluric_transmission_to_file(wls,T,outpath):\n import pickle\n print('------Saving teluric transmission to '+outpath)\n with open(outpath, 'wb') as f: pickle.dump((wls,T),f)",
"def write_body(self):\r\n if self.arguments['--out']:\r\n self.file = open(self.arguments['--out'], \"a+\")\r\n for list_item in self.list_of_body_objects:\r\n self.file.write(list_item.line)\r\n self.file.close()\r\n else:\r\n for list_item in self.list_of_body_objects:\r\n print(list_item.line)",
"def write_file(poet, info_dict):\r\n\r\n filename = SAVE_PATH + '/' + poet + '/' + str(info_dict['id']) + '_'+ str(info_dict['pagenum']) \\\r\n + '_' + info_dict['id2'] +'_' + info_dict['ord2'] \\\r\n + '_' + info_dict['id3'] + '_' + info_dict['ord3'] \\\r\n + '_' + info_dict['id4'] + '_' + info_dict['ord4'] + '.txt'\r\n\r\n print(filename)\r\n with open(filename, 'w', encoding='utf-16') as f:\r\n txt = ','.join([str(info_dict[k]) for k in KEYS ])\r\n txt = txt + '\\n' + '\\n'.join([x for x in info_dict['beyts']])\r\n f.write(txt)\r\n\r\n\r\n locale.setlocale(locale.LC_ALL, '')\r\n DELIMITER = ';'# if locale.localeconv()['decimal_point'] == ',' else ','\r\n\r\n list_of_lists = [[info_dict[k] for k in KEYS]]\r\n with open('D:/poem/molana.csv', 'a', newline='', encoding='utf-16') as csvfile:\r\n\r\n writer = csv.writer(csvfile, delimiter=DELIMITER)\r\n writer.writerows(list_of_lists)",
"def write_file(l_dta, outputfile):\n l_dta2 = []\n for row in l_dta:\n s = '\\t'.join(row)\n l_dta2.append(s)\n s_dta = \"\\r\\n\".join(l_dta2)\n try:\n with open(outputfile, 'w') as fd:\n fd.write(s_dta)\n except (IOError,) as e:\n tracker()\n return None",
"def test_file_write_attributes_for_each(self):\n\n with OrthoMultiTs(self.testfilename, n_loc=3, mode=\"w\") as dataset:\n n_data = 5\n locations = np.array([1, 2, 3])\n data = {\n \"test\": np.arange(n_data * 3).reshape(3, n_data),\n \"test2\": np.arange(n_data * 3).reshape(3, n_data)\n }\n base = datetime(2007, 1, n_data)\n dates = np.array(\n [base + timedelta(hours=i) for i in range(n_data)])\n descriptions = np.repeat([str(\"station\")], 3).tolist()\n\n dataset.write_all(locations,\n data,\n dates,\n loc_descrs=descriptions,\n lons=np.arange(3),\n lats=np.arange(3),\n alts=np.arange(3),\n attributes={\n \"test\": {\n \"testattribute\": \"teststring\"\n },\n \"test2\": {\n \"testattribute2\": \"teststring2\"\n }\n })\n\n with OrthoMultiTs(self.testfilename) as dataset:\n data = dataset.read_all(2)\n nptest.assert_array_equal(data[\"test\"], np.arange(5) + 5)\n assert dataset.dataset.variables[\n \"test\"].testattribute == \"teststring\"\n assert dataset.dataset.variables[\n \"test2\"].testattribute2 == \"teststring2\"\n test_dates = []\n for n_data in [5]:\n base = datetime(2007, 1, n_data)\n test_dates.append(\n np.array(\n [base + timedelta(hours=i) for i in range(n_data)]))\n dates = np.concatenate(test_dates)\n nptest.assert_array_equal(data[\"time\"], dates)",
"def addTrailer(file):\n program = '\\t\\t</coordinates>\\n'\n program += '\\t</LineString>\\n'\n program += '\\t</Placemark>\\n'\n program += '</Document>\\n'\n program += '</kml>\\n'\n file.write(program)",
"def create_telemetry_file():\n loginfo(\"Creating telem file if it doesn't exist...\")\n with open(HAB_TELEM_FILE, \"w\"):\n pass",
"def write_output(self,content):\n text=\"\"\"# typ eta phi pt jmass ntrk btag had/em dummy dummy\\n\"\"\"\n self.output.writelines(text)\n text=\"0 \"+str(self.nb_data)+\" \"+str(len(content))+\"\\n\"\n self.output.writelines(text)\n\n i=1\n for particle in content:\n text=str(i)+' '+particle.lhco_line()+'\\n'\n self.output.writelines(text)\n i+=1",
"def write_to_file(self,\n ofile=\"output.txt\",\n **kwargs):\n with open(file=ofile, mode='a') as ofile:\n for num_line, obj in self.items():\n ofile.write(str(self._construct_output_string(num_line=num_line,\n obj=obj,\n **kwargs)))",
"def write_novel_alleles(alleles, novel):\n alleles_dict = {mistutils.basename(x):x for x in os.listdir(alleles)}\n\n for gene in novel:\n\n filename = os.path.join(alleles, alleles_dict[gene])\n\n with open(filename, 'a') as f:\n for allele in novel[gene]:\n f.write('>placeholder\\n')\n f.write(allele + \"\\n\")\n\n fix_headers(filename)",
"def write_tsv(labels, positions, elec_file):\n labels = labels.reshape(-1, order='F')\n positions = positions.reshape(-1, 3, order='F')\n\n elec_file = elec_file.with_suffix('.tsv')\n with elec_file.open('w') as f:\n f.write('name\\tx\\ty\\tz\\n')\n for i in range(labels.shape[0]):\n f.write(f'{labels[i]}\\t{positions[i, 0]:.3f}\\t{positions[i, 1]:.3f}\\t{positions[i, 2]:.3f}\\n')",
"def save(filename, points3, tris, metadata):\n logging.info(\"saving mesh: %s\"%filename)\n cells = {'triangle':tris}\n vtk_io.write(filename, points3, cells)\n with open(filename+'.readme','w') as fid:\n fid.write(metadata)",
"def write_obj(output_file_name, obj_name, mtl_lib_file, tex_lines,\n tex_map, n_verts, vertex_lines, n_normals,\n normals_lines, n_faces, faces_groups):\n\n def _join(lns):\n \"\"\"Joins lines.\n :lns: list of strings: Lines to join.\n Returns joined lines as string.\n \"\"\"\n return \"\\n\".join(lns)\n\n # Rebuild the faces data first.\n faces = \"\"\n for idx, lines in faces_groups.items():\n # Get the texture 'alias' or use a default value\n tex_name = _get_tex_name(tex_map, idx)\n faces += FACES_TEMPLATE.format(obj_name=obj_name, tex_name=tex_name,\n faces_lines=_join(lines))\n\n # 'Apply' data to the template.\n with open(output_file_name, \"w\") as fd_out:\n fd_out.write(OBJ_TEMPLATE.format(header=COMMON_HEADER,\n mtl_lib_file=mtl_lib_file,\n obj_name=obj_name,\n n_verts=n_verts,\n n_faces=n_faces,\n n_norms=n_normals,\n vertex_lines=_join(vertex_lines),\n tex_lines=_join(tex_lines),\n norms_lines=_join(normals_lines),\n faces_lines=faces))\n print \" * Saved '%s'.\" % output_file_name",
"def write_uem(uemf, uem, n_digits=3):\n with open(uemf, 'wb') as f:\n for file_id in sorted(iterkeys(uem)):\n for onset, offset in sorted(uem[file_id]):\n line = ' '.join([file_id,\n '1',\n format_float(onset, n_digits),\n format_float(offset, n_digits)\n ])\n f.write(line.encode('utf-8'))\n f.write(b'\\n')",
"def sauvegarder():\n\n fic = open(\"sauvegarde.txt\", \"w\")\n\n for i in range(Nombre_de_colonne):\n\n for j in range(Nombre_de_ligne):\n\n fic.write(str(etat[i][j]) + \"\\n\")\n\n fic.close()",
"def export_file_dto(self, active_model, objs=[], type=''):\n dto_parser = DtoParser()\n objs2 = []\n for obj in objs:\n objs2 += dto_parser.parseJointPromotion(obj)\n\n doc_type_obj = self.env[\"edi.doc.type\"]\n doc_obj = self.env[\"edi.doc\"]\n doc_type = doc_type_obj.search([(\"code\", '=', \"dto\")])[0]\n last_dto_file = doc_obj.search([(\"doc_type\", '=', doc_type.id)],\n order=\"date desc\", limit=1)\n if last_dto_file:\n count = last_dto_file.count + 1\n else:\n count = 1\n\n tmp_name = \"export_dto.txt\"\n file_len = len(objs2)\n filename = \"%sDTO%s.%s\" % (self.env.user.company_id.frigo_code,\n str(file_len).zfill(4),\n str(count).zfill(4))\n templates_path = self.addons_path('frigo_edi') + os.sep + 'wizard' + \\\n os.sep + 'templates' + os.sep\n mylookup = TemplateLookup(input_encoding='utf-8',\n output_encoding='utf-8',\n encoding_errors='replace')\n tmp = Template(filename=templates_path + tmp_name,\n lookup=mylookup, default_filters=['decode.utf8'])\n\n doc = tmp.render_unicode(o=objs2, type_=type, datetime=datetime,\n user=self.env.user).encode('utf-8', 'replace')\n file_name = self[0].service_id.output_path + os.sep + filename\n f = file(file_name, 'w')\n f.write(doc)\n f.close()\n file_obj = self.create_doc(filename, file_name, doc_type)\n file_obj.count = count",
"def newtwogfile(ntf_twogs):\n outfile = open(\"Twogs.txt\", \"w\")\n for x in ntf_twogs:\n outfile.write(\"%s\\n\" % x)\n outfile.close()",
"def saveenergyfile(path, meta, data):\n def serializemeta(meta):\n \"\"\"Convert metadata object to list of comment strings\"\"\"\n return [u\"#CTE_%s: %s\" % (key, meta[key]) for key in meta]\n\n with io.open(path, 'w+') as ff:\n ff.write(u\"\\n\".join(serializemeta(meta)))\n ff.write(u\"\\nvector,tipo,src_dst\\n\")\n for c in data:\n carrier = c['carrier']\n ctype = c['ctype']\n originoruse = c['originoruse']\n values = u\", \".join(u\"%.2f\" % v for v in c['values'])\n comment = u\" # %s\" % c['comment'] if c['comment'] else u\"\"\n ff.write(u\"%s, %s, %s, %s%s\\n\" % (carrier, ctype, originoruse, values, comment))",
"def t2t(lines,filename,flags='--style color.css --css-sugar'):\n\n # b\n if isinstance(lines,list) and isinstance(filename,str):\n website='\\n'.join(lines)\n elif isinstance(lines,str) and isinstance(filename,list):\n lines,filename=filename,lines\n website='\\n'.join(lines)\n elif isinstance(lines,str) and isinstance(filename,str):\n website=lines\n else:\n raise Exception(\"...\")\n\n\n file=open(filename,'w')\n file.write(website)\n file.close()\n\n os.system('txt2tags --target html %s %s' % (flags,filename))",
"def test_write_ead_file(self):\n kwargs = {\n 'id': 143,\n 'operation': 'add',\n 'exam_run__exam_series_code': 'MM-DEDP',\n 'exam_run__date_first_eligible': date(2016, 5, 15),\n 'exam_run__date_last_eligible': date(2016, 10, 15),\n }\n\n with mute_signals(post_save):\n profile = ProfileFactory(id=14879)\n exam_auths = [ExamAuthorizationFactory.create(user=profile.user, **kwargs)]\n exam_auths[0].updated_on = FIXED_DATETIME\n\n self.ead_writer.write(self.tsv_file, exam_auths)\n\n assert self.tsv_rows[0] == (\n \"add\\t143\\t\"\n \"14879\\tMM-DEDP\\t\\t\"\n \"\\t2016/05/15\\t2016/10/15\\t\" # accommodation blank intentionally\n \"2016/05/15 15:02:55\"\n )",
"def _write_antti_component(component, component_id, component_file):\n if component_file.split('.')[-1] == 'gz':\n ff = gzip.open(component_file, 'w')\n else:\n ff = open(component_file, 'w')\n\n ff.write(\"%%%% %s of the magnetic field distribution.\"%component_id +\n \" Data produced on %s\\n\"%dt.datetime.utcnow())\n ff.write(\"%% \\n\")\n ff.write(\"%% This data comes together with files DateTime.txt, LatLon.txt\" +\n \" and Stations.txt. \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% Contact: \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% The format of the data is as follows:\\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% Comp(loc1,t1) Comp(loc1,t2) Comp(loc1,t3) ... \\n\")\n ff.write(\"%% Comp(loc2,t1) Comp(loc2,t2) Comp(loc2,t3) ... \\n\")\n ff.write(\"%% . . . \\n\")\n ff.write(\"%% . . . \\n\")\n ff.write(\"%% . . . \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"\\n\")\n\n fmt = ''.join(['%02.4f ' for row in component] + ['\\n'])\n for loc in component.T:\n ff.write(fmt%tuple(loc))\n ff.close()",
"def _write_antti_location(lat, lon, rad, label, location_file):\n if location_file.split('.')[-1] == 'gz':\n ff = gzip.open(location_file, 'w')\n else:\n ff = open(location_file, 'w')\n\n ff.write(\"%% Geographic coordinates of the geoelectric field distribution \" +\n \" Data produced on %s\\n\"%(dt.datetime.utcnow()))\n ff.write(\"%% \\n\")\n ff.write(\"%% This data comes together with files DateTime.txt, B?.txt,\" +\n \" and Stations.txt. \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% Contact: \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% The format of the data is as follows:\\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% lat1 lon1 rad1 label1 \\n\")\n ff.write(\"%% lat2 lon2 rad2 label2 \\n\")\n ff.write(\"%% . . . \\n\")\n ff.write(\"%% . . . \\n\")\n ff.write(\"%% . . . \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"\\n\")\n\n for l in range(len(lat)):\n ff.write(\"%02.2f %02.2f %08e %s\\n\"%(lat[l], lon[l], rad[l], label[l]))\n\n ff.close()",
"def _write_antti_datetime(DT, dt_file):\n if dt_file.split('.')[-1] == 'gz':\n ff = gzip.open(dt_file, 'w')\n else:\n ff = open(dt_file, 'w')\n\n ff.write(\"%% Date and time of the geoelectric field distribution. \" +\n \" Data produced on %s\\n\"%(dt.datetime.utcnow()))\n ff.write(\"%% \\n\")\n ff.write(\"%% This data comes together with files BX.txt, BY.txt, LatLon.txt\" +\n \" and Stations.txt. \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% Contact: \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% The format of the data is as follows:\\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% year1 month1 day1 hour1 minute1 second1 \\n\")\n ff.write(\"%% year2 month2 day2 hour2 minute2 second2 \\n\")\n ff.write(\"%% . . . . . . \\n\")\n ff.write(\"%% . . . . . . \\n\")\n ff.write(\"%% . . . . . . \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"\\n\")\n\n for d in DT:\n ff.write(\"%02.0f %02.0f %02.0f %02.0f %02.0f %02.0f\\n\"%\n (d.year, d.month, d.day, d.hour, d.minute, d.second))\n\n ff.close()",
"def store_headlines():\n for outlet in outlets:\n articles = get_headlines(outlet)\n connect_db.store_headlines(articles,outlet)",
"def write_eneheader(self,filename,replica):\n \n fheader = open(filename,'w')\n fheader.write('E_pot\\tE_rest(D)\\tD\\tcontact_state\\ttemp\\n')\n fheader.write('# Energy units: Joules/mol\\n')\n fheader.write('# Restrained contact state: ' + repr(replica.mc.restraint.contacts) + '\\n')\n fheader.write('# kspring: '+str(replica.mc.restraint.kspring) + '\\n')\n\tfheader.close()",
"def writeData(self, lines, fpath):\n with open(fpath, 'w') as f:\n for line in lines:\n print(line, file=f)",
"def write_po(self, outputfile):\n raise NotImplementedError(\n \"Writing to this file format is not yet implemented\")",
"def write_lines(filename, lines, verbose=True):\n with open(filename, 'w', encoding=\"utf-8\") as fp:\n for line in lines:\n print(line, file=fp)\n if verbose:\n print(\"Done writing to file %s.\" % filename)",
"def write_de_article(start: int, n_lines: int, tsv_file: IO, output_path: str):\n tsv_file.seek(start)\n de_fieldnames = [\n \"de_article_id\",\n \"de_section_id\",\n \"de_sent_id\",\n \"de_url\",\n \"de_article_title\",\n \"de_section_title\",\n \"de_sent\",\n ]\n de_reader = csv.DictReader(tsv_file, de_fieldnames, delimiter=\"\\t\")\n with open(output_path, \"w\") as outfile:\n for _ in range(n_lines):\n line = next(de_reader)\n outfile.write(line[\"de_sent\"] + \"\\n\")",
"def write_meeting(meeting):\n logfile = open(MEETINGS_ORG_FILE, 'a')\n\n str = \"* TODO %s :Appointment:\\n:PROPERTIES:\\n:guid: %s\\n:END:\\n%s\\n\" % (meeting.title, meeting.guid, meeting.description)\n logfile.write(str)\n\n logfile.close()",
"def WriteNewFile(head_list, atom_list, tail_list):\n file = open(\"output.txt\", 'w')\n output_head = ''.join(map(str,head_list))\n output_atom = ''.join(map(str,atom_list))\n output_tail = ''.join(map(str,tail_list))\n output = ((output_head)+(output_atom)+(output_tail))\n file.write(output)",
"def write_traj(name,r_eq):\r\n f = open(name, 'w') #eqilibration.dump'\r\n N =len(r_eq[0,:,0])\r\n steps = len(r_eq[0,0,:])\r\n types = np.linspace(0,N-1,N)\r\n types = np.ones(N)\r\n types[1::3] = 2\r\n for kk in tqdm(range(steps)):\r\n f.write('ITEM: TIMESTEP \\n')\r\n f.write('{} \\n'.format(dt*kk))\r\n f.write('ITEM: NUMBER OF ATOMS \\n')\r\n f.write('{} \\n'.format(N))\r\n f.write('ITEM: BOX BOUNDS pp pp pp\\n')\r\n f.write('{} {} \\n'.format(-0,L))\r\n f.write('{} {} \\n'.format(-0,L))\r\n f.write('{} {} \\n'.format(-0,L))\r\n f.write('ITEM: ATOMS id type x y z Radius \\n')\r\n for ii in range(N):\r\n f.write(' {} {} {} {} {} {}\\n'.format(ii+1,types[ii],r_eq[0,ii,kk],r_eq[1,ii,kk],r_eq[2,ii,kk], .2e-10, ))\r\n f.close() \r\n return",
"def write(self, model, **kwargs):\n self.section_line_list = []\n self.node_string_list = []\n self.node_connector_string_list = []\n self.node_connector_string_mapping = (\n {}\n ) # A mapping of the node and index to the section\n self.bus_string_list = (\n []\n ) # Only used for nodes - not nodes derived from PV, Loads or Capacitors\n self.nodeID_list = []\n self.sectionID_list = []\n self.section_feeder_mapping = {}\n self.section_line_feeder_mapping = {}\n self.section_headnode_mapping = {}\n\n # Verbose print the progress\n if \"verbose\" in kwargs and isinstance(kwargs[\"verbose\"], bool):\n self.verbose = kwargs[\"verbose\"]\n else:\n self.verbose = False\n\n # Writing the load file\n if self.verbose:\n logger.info(\"Writing the load file...\")\n self.write_load_file(model, **kwargs)\n\n # Writing the network file\n if self.verbose:\n logger.info(\"Writing the network file...\")\n self.write_network_file(model, **kwargs)\n\n # Writing the equipment file\n if self.verbose:\n logger.info(\"Writing the equipment file...\")\n self.write_equipment_file(model, **kwargs)",
"def write_lines_to_file(filename, lines):\n with open(filename, 'w') as fp:\n for line in lines:\n fp.write(\"%s\\n\" % line.strip('\\n'))",
"def make_eph():\n\n # Get table data:\n tr_file = 'exoplanets_transiting.fits'\n if os.path.isfile( tr_file )==False:\n tutilities.download_data()\n t = atpy.Table( tr_file )\n\n # Open and prepare file for output writing to:\n eph_file_w = open( EPH_FILE, 'w' )\n header_str = '# Transiting planet positions and epochs \\n'\n header_str += '# Generated from exoplanet.org data \\n'\n header_str += '# Comment out those not needed \\n\\n'\n header_str += '# COLUMNS: \\n'\n header_str += '# Name, Vmag, RA, Dec, Epoch(HJD), Period(days), Duration(hrs) \\n\\n'\n eph_file_w.write( header_str )\n\n # Go through each of the planets alphabetically and extract\n # the necessary information:\n q = np.argsort( t.NAME )\n for i in range( t.NAME.size ):\n eph_file_w.write( '%-12.10s %.1f %s %s %15.7f %13.8f %8.4f \\n' % \\\n ( t.NAME[ q[i] ].replace(' ',''), t.V[ q[i] ], t.RA_STRING[ q[i] ], \\\n t.DEC_STRING[ q[i] ], t.TT[ q[i] ], t.PER[ q[i] ], t.T14[ q[i] ]*24. ) )\n eph_file_w.close()\n print '\\n\\nSaved output in %s' % EPH_FILE\n\n return None",
"def write_out(c2ptmk, ofn):\n print \"Writing out to [{}]\".format(ofn)\n with codecs.open(ofn, \"w\", \"utf8\") as ofd:\n for co, infos in sorted(c2ptmk.items()):\n ofd.write(u\"{}\\t{}\\t{}\\n\".format(\n co, infos[\"uri\"], \",\".join(\n [unicode(x) for x in infos[\"ptmks\"]])))",
"def Writedata(self, tstep):\n \n nc = Dataset(self.outfile, 'a')\n \n nc.variables['time'][tstep] = self.time\n nc.variables['salt'][tstep] = self.salt\n nc.variables['temp'][tstep] = self.temp\n nc.variables['uc'][tstep] = self.uc\n nc.variables['vc'][tstep] = self.vc\n nc.variables['nu_v'][tstep] = self.nu_v\n nc.variables['rho'][tstep] = self.rho\n nc.variables['tau_x'][tstep] = self.tau_x\n nc.variables['tau_y'][tstep] = self.tau_y\n nc.variables['eta'][tstep] = self.eta\n \n nc.close()",
"def create_eqt_template(nodes, input_filename):\n output_filename = f'{input_filename[:-4]}_eqpt_sheet.txt'\n with open(output_filename, 'w', encoding='utf-8') as my_file:\n # print header similar to excel\n my_file.write('OPTIONAL\\n\\n\\n\\\n \\t\\tNode a egress amp (from a to z)\\t\\t\\t\\t\\tNode a ingress amp (from z to a) \\\n \\nNode A \\tNode Z \\tamp type \\tatt_in \\tamp gain \\ttilt \\tatt_out\\\n amp type \\tatt_in \\tamp gain \\ttilt \\tatt_out\\n')\n\n for node in nodes.values():\n if node.eqpt == 'ILA':\n my_file.write(f'{node.uid}\\t{node.to_node[0]}\\n')\n if node.eqpt == 'ROADM':\n for to_node in node.to_node:\n my_file.write(f'{node.uid}\\t{to_node}\\n')\n\n print(f'File {output_filename} successfully created with Node A - Node Z entries for Eqpt sheet in excel file.')",
"def writeChronListToFile(self):\n ## write header\n for header_line in self.outData['header']:\n self.outFile.write(header_line + '\\n')\n ##loop through each msg list\n for msg_list in self.outData_temp:\n ## create line\n msg_line = reconstructLine(msg_list)\n ## write to file\n self.outFile.write(msg_line + '\\n')",
"def save_all_ne_as_list_to_txt(self):\n #write the output\n outfile = open(('ne_list_all_' + self.lang + '_' + self.method +\n '.txt'), 'w')\n for sublist in self.named_entity_list_total:\n for entry in sublist:\n outfile.write(entry[0]+'\\t'+entry[3]+'\\n')\n outfile.close()",
"def eeg_writeavr(array,tsb,di,file):\t\t\n import shutil as shu\n f=open(file,'w')\n firstline = 'Npts= %i TSB= %i DI= %7.5f SB= %7.5f SC= %i NChan= %i\\n' %(array.shape[1],tsb,di,1,200,array.shape[0]) \n chnam = 'Cz FP1 FP2 F3 F4 C3 C4 P3 P4 O1 O2 F7 F8 T7 T8 P7 P8 Fz Pz FC1 FC2 CP1 CP2 FC5 FC6 CP5 CP6 FT9 FT10 TP9 TP10 PO9 PO10\\n'\n f.write(firstline)\n f.write(chnam)\n for i in range(array.shape[0]):\n tmp = array[i,:]\n f.write(('%7.5f ' * len(tmp)) %tuple(tmp))\n f.write('\\n')\n \n f.close()\n #may want to change this on different machines...\n src = '/Users/crislanting/Projects/EEG/data/33.elp'\n dest = file[:-4] + '.elp'\n shu.copyfile(src,dest)",
"def _write_antti_stations(obs_lat, obs_lon, obs_rad, obs_inc, obs_id,\n station_file):\n if station_file.split('.')[-1] == 'gz':\n ff = gzip.open(station_file, 'w')\n else:\n ff = open(station_file, 'w')\n\n ff.write(\"%% Geographic coordinates and ID of stations used to generate\" +\n \" SECS-interpolated magnetic vector comonents. \" +\n \" Data produced on %s\\n\"%(dt.datetime.utcnow()))\n ff.write(\"%% \\n\")\n ff.write(\"%% This data comes together with files DateTime.txt, B?.txt,\" +\n \" and LatLon.txt. \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% Contact: \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% The format of the data is as follows:\\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% First row: the list of station codes used in SECS\" +\n \" calculations followed by the geographic coordinates of\" +\n \" the stations:\\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% lat1 lon1 rad1 \\n\")\n ff.write(\"%% lat2 lon2 rad2 \\n\")\n ff.write(\"%% . . \\n\")\n ff.write(\"%% . . \\n\")\n ff.write(\"%% . . \\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% The rest of the data are an array of integers indicating the\" +\n \" quality [0-9] of station in the first row used in SECS inversion:\\n\")\n ff.write(\"%% \\n\")\n ff.write(\"%% bool(station1,t1) bool(station1,t2) bool(station1,t3) ...\\n\")\n ff.write(\"%% bool(station2,t1) bool(station2,t2) bool(station2,t3) ...\\n\")\n ff.write(\"\\n\")\n\n # write observatory ids as single line\n for obs in obs_id:\n ff.write(\"%s \"%obs)\n\n ff.write(\"\\n\")\n ff.write(\"\\n\")\n\n # write observatory locations\n for l in range(len(obs_lat)):\n ff.write(\"%03.2f %03.2f %08e\\n\"%(obs_lat[l], obs_lon[l], obs_rad[l]))\n\n ff.write(\"\\n\")\n\n # write quality factor array\n fmt = ''.join(['%01.0f' for row in obs_inc] + ['\\n'])\n for loc in obs_inc.T:\n ff.write(fmt%tuple(loc))\n\n ff.close()",
"def write_lines(list_of_lines, file):\r\n for i in range(0, len(list_of_lines)):\r\n file.write(list_of_lines[i] + b\"\\n\")",
"def data_to_file(data, ta_file):\n file_handle = file(ta_file, \"w\")\n file_handle.write(data_to_string(data))\n file_handle.close()",
"def _create_tempfile_with_lines(self, *lines):\n for line in lines:\n self.tempfile.write(line)\n self.tempfile.write('\\n')\n self.tempfile.flush()",
"def create_txt_files(self, op_dir=None):\n for tb_nm, tb_cont in list(self.tables_info['tables'].items()):\n op_fl = '{}_{}.txt'.format(self.report_basename, tb_nm)\n if op_dir:\n op_fl = os.path.join(op_dir, op_fl)\n with open(op_fl, 'w') as TXT:\n TXT.write(tb_cont)",
"def writelines(self, seq):\n for line in seq:\n self.write(line)",
"def write_imp_ASCII(DT, lat_lon_r, BX, BY, BZ, Label,\n olat_olon_or, obsX, obsY, obsZ, obsInc, obsID,\n filename='impOut.zip'):\n\n# def write_antti(DT, Lat, Lon, BX, BY, BZ, Label,\n# obsLat, obsLon, obsInc, obsID,\n# dt_file = 'DateTime.txt.gz',\n# location_file = 'LatLon.txt.gz',\n# bx_file = 'BX.txt.gz',\n# by_file = 'BY.txt.gz',\n# bz_file = 'BZ.txt.gz',\n# station_file = 'Stations.txt.gz'):\n\n # unpack former tuple arguments (see PEP-3113)\n Lat, Lon, Rad = lat_lon_r\n obsLat, obsLon, obsRad = olat_olon_or\n\n # create a temporary directory\n tmpDir = tempfile.mkdtemp()\n\n # set filenames\n dt_file = os.path.join(tmpDir, 'DateTime.txt')\n location_file = os.path.join(tmpDir, 'LatLon.txt')\n bx_file = os.path.join(tmpDir, 'BX.txt')\n by_file = os.path.join(tmpDir, 'BY.txt')\n bz_file = os.path.join(tmpDir, 'BZ.txt')\n obx_file = os.path.join(tmpDir, 'obsBX.txt')\n oby_file = os.path.join(tmpDir, 'obsBY.txt')\n obz_file = os.path.join(tmpDir, 'obsBZ.txt')\n station_file = os.path.join(tmpDir, 'Stations.txt')\n\n # write out ASCII files\n _write_antti_datetime(DT, dt_file)\n _write_antti_location(Lat, Lon, Rad, Label, location_file)\n _write_antti_component(BX, 'X (northward) component', bx_file)\n _write_antti_component(BY, 'Y (eastward) component', by_file)\n _write_antti_component(BZ, 'Z (downward) component', bz_file)\n _write_antti_stations(obsLat, obsLon, obsRad, obsInc, obsID, station_file)\n\n # not a part of original ASCII format, but included for completeness\n _write_antti_component(obsX, 'observed X (northward) component', obx_file)\n _write_antti_component(obsY, 'observed Y (eastward) component', oby_file)\n _write_antti_component(obsZ, 'observed Z (downward) component', obz_file)\n\n # open up output zip file\n with zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED) as outZip:\n outZip.write(dt_file, os.path.basename(dt_file))\n outZip.write(location_file, os.path.basename(location_file))\n outZip.write(bx_file, os.path.basename(bx_file))\n outZip.write(by_file, os.path.basename(by_file))\n outZip.write(bz_file, os.path.basename(bz_file))\n outZip.write(obx_file, os.path.basename(obx_file))\n outZip.write(oby_file, os.path.basename(oby_file))\n outZip.write(obz_file, os.path.basename(obz_file))\n outZip.write(station_file, os.path.basename(station_file))\n\n shutil.rmtree(tmpDir)",
"def write_to_file(start_runtime, contents, write_mode='a'):\n with open(f\"{start_runtime}.txt\", write_mode) as f:\n f.write(\"Filename\\t\\tMaxTrack\\tNumInst\\t\\tTimeSig\\t\\tTPB\\n\")\n f.write(contents)",
"def write_note(note):\n \n logfile = open(NOTES_ORG_FILE, 'a')\n timestamp = time_stamp()\n str = \"** [%s] %s\\n:PROPERTIES:\\n:guid: %s\\n:END:\\n%s\\n\" % (timestamp, note.title, note.guid, note.description)\n logfile.write(str)\n\n logfile.close()",
"def write_ts(ts,i):\n '''\n Write light curve to disk as space delimited text file\n\t\n\tParameters\n\t----------\n\tts: time series object\n\ti : a counter to be appended to the file name where it is stored \n\tReturns\n\t-------\n\tNone. \n '''\n path = \"ts-{}.txt\".format(i)\n datafile_id = open(path, 'wb')\n data = np.array([ts._times, ts._values])\n data = data.T\n\n np.savetxt(datafile_id, data, fmt=['%.3f','%8f'])\n datafile_id.close()",
"def export_file_pol(self, active_model, objs=[]):\n doc_type_obj = self.env[\"edi.doc.type\"]\n doc_obj = self.env[\"edi.doc\"]\n doc_type = doc_type_obj.search([(\"code\", '=', \"pol\")])[0]\n last_pol_file = doc_obj.search([(\"doc_type\", '=', doc_type.id)],\n order=\"date desc\", limit=1)\n if last_pol_file:\n count = last_pol_file.count + 1\n else:\n count = 1\n\n tmp_name = \"export_pol.txt\"\n file_len = len(objs)\n filename = \"%sPOL%s.%s\" % (self.env.user.company_id.frigo_code,\n str(file_len).zfill(4),\n str(count).zfill(4))\n templates_path = self.addons_path('frigo_edi') + os.sep + 'wizard' + \\\n os.sep + 'templates' + os.sep\n mylookup = TemplateLookup(input_encoding='utf-8',\n output_encoding='utf-8',\n encoding_errors='replace')\n tmp = Template(filename=templates_path + tmp_name,\n lookup=mylookup, default_filters=['decode.utf8'])\n objs = [o for o in objs]\n if active_model == 'tourism.customer':\n o = objs\n o2 = []\n else:\n o = []\n o2 = objs\n doc = tmp.render_unicode(o=o, o2=o2, datetime=datetime,\n user=self.env.user).encode('utf-8', 'replace')\n file_name = self[0].service_id.output_path + os.sep + filename\n f = file(file_name, 'w')\n f.write(doc)\n f.close()\n file_obj = self.create_doc(filename, file_name, doc_type)\n file_obj.count = count",
"def __saveKnownAtlases(self):\n\n if self.__atlasDescs is None:\n return\n\n atlases = []\n\n for desc in self.__atlasDescs:\n atlases.append((desc.atlasID, desc.specPath))\n\n atlases = ['{}={}'.format(name, path) for name, path in atlases]\n atlases = op.pathsep.join(atlases)\n\n fslsettings.write('fsl.data.atlases', atlases)",
"def create_block_file(blockTxns):\n textfile = open(\"/content/block.txt\", \"w\")\n for element in blockTxns:\n textfile.write(element + \"\\n\")\n textfile. close()",
"def export(self, fname):\n f = open(fname, 'w')\n for ue in self.ue_list:\n line_components = list()\n line_components.append(ue.expression)\n line_components.append(ue.meaning)\n print >>f, '\\t'.join(line_components).encode('utf-8')",
"def write(cls, vas):\n with open(Y, 'w') as f_i:\n for items in vas:\n f_i.write('%s ' % items)\n print(\"File written successfully. Check out \\\"output.txt\\\" file\")\n f_i.close()",
"def writelines(lines, filename, encoding='utf-8', mode='wb'):\r\n return write(os.linesep.join(lines), filename, encoding, mode)",
"def _write_transact_types(self, file):\n for tp in self._transact_types:\n tp.write(file)\n file.write('\\n')",
"def print_to_file(list_of_lines, file_path):\r\n with open(file_path) as output_file:\r\n write_lines(list_of_lines, output_file)",
"def writelines(self, seq: list[str]) -> None:\n ...",
"def build_transcript(speaker_label_transcript):\n with open('main_transcript.txt', 'a') as the_file:\n for t in speaker_label_transcript:\n the_file.write(f\"{t['speaker']}:\\n\")\n the_file.write(f\"{t['content']}\\n\\n\")",
"def __save_article_to_file(self, content):\n with open(\"article.txt\", 'w') as out:\n out.write(content)",
"def write_edges(\n edges: Mapping[str, Any],\n filename: str,\n jsonlines: bool = False,\n gzipflag: bool = False,\n yaml: bool = False,\n):\n pass",
"def write_to_file(filepath, lines):\n with open(filepath, 'w', encoding='utf-8') as f:\n f.write(''.join([line.replace('\\r\\n', '\\n') for line in lines]))",
"def writetipsy(self, outfile=None, hubble=None):\n from . import analysis\n from . import tipsy\n from .analysis import cosmology\n from snapshot import _new as new\n import math\n s = self._base()\n if outfile is None: outfile = s.filename+'.gtp'\n print \"write tipsy file to \", outfile\n sout = new(star=self._nhalos) # create new tipsy snapshot written as halos.\n sout.properties['a'] = s.properties['a']\n sout.properties['z'] = s.properties['z']\n sout.properties['boxsize'] = s.properties['boxsize']\n if hubble is None: hubble = s.properties['h']\n sout.properties['h'] = hubble\n ### ! dangerous -- rho_crit function and unit conversions needs simplifying\n rhocrithhco = cosmology.rho_crit(s, z=0, unit=\"Msol Mpc^-3 h^2\")\n lboxkpc = sout.properties['boxsize'].ratio(\"kpc a\")\n lboxkpch = lboxkpc*sout.properties['h']\n lboxmpch = lboxkpc*sout.properties['h']/1000.\n tipsyvunitkms = lboxmpch * 100. / (math.pi * 8./3.)**.5\n tipsymunitmsun = rhocrithhco * lboxmpch**3 / sout.properties['h']\n\n print \"transforming \", self._nhalos, \" halos into tipsy star particles\"\n for ii in xrange(self._nhalos):\n h = self[ii+1].properties\n sout.star[ii]['mass'] = h['m']/hubble / tipsymunitmsun\n ## tipsy units: box centered at 0. (assume 0<=x<=1)\n sout.star[ii]['x'] = h['pos'][0][0]/lboxmpch - 0.5\n sout.star[ii]['y'] = h['pos'][0][1]/lboxmpch - 0.5\n sout.star[ii]['z'] = h['pos'][0][2]/lboxmpch - 0.5\n sout.star[ii]['vx'] = h['vel'][0][0]/tipsyvunitkms\n sout.star[ii]['vy'] = h['vel'][0][1]/tipsyvunitkms\n sout.star[ii]['vz'] = h['vel'][0][2]/tipsyvunitkms\n sout.star[ii]['eps'] = h['r']/lboxkpch\n sout.star[ii]['metals'] = 0.\n sout.star[ii]['phi'] = 0.\n sout.star[ii]['tform'] = 0.\n print \"writing tipsy outfile %s\"%outfile\n sout.write(fmt=tipsy.TipsySnap, filename=outfile)\n return sout",
"def bmad_linac_phasing_lines(epics):\n lines = [\n '! Linac overall phasing',\n 'O_L1[phase_deg] = 0 ! K21_1 sets this directly. This is a delta on top of that.', \n 'O_L2[phase_deg] = '+str(epics.caget('SIOC:SYS0:ML00:CALC204')),\n 'O_L3[phase_deg] = '+str(epics.caget('SIOC:SYS0:ML00:AO499'))\n ]\n return lines",
"def write(self, tstore, pot_mode = False):\n\n if not isinstance(tstore, UT3Store):\n raise TypeError\n\n if self.__name is not None:\n # check if the file is writable\n if not os.access(os.path.dirname(self.__name), os.W_OK):\n print_error_message(\"PO file not writable\")\n sys.exit( )\n out = file(self.__name, 'w')\n else:\n out = sys.stdout\n\n self._write_header(out, pot_mode)\n\n for ent in tstore.get_all( ):\n out.write('\\n')\n\n if ent.comment is not None:\n out.write('#. %s\\n' % (ent.comment))\n\n sources = ent.get_sources( )\n if len(sources) > 1 or (len(sources) == 1 and not \"\" in sources):\n out.write('#: %s\\n' % (\", \".join(sources)))\n\n if ent.translation is not None and ent.translation != \"\" and ent.status == store.STATUS_UNFINISHED:\n out.write('#, fuzzy\\n')\n\n out.write('msgid \\\"%s\\\"\\n' % (ent.message.replace('\\\"', '\\\\\"')))\n tr = \"\"\n if ent.translation is not None:\n tr = ent.translation.replace('\\\"', '\\\\\"')\n out.write('msgstr \\\"%s\\\"\\n' % (tr.encode(\"utf-8\")))\n\n if not self.__name is None:\n out.close( )\n\n return",
"def export(fileprefix, hedges):\n with open(fileprefix + '.txt', 'w') as f:\n for h in hedges:\n s = \"\"\n for node in h[0]: #each node in the tail\n s += str(node) + \"|\"\n s = s[:-1]\n s += '\\t'\n for node in h[1]: #each node in the head\n s += str(node) + \"|\"\n s = s[:-1]\n s += '\\t'\n s += '1' + '\\n' #assigns weight for the hedge, currently always set to 1\n f.write(s)",
"def writeOut(self):\n # import time\n self.outHeader = self.srcHeader\n for line in self.outHeader:\n self.outFile.write(line + '\\n')\n # now = time.asctime(time.localtime(time.time()))\n # self.outFile.write('%% -- %s -- Written to new alog' % now)\n for time_s in sorted(self.outData):\n for sens in self.outData[time_s]:\n for meas in self.outData[time_s][sens]:\n valu = self.outData[time_s][sens][meas]\n msg_list = [str(time_s), meas, sens, str(valu)]\n line_string = reconstructLine(msg_list)\n self.outFile.write(line_string + '\\n')",
"def write_oligos(sequences, output_base, oligo_pool_name, fwd_primer,\n rev_primer):\n\n # For CustomArray, we just want a list of sequences but include primers\n count_oligo = 0\n with gfile.GFile(os.path.join(output_base, oligo_pool_name), 'w') as f:\n for seq in sequences:\n count_oligo += 1\n f.write(('%s%s%s\\n' % (fwd_primer, seq, rev_primer)).encode('utf-8'))\n print('Wrote %d oligos to a txt file.' % (count_oligo))",
"def exportBulletFile(*argv):",
"def write(self,aFile,lines):\n # Not necessary (comment older than 021 - no idea what does that mean)\n # Maybe meant to be obsoleted by writeLine and writeLog\n self.debug.printHeader()\n for line in lines:\n if not hasattr(line,'upper'): line=self.settings.pathStorage.composeURL(line)\n # Really poor way how differ between string and list\n # Should be rewriten. Lines could contain only array of strings (not array of arrays).\n aFile.write(line)\n aFile.write('\\n')",
"def saveTeachersData():\n with open(\"TeacherData.txt\",\"wb\") as teacherData:\n pickle.dump(teacherEntities,teacherData)",
"def write_equipment_file(self, model, **kwargs):\n output_file = self.output_path + \"/equipment.txt\"\n\n with open(output_file, \"w\") as f:\n\n # Header\n f.write(\"[GENERAL]\\n\")\n current_date = datetime.now().strftime(\"%B %d, %Y at %H:%M:%S\")\n f.write(\"DATE={}\\n\".format(current_date))\n f.write(\"CYME_VERSION=8.02\\n\")\n f.write(\"\\n[SI]\\n\")\n\n # Substations\n #\n if len(self.substations) > 0:\n f.write(\"\\n[SUBSTATION]\\n\")\n f.write(\n \"FORMAT_SUBSTATION=ID,MVA,KVLL,KVLLdesired,R1,X1,R0,X0,R2,X2,PhaseAngle,MVA_1,MVA_2,MVA_3,MVA_4,Conn,PrimaryEquivalentType,SubEqVal1,SubEqVal2,SubEqVal3,SubEqVal4,SubPrimaryLLVoltage,SecondaryFaultReactance,TxfoConnection,HarmonicEnveloppe,BackgroundHarmonicVoltage,BaseMVA,ImpedanceUnit,BranchID_1,PrimProtDevID_1,PrimProtDevNum_1,TransformerID_1,TransformerNum_1,SubXs_1,SecProtDevID_1,SecProtDevNum_1,BranchStatus_1,BranchID_2,PrimProtDevID_2,PrimProtDevNum_2,TransformerID_2,TransformerNum_2,SubXs_2,SecProtDevID_2,SecProtDevNum_2,BranchStatus_2,BranchID_3,PrimProtDevID_3,PrimProtDevNum_3,TransformerID_3,TransformerNum_3,SubXs_3,SecProtDevID_3,SecProtDevNum_3,BranchStatus_3,BranchID_4,PrimProtDevID_4,PrimProtDevNum_4,TransformerID_4,TransformerNum_4,SubXs_4,SecProtDevID_4,SecProtDevNum_4,BranchStatus_4,BranchID_5,PrimProtDevID_5,PrimProtDevNum_5,TransformerID_5,TransformerNum_5,SubXs_5,SecProtDevID_5,SecProtDevNum_5,BranchStatus_5,FailRate,TmpFailRate,MajorRepairTime,\"\n )\n f.write(\n \"MinorRepairTime,MajorFailureProportion,SymbolID,Favorite,Flags,Comments\\n\"\n )\n\n for sub in self.substations:\n if \"sub_ID\" in sub:\n f.write(sub[\"sub_ID\"] + \",\")\n if \"MVA\" in sub:\n f.write(sub[\"MVA\"] + \",\")\n else:\n f.write(\",\")\n if \"KVLL\" in sub:\n # NOTE: Setting the voltage to 1.05pu at the feeder head is raw coded here\n # TODO: Come up with a less dirty way to have 1.05pu at the substation\n f.write(\n \"{a},{b},\".format(\n a=sub[\"KVLL\"], b=float(sub[\"KVLL\"]) * 1.00\n )\n ) # *1.05))\n else:\n f.write(\",,\")\n #\n # TODO: automatically detect if default or real values should be used for source impedance\n #\n if \"R1\" in sub:\n f.write(sub[\"R1\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"X1\" in sub:\n f.write(sub[\"X1\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"R0\" in sub:\n f.write(sub[\"R0\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"X0\" in sub:\n f.write(sub[\"X0\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"R2\" in sub:\n f.write(sub[\"R2\"] + \",\")\n elif \"R0\" in sub:\n f.write(sub[\"R0\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"X2\" in sub:\n f.write(sub[\"X2\"] + \",\")\n elif \"X0\" in sub:\n f.write(sub[\"X0\"] + \",\")\n else:\n f.write(\"DEFAULT,\")\n if \"phase_angle\" in sub:\n f.write(sub[\"phase_angle\"] + \",\")\n else:\n f.write(\",\")\n\n f.write(\n \",,,,,,,,,,,,,,,,0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,\"\n )\n f.write(\"\\n\")\n\n # Switches\n #\n # Writing default values for switches\n #\n f.write(\"\\n[SWITCH]\\n\")\n f.write(\n \"FORMAT_SWITCH=ID,Amps,Amps_1,Amps_2,Amps_3,Amps_4,KVLL,Reversible,FailRate,TmpFailRate,MajorRepairTime,MinorRepairTime,MajorFailureProportion,StuckProbability,SwitchTime,SymbolOpenID,SymbolCloseID,SinglePhaseLocking,RemoteControlled,Automated,Comments\\n\"\n )\n f.write(\n \"DEFAULT,100.000000,100.000000,100.000000,100.000000,100.000000,25.000000,0,,,,,,,,0,0,0,0,0,\\n\"\n )\n for ID, data in self.switchcodes.items():\n f.write(str(ID) + \",\")\n f.write(data)\n f.write(\"\\n\")\n\n # Fuses\n #\n # Writing 
default values for fuses\n #\n f.write(\"\\n[FUSE]\\n\")\n f.write(\n \"FORMAT_FUSE=ID,Amps,Amps_1,Amps_2,Amps_3,Amps_4,KVLL,Reversible,InterruptingRating,FailRate,TmpFailRate,MajorRepairTime,MinorRepairTime,MajorFailureProportion,StuckProbability,SwitchTime,SymbolOpenID,SymbolCloseID,SinglePhaseLocking,Comments,Manufacturer,Model,TCCRating\\n\"\n )\n f.write(\n \"DEFAULT,100.000000,100.000000,100.000000,100.000000,100.000000,25.000000,0,600.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,0,,,,\\n\"\n )\n for ID, data in self.fusecodes.items():\n f.write(str(ID) + \",\")\n f.write(data)\n f.write(\"\\n\")\n\n # Reclosers\n #\n # Writing default values for reclosers\n #\n f.write(\"\\n[RECLOSER]\\n\")\n f.write(\n \"FORMAT_RECLOSER=ID,Amps,Amps_1,Amps_2,Amps_3,Amps_4,KVLL,Reversible,InterruptingRating,FailRate,TmpFailRate,MajorRepairTime,MinorRepairTime,MajorFailureProportion,StuckProbability,SwitchTime,SymbolOpenID,SymbolCloseID,SinglePhaseLocking,SinglePhaseTripping,RemoteControlled,Automated,Comments,RecloserType,ControlType,Model\\n\"\n )\n f.write(\n \"DEFAULT,100.000000,100.000000,100.000000,100.000000,100.000000,25.000000,0,600.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,0,0,0,0,,1,,\\n\"\n )\n for ID, data in self.reclosercodes.items():\n f.write(str(ID) + \",\")\n f.write(data)\n f.write(\"\\n\")\n\n # Breakers\n #\n # Writing default values for breakers\n #\n f.write(\"\\n[BREAKER]\\n\")\n f.write(\n \"FORMAT_BREAKER=ID,Amps,Amps_1,Amps_2,Amps_3,Amps_4,KVLL,Reversible,InterruptingRating,FailRate,TmpFailRate,MajorRepairTime,MinorRepairTime,MajorFailureProportion,StuckProbability,SwitchTime,SymbolOpenID,SymbolCloseID,SinglePhaseLocking,SinglePhaseTripping,RemoteControlled,Automated,Comments\\n\"\n )\n f.write(\n \"DEFAULT,100.000000,100.000000,100.000000,100.000000,100.000000,25.000000,0,600.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,0,0,0,0,\\n\"\n )\n for ID, data in self.breakercodes.items():\n f.write(str(ID) + \",\")\n f.write(data)\n f.write(\"\\n\")\n\n # Cables\n #\n f.write(\"\\n[CABLE]\\n\")\n f.write(\n \"FORMAT_CABLE=ID,R1,R0,X1,X0,B1,B0,Amps,CableType,UserDefinedImpedances,Frequency,Temperature\\n\"\n )\n f.write(\n \"DEFAULT,0.040399,0.055400,0.035900,0.018200,0.000000,0.000000,447.000000,0,1,60.000000,25.000000\\n\"\n )\n for ID, data in self.cablecodes.items():\n f.write(str(ID))\n for key in [\"R1\", \"R0\", \"X1\", \"X0\", \"B1\", \"B0\", \"amps\", \"cabletype\"]:\n if key in data:\n f.write(\",\" + str(data[key]))\n else:\n f.write(\",\")\n f.write(\",1,60.0000,25.00000\\n\")\n\n # Lines\n #\n if len(self.linecodes_overhead) > 0:\n f.write(\"\\n[LINE UNBALANCED]\\n\")\n f.write(\n \"FORMAT_LINEUNBALANCED=ID,Ra,Rb,Rc,Xa,Xb,Xc,Ba,Bb,Bc,MutualResistanceAB,MutualResistanceBC,MutualResistanceCA,MutualReactanceAB,MutualReactanceBC,MutualReactanceCA,MutualShuntSusceptanceAB,MutualShuntSusceptanceBC,MutualShuntSusceptanceCA,CondID_A,CondID_B,CondID_C,CondID_N1,CondID_N2,SpacingID,AmpsA,AmpsB,AmpsC,UserDefinedImpedances,Transposed\\n\"\n )\n\n for ID, data in self.linecodes_overhead.items():\n f.write(str(ID))\n for key in [\n \"RA\",\n \"RB\",\n \"RC\",\n \"XA\",\n \"XB\",\n \"XC\",\n \"Ba\",\n \"Bb\",\n \"Bc\",\n \"MutualResistanceAB\",\n \"MutualResistanceBC\",\n \"MutualResistanceCA\",\n \"MutualReactanceAB\",\n \"MutualReactanceBC\",\n \"MutualReactanceCA\",\n \"MutualShuntSusceptanceAB\",\n \"MutualShuntSusceptanceBC\",\n \"MutualShuntSusceptanceCA\",\n \"CondID_A\",\n 
\"CondID_B\",\n \"CondID_C\",\n \"CondID_N1\",\n \"CondID_N2\",\n \"SpacingID\",\n \"AmpsA\",\n \"AmpsB\",\n \"AmpsC\",\n \"UserDefinedImpedances\",\n ]:\n if key in data:\n f.write(\",\" + str(data[key]))\n else:\n if key in [\n \"CondID_A\",\n \"CondID_B\",\n \"CondID_C\",\n \"CondID_N1\",\n \"CondID_N2\",\n \"SpacingID\",\n ]:\n f.write(\"NONE,\")\n else:\n f.write(\",0\")\n f.write(\",0\\n\")\n\n # Conductors\n #\n f.write(\"\\n[CONDUCTOR]\\n\")\n f.write(\"FORMAT_CONDUCTOR=ID,Diameter,GMR,R25,Amps,WithstandRating\\n\")\n f.write(\"DEFAULT,1.000001,1.000001,0.7,2000.000000,2000.000000\\n\")\n if len(self.conductors) > 0:\n for ID, data in self.conductors.items():\n if ID == \"DEFAULT\":\n continue\n f.write(ID)\n f.write(data)\n f.write(\"\\n\")\n\n # Spacing table\n #\n f.write(\"\\n[SPACING TABLE FOR LINE]\\n\")\n f.write(\n \"FORMAT_SPACINGTABLEFORLINE=ID,GMDPh-Ph,GMDPh-N,AvgPhCondHeight,AvgNeutralHeight,PosOfCond1_X,PosOfCond1_Y,PosOfCond2_X,PosOfCond2_Y,PosOfCond3_X,PosOfCond3_Y,PosOfNeutralCond_X,PosOfNeutralCond_Y,PosOfNeutralCond_N2_X,PosOfNeutralCond_N2_Y,BundleDistance,NBPhasesPerCircuit,NBConductorsPerPhase,NBNeutrals,TowerType,DistanceA,DistanceB,DistanceC,DistanceD,DistanceE,ConductorStatusN1,ConductorStatusN2,FootingResistanceN1,FootingResistanceN2,TowerSpanN1,TowerSpanN2,Favorite,Flags,Comments\\n\"\n )\n f.write(\n \"DEFAULT,,,,,-0.609600,10.058400,0.000000,8.839200,0.609600,10.058400,0.000000,11.277600,,,0.010000,3,1,1,0,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,1.000000,1.000000,300.000000,300.000000,0,0,\\n\"\n )\n\n f.write(\n \"N_ABOVE_1PH,,,,,0.000000,9.601200,,,,,0.000000,10.363200,,,0.010000,1,1,1,0,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,1.000000,1.000000,300.000000,300.000000,0,0,\\n\"\n )\n f.write(\n \"N_ABOVE_2PH,,,,,-1.127760,9.601200,1.127760,9.601200,,,0.000000,10.363200,,,0.010000,2,1,1,0,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,1.000000,1.000000,300.000000,300.000000,0,0,\\n\"\n )\n f.write(\n \"N_ABOVE_3PH,,,,,-1.127760,9.601200,0.000000,9.601200,1.127760,9.601200,0.000000,10.363200,,,0.010000,3,1,1,0,0.000000,0.000000,0.000000,0.000000,0.000000,0,0,1.000000,1.000000,300.000000,300.000000,0,0,\\n\"\n )\n\n # TODO\n # Add the user-defined spacing tables here\n\n # Capacitors\n #\n if len(self.capcodes) > 0:\n f.write(\"\\n[SHUNT CAPACITOR]\\n\")\n f.write(\n \"FORMAT_SHUNTCAPACITOR=ID,KVAR,KV,CostForFixedBank,CostForSwitchedBank,Type\\n\"\n )\n\n for ID, data in self.capcodes.items():\n f.write(\"capacitor_\" + str(ID) + \",\")\n f.write(data.strip(\",\"))\n f.write(\",0,0,0\")\n f.write(\"\\n\")\n\n # Two winding transformers\n #\n if len(self.two_windings_trans_codes) > 0:\n f.write(\"\\n[TRANSFORMER]\\n\")\n f.write(\n \"FORMAT_TRANSFORMER=ID,Type,KVA,VoltageUnit,KVLLprim,KVLLsec,Z1,Z0,XR,XR0,Conn,WindingType,NoLoadLosses,PhaseShift,IsLTC\\n\"\n )\n\n for ID, data in self.two_windings_trans_codes.items():\n f.write(\"transformer_\" + str(ID) + \",\")\n f.write(data.strip(\",\"))\n f.write(\"\\n\")\n\n # Three winding transformers\n #\n if len(self.three_windings_trans_codes) > 0:\n f.write(\"\\n[THREE WINDING TRANSFORMER]\\n\")\n f.write(\n 
\"FORMAT_THREEWINDINGTRANSFORMER=ID,PrimaryRatedCapacity,PrimaryVoltage,PrimaryConnection,PrimaryToSecondaryZ1,PrimaryToSecondaryZ0,PrimaryToSecondaryXR1,PrimaryToSecondaryXR0,PrimaryToTertiaryZ1,PrimaryToTertiaryZ0,PrimaryToTertiaryXR1,PrimaryToTertiaryXR0,SecondaryToTertiaryZ1,SecondaryToTertiaryZ0,SecondaryToTertiaryXR1,SecondaryToTertiaryXR0,SecondaryCapacityLimit1,SecondaryCapacityLimit2,TertiaryCapacityLimit1,TertiaryCapacityLimit2,TertiaryConnection,NoLoadLosses\\n\"\n )\n for ID, data in self.three_windings_trans_codes.items():\n f.write(\"3_wdg_transformer_\" + str(ID) + \",\")\n f.write(data.strip(\",\"))\n f.write(\"\\n\")\n\n # Regulators\n #\n if len(self.reg_codes) > 0:\n f.write(\"\\n[REGULATOR]\\n\")\n f.write(\n \"FORMAT_REGULATOR=ID,KVA,Bandwidth,CT,PT,Type,KVLN,MaxBuck,MaxBoost,Taps,Reversible\\n\"\n )\n\n for ID, data in self.reg_codes.items():\n f.write(\"regulator_\" + str(ID) + \",\")\n f.write(data.strip(\",\"))\n f.write(\"\\n\")\n\n if len(self.irradiance_profiles) > 0:\n f.write(\"\\n[INSOLATION MODEL] \\n\")\n f.write(\"FORMAT_INSOLATIONMODEL=ID,FromFile,FileName\\n\")\n for i in self.irradiance_profiles:\n f.write(\n \"{label},1,{loc}\".format(\n label=i, loc=self.irradiance_profiles[i]\n )\n )\n f.write(\"\\n\")\n\n if len(self.bess_codes) > 0:\n f.write(\"\\n[BESS] \\n\")\n f.write(\n \"FORMAT_BESS=ID,RatedStorageEnergy,MaxChargingPower,MaxDischargingPower,ChargeEfficiency,DischargeEfficiency\\n\"\n )\n for value in self.bess_codes:\n f.write(self.bess_codes[value] + \",\" + value + \"\\n\")\n f.write(\"\\n\")",
"def writeDomainFile():\n writeTemplate(localTemplate)",
"def writeText2File(loginfo, mfile):\n with open(mfile,'a') as f:\n f.writelines('%s\\n' %loginfo)\n f.close()",
"def write_to_file(self, filename):\n self.octree.write(str.encode(filename))\n print(\"Save octomap to \"+filename)",
"def writeAlltoFile(self):\n with open(self._fname, 'w') as f:\n for elem in self.getAll():\n line = self._writeGratoLine(elem)\n f.write(line + \"\\n\")\n f.close()",
"def export_offsets(task, tensor_offsets, export_path, alpha):\n f = open(export_path + \"offsets_\" + str(task.pid) + \"_\" + str(int(alpha*100)) + \".txt\", 'w')\n f.write(str(len(tensor_offsets)) + \"\\n\")\n for name in tensor_offsets:\n f.write(name + \" \" + str(hex(tensor_offsets[name])) + \"\\n\")\n f.close()",
"def write_polling_location_txt(self):\n\n plt = self.build_polling_location_txt()\n\n\n # Drop base_df columns.\n plt.drop(['index', 'office_name', 'official_name', 'phone', 'email', 'ocd_division', 'phone', 'location_name',\n 'address1', 'address2', 'city', 'state', 'zip_code', 'start_time', 'end_time', 'start_date',\n 'end_date', 'is_only_by_appointment', 'is_or_by_appointment', 'directions', 'notes'],\n inplace=True, axis=1)\n\n plt = self.dedupe(plt)\n plt = plt[plt.address_line.notnull()]\n\n plt.to_csv(config.output + 'polling_location.txt', index=False, encoding='utf-8') # send to txt file\n plt.to_csv(config.output + 'polling_location.csv', index=False, encoding='utf-8') # send to csv file",
"async def writelines(self, lines):\n # first check if the file is binary or not\n if 'b' in self._mode:\n raise APIException(\n \"writelines on a binary file is not permitted: {}\".format(\n self._uri)\n )\n # write all but the last line with a line break\n for l in lines:\n await self.write((l+\"\\n\").encode('utf-8'))\n return True",
"def _dta_obj_to_file(self, address):\n global get_missing\n \n type_dict = {\n 251: ['b',1],\n 252: ['h',2], \n 253: ['l',4],\n 254: ['f',4],\n 255: ['d',8]\n }\n first_missing = {\n 251: 101,\n 252: 32741,\n 253: 2147483620, \n 254: float.fromhex('0x1.0p+127'),\n 255: float.fromhex('0x1.0p+1023')\n }\n typlist = self._typlist\n nvar = self._nvar\n \n missing_save_val = self._missing_save_val\n \n def write_value_label_table(labname, table):\n # Stata limits are a bit confusing. Total length of text \n # (including null terminators) must be <= 32000? Total \n # number of vals must be <= 65536? But the limit on text \n # length forces no. of vals <= 16000 since each label must \n # occupy at least two bytes (including null terminator).\n \n labname = labname[:32]\n \n val = sorted(table.keys())\n # each value may be up to 81 chars including null\n txt = [table[v][:80] for v in val] \n \n nval = len(val)\n if nval > 65536: # max number of values allowed\n val = val[:65536]\n txt = txt[:65536]\n nval = 65536\n \n off = [0]\n for i in range(nval - 1):\n # in next line, \"+ 1\" to leave room for \\0\n offset = off[i] + len(txt[i]) + 1\n if offset > 32000: # if too much text\n off = off[:i] # cut off at before the ith one\n val = val[:i]\n txt = txt[:i]\n nval = i\n break\n off.append(offset)\n txt_len = off[-1] + len(txt[-1]) + 1\n \n table_len = 4 + 4 + 4*nval + 4*nval + txt_len\n \n dta.write(pack(byteorder + \"l\", table_len))\n dta.write(bytearray(labname, 'iso-8859-1') +\n b'\\0'*(33-len(labname)))\n dta.write(b'\\x00\\x00\\x00')\n \n dta.write(pack(byteorder + \"l\", nval))\n dta.write(pack(byteorder + \"l\", txt_len))\n for o in off: dta.write(pack(byteorder + \"l\", o))\n for v in val: dta.write(pack(byteorder + \"l\", v))\n #for t in txt: write_byte_str((t,), len(t) + 1)\n for t in txt: dta.write(bytearray(t, 'iso-8859-1') + b'\\0')\n \n with open(address, 'wb') as dta:\n # header\n dta.write(pack('b', 115)) # ds_format\n byteorder = self._byteorder\n dta.write(pack('b', 1 if byteorder == '>' else 2)) # byteorder\n dta.write(pack('b', 1)) # filetype\n dta.write(pack('b', 0)) # padding\n dta.write(pack(byteorder + 'h', self._nvar))\n dta.write(pack(byteorder + 'i', self._nobs))\n data_label = self._data_label[:80]\n dta.write(bytearray(data_label, 'iso-8859-1') +\n b'\\0'*(81-len(data_label)))\n self._set_timestamp() # new time_stamp\n time_stamp = self._time_stamp[:17]\n dta.write(bytearray(time_stamp, 'iso-8859-1') +\n b'\\0'*(18-len(time_stamp)))\n \n # descriptors\n dta.write(bytes(self._typlist))\n for name in self._varlist:\n name = name[:32]\n dta.write(bytearray(name, 'iso-8859-1') + b'\\0'*(33-len(name)))\n # In srtlist, Nones are replaced with zeroes and \n # a terminating zero is appended (the file needs \n # nvar + 1 ints including terminating zero).\n srtlist = self._srtlist + [None]\n srtlist = [srt + 1 if srt is not None else 0 for srt in srtlist]\n dta.write(pack(byteorder + 'h'*(nvar + 1), *srtlist))\n for fmt in self._fmtlist:\n fmt = fmt[:48]\n dta.write(bytearray(fmt, 'iso-8859-1') + b'\\0'*(49-len(fmt)))\n for lab in self._lbllist:\n lab = lab[:32]\n dta.write(bytearray(lab, 'iso-8859-1') + b'\\0'*(33-len(lab)))\n \n # variable labels\n for lab in self._vlblist:\n lab = lab[:80]\n dta.write(bytearray(lab, 'iso-8859-1') + b'\\0'*(81-len(lab)))\n \n # characteristics\n chrdict = self._chrdict\n for varname in chrdict:\n varname = varname[:32]\n vardict = chrdict[varname]\n for charname in vardict:\n charname = charname[:32]\n char = vardict[charname][:67784] # or 
8681 for Small Stata\n data_len = 66 + len(char) + 1 # +1 for null termination\n dta.write(b'\\x01') # data_type\n dta.write(pack(byteorder + 'i', data_len))\n dta.write(bytearray(varname, 'iso-8859-1') + \n b'\\0'*(33 - len(varname)))\n dta.write(bytearray(charname, 'iso-8859-1') + \n b'\\0'*(33 - len(charname)))\n dta.write(bytearray(char, 'iso-8859-1') + b'\\0')\n dta.write(b'\\x00\\x00\\x00\\x00\\x00')\n \n # data\n for row in self._varvals:\n for value, st_type in zip(row, typlist):\n if st_type <= 244:\n dta.write(bytearray(value, 'iso-8859-1') + \n b'\\0'*(st_type - len(value)))\n else:\n fmt, nbytes = type_dict[st_type]\n # Get correct dta value if missing. As a safety, check\n # for non-standard missing (None and large values).\n if value is None:\n value = first_missing[st_type]\n elif isinstance(value, MissingValue):\n value = missing_save_val(value, st_type)\n elif (value > 8.988465674311579e+307 or \n value < -1.7976931348623157e+308):\n # is this the right way to handle this ?\n value = missing_save_val(\n get_missing(value), st_type) \n dta.write(pack(byteorder + fmt, value))\n \n # value labels\n value_labels = self._vallabs\n for labname in value_labels.keys():\n write_value_label_table(labname, value_labels[labname])",
"def write_triplets(triplets, tool_names, out_fname):\n output = open(out_fname, 'w')\n output.write(json.dumps({'tool_names': tool_names, 'triplets': triplets}))\n output.close()",
"def _file_writer(self, lines, filename):\n if self.MockRun:\n return\n\n if self.Verbose:\n print \"Writing file %s\" % filename\n\n updated_file = open(filename, 'w')\n updated_file.write(''.join(lines))\n updated_file.close()",
"def write_to_file(info: List[str]) -> None:\n return",
"def _dta_obj_to_file(self, address):\n global get_missing\n \n type_dict = {\n 65530: ['b',1],\n 65529: ['h',2],\n 65528: ['l',4], \n 65527: ['f',4],\n 65526: ['d',8]\n }\n first_missing = {\n 65530: 101,\n 65529: 32741,\n 65528: 2147483620,\n 65527: float.fromhex('0x1.0p+127'),\n 65526: float.fromhex('0x1.0p+1023')\n }\n typlist = self._typlist\n byteorder = self._byteorder\n nvar = self._nvar\n \n def write_value_label_table(labname, table):\n # Stata limits are a bit confusing.\n # Total length of text (incl. null terminators) must be <= 32000 ?\n # Total number of vals must be <= 65536 ?\n # But the limit on text length forces no. of vals <= 16000 since\n # each label must occupy at least two bytes \n # (including null terminator).\n labname = labname[:32]\n \n val = sorted(table.keys())\n # each value may be up to 81 chars including null\n txt = [table[v][:80] for v in val] \n \n nval = len(val)\n if nval > 65536: # max number of values allowed\n val = val[:65536]\n txt = txt[:65536]\n nval = 65536\n \n off = [0]\n for i in range(nval - 1):\n # in next line, \"+ 1\" to leave room for \\0\n offset = off[i] + len(txt[i]) + 1\n if offset > 32000: # if too much text\n off = off[:i] # cut off at before the ith one\n val = val[:i]\n txt = txt[:i]\n nval = i\n break\n off.append(offset)\n txt_len = off[-1] + len(txt[-1]) + 1\n \n table_len = 4 + 4 + 4*nval + 4*nval + txt_len\n \n dta.write(bytearray('<lbl>', 'iso-8859-1'))\n dta.write(pack(byteorder + \"l\", table_len))\n dta.write(bytearray(labname, 'iso-8859-1') + \n b'\\0'*(33-len(labname)))\n dta.write(b'\\x00\\x00\\x00')\n \n dta.write(pack(byteorder + \"l\", nval))\n dta.write(pack(byteorder + \"l\", txt_len))\n for o in off: dta.write(pack(byteorder + \"l\", o))\n for v in val: dta.write(pack(byteorder + \"l\", v))\n for t in txt: dta.write(bytearray(t, 'iso-8859-1') + b'\\0')\n dta.write(bytearray('</lbl>', 'iso-8859-1'))\n \n with open(address, 'wb') as dta:\n dta.write(bytearray('<stata_dta>', 'iso-8859-1'))\n \n # header\n dta.write(bytearray('<header>', 'iso-8859-1'))\n dta.write(bytearray('<release>', 'iso-8859-1'))\n dta.write(bytearray('117', 'iso-8859-1'))\n dta.write(bytearray('</release>', 'iso-8859-1'))\n dta.write(bytearray('<byteorder>', 'iso-8859-1'))\n dta.write(\n bytearray('MSF' if byteorder == '>' else 'LSF', 'iso-8859-1'))\n dta.write(bytearray('</byteorder>', 'iso-8859-1'))\n dta.write(bytearray('<K>', 'iso-8859-1'))\n dta.write(pack(byteorder + 'H', self._nvar))\n dta.write(bytearray('</K>', 'iso-8859-1'))\n dta.write(bytearray('<N>', 'iso-8859-1'))\n dta.write(pack(byteorder + 'I', self._nobs))\n dta.write(bytearray('</N>', 'iso-8859-1'))\n dta.write(bytearray('<label>', 'iso-8859-1'))\n label = self._data_label\n label_length = len(label)\n dta.write(pack(byteorder + 'B', label_length))\n dta.write(bytearray(label, 'iso-8859-1'))\n dta.write(bytearray('</label>', 'iso-8859-1'))\n dta.write(bytearray('<timestamp>', 'iso-8859-1'))\n stamp = self._time_stamp\n m = re.match(\n '^([ 0-3][0-9]) ' + \n '(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ' + \n '[0-9]{4} ([ 0-2][0-9]):([0-9]{2})$', \n stamp)\n if (m and \n 1 <= int(m.group(1)) <= 31 and \n 0 <= int(m.group(3)) <= 24 and\n 0 <= int(m.group(4)) < 60):\n dta.write(pack(byteorder + 'B', 17))\n # next line includes optional binary zero\n dta.write(bytearray(stamp, 'iso-8859-1'))\n else: # there's something wrong with the time stamp, just skip it\n dta.write(pack(byteorder + 'B', 0))\n dta.write(bytearray('</timestamp>', 'iso-8859-1'))\n 
dta.write(bytearray('</header>', 'iso-8859-1'))\n \n # map\n offset_map = [0, dta.tell()]\n dta.write(bytearray(\"<map>\", 'iso-8859-1'))\n for i in range(14):\n dta.write(pack(byteorder + 'Q', 0))\n dta.write(bytearray(\"</map>\", \"iso-8859-1\"))\n \n # variable types\n offset_map.append(dta.tell())\n dta.write(bytearray(\"<variable_types>\", 'iso-8859-1'))\n dta.write(pack(byteorder + 'H'*nvar, *typlist))\n dta.write(bytearray(\"</variable_types>\", 'iso-8859-1'))\n \n # variable names\n offset_map.append(dta.tell())\n dta.write(bytearray(\"<varnames>\", 'iso-8859-1'))\n for name in self._varlist:\n name = name[:32]\n dta.write(bytearray(name, 'iso-8859-1') + b'\\0'*(33-len(name)))\n dta.write(bytearray(\"</varnames>\", 'iso-8859-1'))\n \n # sort order\n offset_map.append(dta.tell())\n dta.write(bytearray(\"<sortlist>\", 'iso-8859-1'))\n srtlist = self._srtlist + [None]\n srtlist = [srt + 1 if srt is not None else 0 for srt in srtlist]\n dta.write(pack(byteorder + 'H'*(nvar + 1), *srtlist))\n dta.write(bytearray(\"</sortlist>\", 'iso-8859-1'))\n \n # formats\n offset_map.append(dta.tell())\n dta.write(bytearray(\"<formats>\", 'iso-8859-1'))\n for fmt in self._fmtlist:\n fmt = fmt[:48]\n dta.write(bytearray(fmt, 'iso-8859-1') + b'\\0'*(49-len(fmt)))\n dta.write(bytearray(\"</formats>\", 'iso-8859-1'))\n \n # value-label names\n offset_map.append(dta.tell())\n dta.write(bytearray(\"<value_label_names>\", 'iso-8859-1'))\n for lab in self._lbllist:\n lab = lab[:32]\n dta.write(bytearray(lab, 'iso-8859-1') + b'\\0'*(33-len(lab)))\n dta.write(bytearray(\"</value_label_names>\", 'iso-8859-1'))\n \n # variable labels\n offset_map.append(dta.tell())\n dta.write(bytearray(\"<variable_labels>\", 'iso-8859-1'))\n for lab in self._vlblist:\n lab = lab[:80]\n dta.write(bytearray(lab, 'iso-8859-1') + b'\\0'*(81-len(lab)))\n dta.write(bytearray(\"</variable_labels>\", 'iso-8859-1'))\n \n # characteristics\n offset_map.append(dta.tell())\n dta.write(bytearray(\"<characteristics>\", 'iso-8859-1'))\n chrdict = self._chrdict\n for varname in chrdict:\n varname = varname[:32]\n var_dict = chrdict[varname]\n for charname in var_dict:\n charname = charname[:32]\n char = var_dict[charname][:67784] # or 8681 for Small Stata\n full_length = 66 + len(char) + 1 # +1 for null termination\n \n dta.write(bytearray('<ch>', 'iso-8859-1'))\n dta.write(pack(byteorder + 'I', full_length))\n dta.write(bytearray(varname, 'iso-8859-1') + \n b'\\0'*(33-len(varname)))\n dta.write(bytearray(charname, 'iso-8859-1') + \n b'\\0'*(33-len(charname)))\n dta.write(bytearray(char, 'iso-8859-1') + b'\\0')\n dta.write(bytearray('</ch>', 'iso-8859-1'))\n dta.write(bytearray(\"</characteristics>\", 'iso-8859-1'))\n \n # data\n offset_map.append(dta.tell())\n strls = {}\n dta.write(bytearray(\"<data>\", 'iso-8859-1'))\n varvals = self._varvals\n nvar, nobs = self._nvar, self._nobs\n missing_save_val = self._missing_save_val\n for i in range(nobs):\n row = varvals[i]\n for j in range(nvar):\n value, st_type = row[j], typlist[j]\n if st_type <= 2045:\n value = value[:st_type]\n dta.write(bytearray(value, 'iso-8859-1') + \n b'\\0'*(st_type - len(value)))\n elif st_type == 32768:\n if value == \"\":\n o,v = 0,0\n elif value in strls:\n o,v = strls[value]\n else:\n strls[value] = o,v = (i+1,j+1)\n dta.write(pack(byteorder + 'II', v, o))\n else:\n fmt = 'bhlfd'[65530 - st_type]\n if value is None:\n value = first_missing[st_type]\n elif isinstance(value, MissingValue):\n value = missing_save_val(value, st_type)\n elif (value > 
8.988465674311579e+307 or \n value < -1.7976931348623157e+308):\n # is this the right way to handle this ?\n value = missing_save_val(\n get_missing(value), st_type)\n dta.write(pack(byteorder + fmt, value))\n dta.write(bytearray(\"</data>\", 'iso-8859-1'))\n \n # strls\n offset_map.append(dta.tell())\n strls = [(val, key) for key,val in strls.items()]\n strls.sort()\n dta.write(bytearray(\"<strls>\", 'iso-8859-1'))\n for (o,v), value in strls:\n dta.write(bytearray('GSO', 'iso-8859-1'))\n dta.write(pack(byteorder + 'II', v, o))\n if isinstance(value, str):\n try:\n # expect error in next line if anywhere\n value = bytes(value, 'iso-8859-1') + b'\\x00'\n dta.write(pack('B', 130))\n except UnicodeEncodeError:\n value = bytes(value, 'utf-8')\n dta.write(pack('B', 129))\n elif (not isinstance(value, bytes) and \n not isinstance(value, bytearray)):\n msg = \"only bytes or str object allowed in Stata strl\"\n raise TypeError(msg)\n else:\n dta.write(pack('B', 129))\n val_len = len(value)\n dta.write(pack(byteorder + 'I', val_len))\n num_vals = unpack(str(val_len) + 'b', value)\n dta.write(value)\n dta.write(bytearray(\"</strls>\", 'iso-8859-1'))\n \n # value labels\n offset_map.append(dta.tell())\n dta.write(bytearray(\"<value_labels>\", 'iso-8859-1'))\n for name, table in self._vallabs.items():\n write_value_label_table(name, table)\n dta.write(bytearray(\"</value_labels>\", 'iso-8859-1'))\n \n # end file\n offset_map.append(dta.tell())\n dta.write(bytearray(\"</stata_dta>\", 'iso-8859-1'))\n \n offset_map.append(dta.tell())\n \n # write map\n dta.seek(offset_map[1] + 5)\n for offset in offset_map:\n dta.write(pack(byteorder + 'Q', offset))",
"def to_file(self, records):\n self._file_manager.make_dir_when_no_dir(self._directory)\n file = os.path.join(self._directory, self._file_name + '.txt')\n record_lines = [rec.to_string() + \"\\n\" for rec in records]\n self._file_manager.write_lines(file, record_lines)",
"def write_task(task):\n \n logfile = open(TASKS_ORG_FILE, 'a')\n\n str = \"* TODO %s\\n:PROPERTIES:\\n:guid: %s\\n:END:\\n%s\\n\" % (task.title, task.guid, task.description)\n logfile.write(str)\n\n logfile.close()"
] | [
"0.5752286",
"0.5749262",
"0.5700694",
"0.5613532",
"0.5605334",
"0.556102",
"0.54216707",
"0.5421532",
"0.5405027",
"0.53978723",
"0.53960645",
"0.53954494",
"0.5390986",
"0.5312852",
"0.5277949",
"0.5253673",
"0.5245013",
"0.5244172",
"0.52176124",
"0.5176581",
"0.5172888",
"0.51666254",
"0.5165132",
"0.5162644",
"0.51584023",
"0.5151599",
"0.51265514",
"0.51262856",
"0.5120784",
"0.5108108",
"0.51072866",
"0.50998104",
"0.509913",
"0.5087338",
"0.508492",
"0.5071456",
"0.5061368",
"0.5043643",
"0.5038993",
"0.5033225",
"0.50232047",
"0.50115556",
"0.5011327",
"0.500993",
"0.50073296",
"0.5002173",
"0.5001996",
"0.4999761",
"0.49989676",
"0.4995154",
"0.49945182",
"0.49929976",
"0.49918956",
"0.4989374",
"0.4975055",
"0.49626598",
"0.495749",
"0.49570557",
"0.4953736",
"0.49519038",
"0.4921331",
"0.4920116",
"0.49169052",
"0.4894843",
"0.48854145",
"0.48816866",
"0.48783538",
"0.48759133",
"0.48748544",
"0.4860401",
"0.4854951",
"0.48424366",
"0.48334843",
"0.48334053",
"0.48298147",
"0.48262322",
"0.4821975",
"0.48201415",
"0.48189703",
"0.48161954",
"0.48107147",
"0.4809465",
"0.48053133",
"0.4804961",
"0.47997513",
"0.47935224",
"0.4785696",
"0.47803706",
"0.47794458",
"0.47766256",
"0.47729674",
"0.4770625",
"0.4770145",
"0.4763459",
"0.47621456",
"0.47616792",
"0.47542578",
"0.47519383",
"0.4748471",
"0.47475377"
] | 0.7485731 | 0 |
Create Bmad-style settings from a CSV mapping file and an EPICS interface. | def bmad_from_csv(csvfile, epics, outfile=None):
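    """Build Bmad-style set lines from a CSV mapping and live EPICS values.

    The CSV must provide the columns device_name, attribute, bmad_ele_name,
    bmad_attribute and bmad_factor; `epics` is any interface exposing
    caget_many (e.g. the pyepics module).
    """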
    df = pandas.read_csv(csvfile)
    pvlist = list(df['device_name'] + ':' + df['attribute'].str.strip())
    # Fetch live values for all PVs in one batched call; caget_many returns
    # them in the same order as pvlist
    df['value'] = epics.caget_many(pvlist)
    # Form Bmad attribute-set lines: bmad_ele_name[bmad_attribute] = factor*value
    lines = (df['bmad_ele_name'] + '[' + df['bmad_attribute'] + '] = '
             + df['bmad_factor'].astype(str) + '*' + df['value'].astype(str))
if outfile:
with open(outfile, 'w') as f:
for line in lines:
                f.write(line + '\n')
print('Written:', outfile)
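    # Usage sketch (hypothetical file names; assumes pyepics imported as `epics`):
    #   lines = bmad_from_csv('pv_to_bmad_map.csv', epics, outfile='live.bmad')
    # Each line has the form  bmad_ele_name[bmad_attribute] = factor*value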
return list(lines) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_mappings_from_file(filename, organization, user, import_file_id=None):\n\n mappings = []\n if os.path.isfile(filename):\n with open(filename, 'rU') as csvfile:\n for row in csv.reader(csvfile):\n data = {\n \"from_field\": row[0],\n \"to_table_name\": row[1],\n \"to_field\": row[2],\n # \"to_display_name\": row[3],\n # \"to_data_type\": row[4],\n # \"to_unit_type\": row[5],\n }\n mappings.append(data)\n else:\n raise Exception(\"Mapping file does not exist: {}\".format(filename))\n\n if len(mappings) == 0:\n raise Exception(\"No mappings in file: {}\".format(filename))\n else:\n return Column.create_mappings(mappings, organization, user, import_file_id)",
"def buildFromCSV(self, filepath):\r\n\t\t# TODO: Implement\r\n\t\traise NotImplementedError('This function has not yet been implemented.')\r\n\t\t# with open(filepath, 'r') as scheduleFile:\r\n\t\t# \t# Reusing Parser.parseCSVs(), but not in the intended way; ok because validation is not yet implemented\r\n\t\t# \t# TODO: Split Parser.parseCSVs() into separate people/set file parsers \r\n\t\t# \tn, people, setConstraints = Parser.parseCSVs(-1, scheduleFile, [])\r",
"def from_csv(self, user, row):\n if len(row) < 4:\n raise BadRequest(_(\"Invalid line\"))\n self.name = row[1].strip().lower()\n for model in [DomainAlias, Domain]:\n if model.objects.filter(name=self.name).exists():\n raise Conflict\n domname = row[2].strip()\n try:\n self.target = Domain.objects.get(name=domname)\n except Domain.DoesNotExist:\n raise BadRequest(_(\"Unknown domain %s\") % domname)\n core_signals.can_create_object.send(\n sender=\"import\", context=self.target, object_type=\"domain_aliases\")\n self.enabled = row[3].strip().lower() in [\"true\", \"1\", \"yes\", \"y\"]\n self.save(creator=user)",
"def read_setup(inifile):\n # inifile = os.path.join(spathy_path, inifile)\n print(inifile)\n cfg = configparser.ConfigParser()\n cfg.read(inifile)\n\n pp = {}\n for s in cfg.sections():\n section = s.encode('ascii', 'ignore')\n pp[section] = {}\n for k, v in cfg.items(section):\n key = k.encode('ascii', 'ignore')\n val = v.encode('ascii', 'ignore')\n if section == 'General': # 'general' section\n pp[section][key] = val\n else:\n pp[section][key] = float(val)\n pp['General']['dt'] = float(pp['General']['dt'])\n\n pgen = pp['General']\n pcpy = pp['CanopyGrid']\n pbu = pp['BucketGrid']\n ptop = pp['Topmodel']\n\n return pgen, pcpy, pbu, ptop",
"def read_settings(self, settings_file):\n import configparser\n import itertools\n cfg = configparser.ConfigParser()\n filename = settings_file\n with open(filename) as fp:\n cfg.read_file(itertools.chain(['[global]'], fp), source=filename)\n d = {}\n for c in cfg.items('global'):\n try:\n d[c[0]] = eval(c[1].strip(';'))\n except Exception:\n pass\n self.distance = d['parameters.distance']\n self.a, self.b, self.c, alpha, beta, gamma = d['parameters.unitcell']\n self.alpha, self.beta, self.gamma = (\n alpha*degrees, beta*degrees, gamma*degrees)\n ubmat = np.matrix(d['parameters.ubmat'])\n self.Umat = ubmat * self.Bimat\n self.xc = d['parameters.det0x']\n self.yc = d['parameters.det0y']\n self.pitch = d['parameters.orienterrordetpitch'] * degrees\n self.roll = d['parameters.orienterrordetroll'] * degrees\n self.yaw = d['parameters.orienterrordetyaw'] * degrees\n self.theta = d['parameters.orienterrorgonpitch'] * degrees\n self.omega = d['parameters.omeganom'] * degrees\n self.chi = d['parameters.chinom'] * degrees\n self.phi = d['parameters.phinom'] * degrees\n self.phi_step = d['parameters.phistep'] * degrees\n self.h_start, self.k_start, self.l_start = d['parameters.gridorigin']\n self.h_stop, self.k_stop, self.l_stop = [-v\n for v in d\n ['parameters.gridorigin']]\n hs, ks, ls = d['parameters.griddim']\n self.h_step, self.k_step, self.l_step = [1.0/hs, 1.0/ks, 1.0/ls]\n self.h_shape, self.k_shape, self.l_shape = d['outputdata.dimensions']",
"def config():\n file_path = None # path to the input file\n db_path = None # path to the output db\n atomic_properties = (\n \"Properties=species:S:1:pos:R:3\"\n ) # atomic properties of the input file\n molecular_properties = [\"energy\"] # molecular properties of the input file\n overwrite = False",
"def create_mapping_file(options):\n\n mapping_file = open(os.path.splitext(options.bco)[0] + \"mapping.txt\", 'w')\n mapping_file.writelines(\n\"\"\"# Use this file to provide mapping values for a bco.\n# MISSING PROPERTIES/FIELDS lists properties/fields that are missing from bco\n# NONALLOWED PROPERTIES/FIELDS shows properties that are not allowed\n# Syntax for specifying values\n# To delete a value\n# PATH --> FIELD: DELETE\n# To add a value\n# PATH --> FIELD: ADD-value_to_add\n# To rename a field name\n# PATH --> FIELD: RENAME-new_field_name\n# To swap a field name with another current field name\n# PATH --> FIELD: SWAP-other_field_name\n# Blank values will be skipped. Data does not need to be double represented\n# For example, \n# if <bco_id> needs renamed to <object_id>, either\n# ['object_id'] --> object_id: \n# SWAP-bco_id\n# OR \n# ['bco_id'] --> bco_id: RENAME:object_id \n# will work. No need to fill out both values.\n\"\"\"\n)\n validate_bco(options)\n\n missing_reg = r'(.*?) is a required property' # missing required property\n additional_reg = r'Additional properties are not allowed (.*?)' # unalloewd extra property\n\n attribute_reg = r\"'(.*?)'\" # getting an attribute (field surronded by single quotes)\n index_reg = r\"On instance(.*?)\" # getting key path\n\n failed_validation_reg = r'Failed validating (.*?)' # invalid type\n\n missing = []\n additional = []\n invalid = []\n\n path = {}\n\n with open('error.log') as errors:\n for line in errors:\n if re.match(missing_reg, line): # if there is a missing property\n to_add = re.findall(attribute_reg, line)\n for match in to_add:\n missing.append(match)\n elif re.match(additional_reg, line): # if there is an additional property\n to_add = re.findall(attribute_reg, line)\n for match in to_add:\n additional.append(match)\n elif re.match(failed_validation_reg, line): # if a property is invalid\n # additional and required properties are already represnted by the above regexes,\n # so skip\n if line.__contains__(\"'additionalProperties'\") is False \\\n and line.__contains__(\"'required'\") is False:\n to_add = [line.split(\"schema\")[1].split(\"['\")[-1].strip(\"']:\\n\")]\n invalid.append(to_add[0])\n\n # field contains an index for some attribute\n # this attribute will be the last attribute found the above regexes, and is stored in\n # to_add\n if re.match(index_reg, line):\n keys = \"\"\n index_path = line.removeprefix(\"On instance\").removesuffix(\":\\n\")\n if index_path is not None:\n keys = str(index_path)\n if len(to_add) > 0: # if there are any attributes to add\n for item in to_add:\n add_or_update_list_HELPER(path, str(item), keys + \"['\" + str(item) +\n \"']\")\n to_add = [] # reset to_add\n mapping_file.write(\"====MISSING PROPERTIES/FIELDS====\\n\")\n for attribute in missing:\n mapping_file.write(str(path[attribute][0]) + \"-->\" + str(attribute) + \":\\n\")\n path[attribute].pop(0)\n\n mapping_file.write(\"====NONALLOWED PROPERTIES/FIELDS====\\n\")\n for attribute in additional:\n mapping_file.write(str(path[attribute][0]) + \"-->\" + str(attribute) + \":\\n\")\n path[attribute].pop(0)\n for attribute in invalid:\n mapping_file.write(str(path[attribute][0]).split(\"]\")[0]\n + \"]-->\" + str(attribute) + \":\\n\")\n path[attribute].pop(0)\n\n return mapping_file.name",
"def from_csv(self, user, row):\n if len(row) != 6:\n raise BadRequest(_(\"Invalid line\"))\n self.name = row[1].strip()\n self.target_host = row[2].strip()\n self.service, created = Service.objects.get_or_create(\n name=row[3].strip())\n self.enabled = (row[4].strip() == 'True')\n self.verify_recipients = (row[5].strip() == 'True')\n self.save(creator=user)",
"def load(cls):\n\n # Loop through meds and build patient med lists:\n meds = csv.reader(open(MEDS_FILE,'U'),dialect='excel-tab')\n header = next(meds)\n for med in meds:\n cls(dict(zip(header,med))) # Create a med instance (saved in Med.meds)",
"def __init__(self, file_handle):\n config = ConfigParser.ConfigParser()\n config.readfp(file_handle)\n self.database_address_ = config.get('General', 'database_address')\n self.google_developer_key_ = config.get('Google', 'developer_key')\n self.google_cref_ = config.get('Google', 'cref')",
"def f_read_adr_parameters_from_csv_file(parameters_file):\n parameters_dict = {}\n file = open(parameters_file, \"r\")\n for line in file:\n if line.strip().startswith('#'):\n pass\n else:\n line = line.strip().replace(' ', '').split(',')\n parameters_dict[\"operation_mode_num\"] = line[0]\n parameters_dict[\"FFT_size_samples\"] = line[1]\n parameters_dict[\"spectra_averaging\"] = line[2]\n parameters_dict[\"start_line_freq\"] = line[3]\n parameters_dict[\"width_line_freq\"] = line[4]\n parameters_dict[\"clock_source\"] = line[5]\n parameters_dict[\"sum_diff_mode_num\"] = line[6]\n parameters_dict[\"chan_diff_delay\"] = line[7]\n parameters_dict[\"data_file_size\"] = line[8]\n\n return parameters_dict",
"def from_file(cls, filepath, stemmer, synonyms):\n assert(isinstance(stemmer, Stemmer))\n assert(isinstance(synonyms, SynonymsCollection))\n entities = []\n with io.open(filepath, \"r\", encoding='utf-8') as f:\n for line in f.readlines():\n args = line.split()\n\n e_ID = args[0]\n e_str_type = args[1]\n e_begin = int(args[2])\n e_end = int(args[3])\n e_value = \" \".join([a.strip().replace(',', '') for a in args[4:]])\n a = Entity(e_ID, e_str_type, e_begin, e_end, e_value)\n\n entities.append(a)\n\n return cls(entities, stemmer, synonyms)",
"def from_csv(path, reaction_col='reaction', lower_bound_col=\"lower_bound\", upper_bound_col=\"upper_bound\", sep=\"\\t\"):\n\n if not os.path.exists(path):\n raise IOError(errno.ENOENT, \"File not found\", path)\n\n env = Environment()\n with open(path, \"r\") as f:\n header = next(f)\n header = header.strip()\n header = header.split(\"#\", 1)[0]\n header = [h.strip() for h in header.split(sep)]\n\n for col in [reaction_col, lower_bound_col, upper_bound_col]:\n if col not in header:\n raise IOError(errno.EIO, \"File '{}' has no column '{}'\".format(path, col), path)\n\n for row in f:\n if row.startswith(\"#\"):\n continue\n\n row = row.strip()\n if not row:\n continue\n\n row = row.split(\"#\", 1)[0]\n row = [c.strip() for c in row.split(sep)]\n row = dict(zip(header, row))\n\n env[row[reaction_col]] = (float(row[lower_bound_col]), float(row[upper_bound_col]))\n\n return env",
"def initialize_from_config(self):",
"def set_up(self):\n self.keywords = (\"DATES\", \"COMPDAT\", \"COMPDATL\")\n self.parameters = (\"Date\", \"Well name\", \"Local grid name\", \"I\", \"J\", \"K upper\", \"K lower\", \"Flag on connection\",\n \"Saturation table\", \"Transmissibility factor\", \"Well bore diameter\", \"Effective Kh\",\n \"Skin factor\", \"D-factor\", \"Dir_well_penetrates_grid_block\", \"Press_eq_radius\")\n\n # TODO: с названиями стоит подумать\n self.input_file_reference = \"data/test_schedule_input_reference.inc\"\n self.output_csv_reference = \"data/schedule_output_reference.csv\"\n\n self.clean_file = \"data/handled_schedule.inc\"\n self.output_csv = \"data/schedule_output.csv\"\n\n with open(self.clean_file, \"r\", encoding=\"utf-8\") as file:\n self.clean_file_text = file.read()\n\n self.parse_list_output_reference = [\n [np.nan, 'W1', np.nan, '10', '10', '1', '3', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '1.0'],\n [np.nan, 'W2', np.nan, '32', '10', '1', '3', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '2.0'],\n [np.nan, 'W3', np.nan, '5', '36', '2', '2', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '3.0'],\n [np.nan, 'W4', np.nan, '40', '30', '1', '3', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '4.0'],\n [np.nan, 'W5', np.nan, '21', '21', '4', '4', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '5.0'],\n ['01 JUN 2018', np.nan],\n ['01 JUL 2018', 'W3', np.nan, '32', '10', '1', '1', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '1.0718'],\n ['01 JUL 2018', 'W5', np.nan, '21', '21', '1', '3', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '5.0'],\n ['01 AUG 2018', np.nan],\n ['01 SEP 2018', 'W1', np.nan, '10', '10', '2', '3', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '1.0918'],\n ['01 SEP 2018', 'W2', np.nan, '32', '10', '1', '2', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '2.0'],\n ['01 SEP 2018', 'W3', 'LGR1', '10', '10', '2', '2', 'OPEN', 'DEFAULT', '1', '2', '1', 'DEFAULT', 'DEFAULT', 'DEFAULT', '1.0918'],\n ['01 OCT 2018', np.nan],\n ['01 NOV 2018', np.nan],\n ['01 DEC 2018', np.nan]]",
"def load_from_csv(self):\n\n self._logger.info('Reading data coming from CSV files')\n\n sta = self.stations\n\n if sta != None:\n msta = \", \".join(sta)\n self._logger.debug('Using only stations {0}'.format(msta))\n\n # load the data\n v = list(self.variables)\n v.append('metadata')\n for i in v:\n if i in self.dataConfig:\n\n self._logger.debug('Reading %s...' % self.dataConfig[i])\n if i == 'metadata':\n dp_final = pd.read_csv(self.dataConfig[i],\n index_col='primary_id')\n #Ensure all stations are all caps.\n dp_final.index = [s.upper() for s in dp_final.index]\n\n elif self.dataConfig[i]:\n dp_full = pd.read_csv(self.dataConfig[i],\n index_col='date_time',\n parse_dates=[0])\n dp_full.columns = [s.upper() for s in dp_full.columns]\n\n if sta is not None:\n\n data_sta = dp_full.columns.str.upper()\n\n # Grab IDs from user list thats also in Data\n self.stations = [s for s in data_sta if s in sta]\n dp = dp_full[dp_full.columns[(data_sta).isin(sta)]]\n\n else:\n dp = dp_full\n\n # Only get the desired dates\n dp_final = dp[self.start_date:self.end_date]\n\n if dp_final.empty:\n raise Exception(\"No CSV data found for {0}\"\n \"\".format(i))\n\n setattr(self, i, dp_final)",
"def __init__(self, csvfile):\n self._reader = csv.DictReader(\n csvfile,\n delimiter=self.CSVCONFIG.delimiter,\n doublequote=self.CSVCONFIG.doublequote,\n escapechar=self.CSVCONFIG.escapechar,\n lineterminator=self.CSVCONFIG.lineterminator,\n quotechar=self.CSVCONFIG.quotechar,\n quoting=self.CSVCONFIG.quoting,\n skipinitialspace=self.CSVCONFIG.skipinitialspace,\n )\n self.badRows = []",
"def prepare_csv(self, filename, *args, **kwargs):\n x_possible = getattr(settings, 'IMPORT_CSV_X_FIELDS', ['Lon*', 'x', 'lon*'])\n y_possible = getattr(settings, 'IMPORT_CSV_Y_FIELDS', ['Lat*', 'y', 'lat*'])\n geom_possible = getattr(settings, 'IMPORT_CSV_GEOM_FIELDS',\n ['geom', 'GEOM', 'WKT', 'the_geom', 'THE_GEOM', 'WKB'])\n\n oo = kwargs.get('open_options', [])\n\n oo.append('X_POSSIBLE_NAMES={0}'.format(','.join(x_possible)))\n oo.append('Y_POSSIBLE_NAMES={0}'.format(','.join(y_possible)))\n oo.append('GEOM_POSSIBLE_NAMES={0}'.format(','.join(geom_possible)))\n\n kwargs['open_options'] = oo\n\n return filename, args, kwargs",
"def read_input_file():\n \n global input\n \n config = ConfigParser.RawConfigParser()\n config.read(os.path.join(os.getcwd(), 'INPUT.cfg'))\n\n input = {}\n input['datapath'] = config.get('Address_info', 'datapath')\n input['inter_address'] = config.get('Address_info', 'interactive_address')\n input['target_folder'] = config.get('Address_info', 'target_folder')\n input['save_folder'] = config.get('Address_info', 'save_folder')\n \n if not os.path.isabs(input['datapath']):\n input['datapath'] = os.path.join(os.getcwd(), input['datapath'])\n \n if not os.path.isabs(input['inter_address']):\n input['inter_address'] = os.path.join(os.getcwd(), input['inter_address'])\n \n if not os.path.isabs(input['target_folder']):\n input['target_folder'] = os.path.join(os.getcwd(), input['target_folder'])\n \n if not os.path.isabs(input['save_folder']):\n input['save_folder'] = os.path.join(os.getcwd(), input['save_folder'])\n \n \n input['min_date'] = str(eval(config.get('Event_Request', 'min_datetime')))\n input['max_date'] = str(eval(config.get('Event_Request', 'max_datetime')))\n input['min_mag'] = config.getfloat('Event_Request', 'min_magnitude')\n input['max_mag'] = config.getfloat('Event_Request', 'max_magnitude')\n input['min_depth'] = config.getfloat('Event_Request', 'min_depth')\n input['max_depth'] = config.getfloat('Event_Request', 'max_depth')\n input['evlonmin'] = config.getfloat('Event_Request', 'evlonmin')\n input['evlonmax'] = config.getfloat('Event_Request', 'evlonmax')\n input['evlatmin'] = config.getfloat('Event_Request', 'evlatmin')\n input['evlatmax'] = config.getfloat('Event_Request', 'evlatmax')\n input['preset'] = config.getfloat('Event_Request', 'preset')\n input['offset'] = config.getfloat('Event_Request', 'offset')\n input['max_result'] = config.getint('Event_Request', 'max_results')\n \n input['get_events'] = config.get('Request', 'get_events')\n input['input_period'] = config.get('Parallel', 'input_period')\n input['IRIS'] = config.get('Request', 'IRIS')\n input['ArcLink'] = config.get('Request', 'ArcLink')\n input['time_iris'] = config.get('Request', 'time_iris')\n input['time_arc'] = config.get('Request', 'time_arc')\n \n input['nodes'] = config.get('Parallel', 'nodes')\n\n input['waveform'] = config.get('Request', 'waveform')\n input['response'] = config.get('Request', 'response')\n input['SAC'] = config.get('Request', 'SAC')\n \n input['net'] = config.get('specifications_request', 'network')\n input['sta'] = config.get('specifications_request', 'station')\n \n if config.get('specifications_request', 'location') == \"''\":\n input['loc'] = ''\n elif config.get('specifications_request', 'location') == '\"\"':\n input['loc'] = ''\n else:\n input['loc'] = config.get('specifications_request', 'location')\n \n input['cha'] = config.get('specifications_request', 'channel')\n\n if config.get('specifications_request', 'lat') == 'None':\n input['lat_cba'] = None\n else:\n input['lat_cba'] = config.get('specifications_request', 'lat')\n \n if config.get('specifications_request', 'lon') == 'None':\n input['lon_cba'] = None\n else:\n input['lon_cba'] = config.get('specifications_request', 'lon')\n \n if config.get('specifications_request', 'minradius') == 'None':\n input['mr_cba'] = None\n else:\n input['mr_cba'] = config.get('specifications_request', 'minradius')\n \n if config.get('specifications_request', 'maxradius') == 'None':\n input['Mr_cba'] = None\n else:\n input['Mr_cba'] = config.get('specifications_request', 'maxradius')\n \n \n if config.get('specifications_request', 
'minlat') == 'None':\n input['mlat_rbb'] = None\n else:\n input['mlat_rbb'] = config.get('specifications_request', 'minlat')\n \n if config.get('specifications_request', 'maxlat') == 'None':\n input['Mlat_rbb'] = None\n else:\n input['Mlat_rbb'] = config.get('specifications_request', 'maxlat')\n \n if config.get('specifications_request', 'minlon') == 'None':\n input['mlon_rbb'] = None\n else:\n input['mlon_rbb'] = config.get('specifications_request', 'minlon')\n \n if config.get('specifications_request', 'maxlon') == 'None':\n input['Mlon_rbb'] = None\n else:\n input['Mlon_rbb'] = config.get('specifications_request', 'maxlon')\n\n \n input['test'] = config.get('test', 'test')\n input['test_num'] = config.getint('test', 'test_num')\n \n input['update_interactive'] = config.get('update', 'update_interactive')\n input['iris_update'] = config.get('update', 'iris_update')\n input['arc_update'] = config.get('update', 'arc_update')\n\n input['QC_IRIS'] = config.get('QC', 'QC_IRIS')\n input['QC_ARC'] = config.get('QC', 'QC_ARC')\n \n input['email'] = config.get('email', 'email')\n input['email_address'] = config.get('email', 'email_address')\n \n input['report'] = config.get('report', 'report')\n \n input['corr_unit'] = config.get('instrument_correction', 'corr_unit')\n input['pre_filt'] = config.get('instrument_correction', 'pre_filter')\n \n input['plt_event'] = config.get('ObsPyPT', 'plot_event')\n input['plt_sta'] = config.get('ObsPyPT', 'plot_sta')\n input['plt_ray'] = config.get('ObsPyPT', 'plot_ray')\n\n input['llcrnrlon'] = config.getfloat('ObsPyPT', 'llcrnrlon')\n input['urcrnrlon'] = config.getfloat('ObsPyPT', 'urcrnrlon')\n input['llcrnrlat'] = config.getfloat('ObsPyPT', 'llcrnrlat')\n input['urcrnrlat'] = config.getfloat('ObsPyPT', 'urcrnrlat')\n \n input['lon_0'] = config.getfloat('ObsPyPT', 'lon_0')\n input['lat_0'] = config.getfloat('ObsPyPT', 'lat_0')",
"def init_from_file(self, filepath, batch_settings, effects_log):\n # don't forget to update the module docstring with changes here\n input_template_name = 'cost_factors_energysecurity'\n input_template_version = 0.3\n input_template_columns = {\n 'calendar_year',\n 'dollar_basis',\n 'dollars_per_bbl',\n 'oil_import_reduction_as_percent_of_total_oil_demand_reduction',\n }\n\n df = read_input_file(filepath, effects_log)\n validate_template_version_info(df, input_template_name, input_template_version, effects_log)\n\n # read in the data portion of the input file\n df = read_input_file(filepath, effects_log, skiprows=1)\n validate_template_column_names(filepath, df, input_template_columns, effects_log)\n\n df = df.loc[df['dollar_basis'] != 0, :]\n\n df = batch_settings.ip_deflators.adjust_dollars(batch_settings, df, effects_log, 'dollars_per_bbl')\n\n self._data = df.set_index('calendar_year').to_dict(orient='index')",
"def parseSettings(settings_file):\n\t# Make a new settings object\n\tsetting_object = settings.Settings()\n\n\t# Read the file line by line\n\tfor line in settings_file:\n\t\tthis_line = line.split()\n\t\tif this_line == []:\n\t\t\tpass\n\t\telif this_line[0] == 'input':\n\t\t\tfor filename in this_line[1:]:\n\t\t\t\tsetting_object.addInput(filename)\n\t\telif this_line[0] == 'atom':\n\t\t\tsymbol = this_line[1]\n\t\t\tnumber = this_line[2]\n\t\t\tmass = this_line[3]\n\t\t\tcharge = this_line[4]\n\t\t\tsigma = this_line[5]\n\t\t\teps = this_line[6]\n\t\t\tsetting_object.addAtom(symbol, number, mass, charge, sigma, eps)\n\t\telif this_line[0] == 'mix':\n\t\t\tsetting_object.mix()\n\t\telif this_line[0] == 'bond':\n\t\t\tatom1 = this_line[1]\n\t\t\tatom2 = this_line[2]\n\t\t\tdistance = this_line[3]\n\t\t\tbond_length = this_line[4]\n\t\t\tforce_constant = this_line[5]\n\t\t\tsetting_object.addBond(atom1, atom2, distance, bond_length, force_constant)\n\t\telif this_line[0] == 'angle':\n\t\t\tatom1 = this_line[1]\n\t\t\tatom2 = this_line[2]\n\t\t\tatom3 = this_line[3]\n\t\t\tangle = this_line[4]\n\t\t\tangle_constant = this_line[5]\n\t\t\tsetting_object.addAngle(atom1, atom2, atom3, angle, angle_constant)\n\t\telif this_line[0] == 'molecule':\n\t\t\tresidue = this_line[1]\n\t\t\tnmol = this_line[2]\n\t\t\tnrexcl = this_line[3]\n\t\t\tsetting_object.addMolecule(residue, nmol, nrexcl)\n\t\telif this_line[0] == 'output':\n\t\t\toutput = this_line[1]\n\t\t\tsetting_object.addOutput(output)\n\t\telif this_line[0] == 'system':\n\t\t\tsystem = \"\".join(this_line[1:])\n\t\t\tsetting_object.addSystem(system)\n\t\telif this_line[0] == '#':\n\t\t\tpass\n\treturn setting_object",
"def from_csv(self, user, row):\n if len(row) != 4:\n raise BadRequest(_(\"Invalid line\"))\n self.name = row[1].strip()\n try:\n self.target = RelayDomain.objects.get(name=row[2].strip())\n except RelayDomain.DoesNotExist:\n raise NotFound(_(\"Relay domain %s does not exist\" % row[2].strip()))\n self.enabled = (row[3].strip() == 'True')\n self.save(creator=user)",
"def load_from_file_csv(cls):\n fields = []\n rows = []\n new_dict = {}\n new_list = []\n key = \"\"\n filename = cls.__name__ + \".csv\"\n with open(filename) as fp:\n reader = csv.reader(fp)\n fields = next(reader)\n for row in reader:\n rows.append(row)\n for row in rows:\n i = 0\n new_dict = new_dict.fromkeys(fields)\n for attr in fields:\n key = fields[i]\n value = row[i]\n new_dict[key] = value\n i += 1\n new_list.append(cls.create(**new_dict))\n return new_list",
"def __init__(self, settings_file_name):\n with open(settings_file_name, 'r') as f:\n # load config file\n self.settings = yaml.load(f)\n\n # get key values\n sit_names = self.settings[HNF.Consts.SIT_NAMES]\n row_action_names = self.settings[HNF.Consts.ROW_ACT_NAMES]\n column_action_names = self.settings[HNF.Consts.COL_ACT_NAMES]\n name = self.settings[HNF.Consts.NAME]\n\n # init HNG object\n self.HNFOut = HNF.HNFInstance(sit_names, row_action_names, column_action_names, name)\n\n # set the values found in the settings\n self.__initFromFile()\n\n # calc the summary and expected utility\n self.HNFOut.initSummaryBelief()\n self.HNFOut.initExpectedUtility()\n self.HNFOut.calcHypergameExpectedUtility()\n self.HNFOut.calcModelingOpponentUtility()",
"def _import_source_data(self, source_file: str) -> None:\n with open(source_file, 'r') as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n self.cell_map.append(\n Cell(\n datamap_id=None,\n cell_key=row['cell_key'],\n cell_value=None, # have no need of a value in dm\n cell_reference=row['cell_reference'],\n template_sheet=row['template_sheet'],\n bg_colour=row['bg_colour'],\n fg_colour=row['fg_colour'],\n number_format=row['number_format'],\n verification_list=None))",
"def load(self):\n try:\n with open(\"protocol.csv\", mode=\"r\") as csv_file:\n csv_reader = csv.DictReader(csv_file, delimiter=\"|\")\n\n for index, row in enumerate(csv_reader):\n if index != \"0\":\n if row[\"type\"] == \"Information\":\n item = Information(row[\"title\"], row[\"desc\"], row[\"given_by\"])\n if row[\"type\"] == \"Decision\":\n item = Decision(row[\"title\"], row[\"desc\"], row[\"result\"])\n if row[\"type\"] == \"Task\":\n item = Task(row[\"title\"], row[\"desc\"], row[\"owner\"], row[\"priority\"], row[\"due\"])\n item.creation_date = row[\"creation_date\"]\n item.creation_time = row[\"creation_time\"]\n self.add_item(item)\n except IOError:\n # create the file if it doesn't exist yet\n csv_file = open(\"protocol.csv\", \"w+\")\n csv_file.close()",
"def setUp(self):\n self.input_csv = os.path.join(os.path.dirname(__file__), '___valid_input.csv')\n with open(self.input_csv, 'wb') as opf1:\n opf1.write(\n\"\"\"\nGSE59813,GSM1446812;\nGSE61491,GSM1506106; GSM1506107;\n\"\"\")\n parser = rp_prep.get_parser()\n self.temp_outdir = tempfile.mkdtemp() # mkdtemp returns abspath\n self.options1 = parser.parse_args(['gen-csv', '-f', self.input_csv])\n self.options2 = parser.parse_args(['gen-csv', '-f', self.input_csv,\n '--outdir', self.temp_outdir])\n self.gse = 'GSE38003'\n self.gsm = 'GSM931711'",
"def create_deft_table_csv_mappings():\n mappings = list()\n mappings.append(CsvColumnMapping(columnName=\"rownumber\", cslDataType=\"int\", ordinal=0))\n mappings.append(CsvColumnMapping(columnName=\"rowguid\", cslDataType=\"string\", ordinal=1))\n mappings.append(CsvColumnMapping(columnName=\"xdouble\", cslDataType=\"real\", ordinal=2))\n mappings.append(CsvColumnMapping(columnName=\"xfloat\", cslDataType=\"real\", ordinal=3))\n mappings.append(CsvColumnMapping(columnName=\"xbool\", cslDataType=\"bool\", ordinal=4))\n mappings.append(CsvColumnMapping(columnName=\"xint16\", cslDataType=\"int\", ordinal=5))\n mappings.append(CsvColumnMapping(columnName=\"xint32\", cslDataType=\"int\", ordinal=6))\n mappings.append(CsvColumnMapping(columnName=\"xint64\", cslDataType=\"long\", ordinal=7))\n mappings.append(CsvColumnMapping(columnName=\"xuint8\", cslDataType=\"long\", ordinal=8))\n mappings.append(CsvColumnMapping(columnName=\"xuint16\", cslDataType=\"long\", ordinal=9))\n mappings.append(CsvColumnMapping(columnName=\"xuint32\", cslDataType=\"long\", ordinal=10))\n mappings.append(CsvColumnMapping(columnName=\"xuint64\", cslDataType=\"long\", ordinal=11))\n mappings.append(CsvColumnMapping(columnName=\"xdate\", cslDataType=\"datetime\", ordinal=12))\n mappings.append(CsvColumnMapping(columnName=\"xsmalltext\", cslDataType=\"string\", ordinal=13))\n mappings.append(CsvColumnMapping(columnName=\"xtext\", cslDataType=\"string\", ordinal=14))\n mappings.append(CsvColumnMapping(columnName=\"xnumberAsText\", cslDataType=\"string\", ordinal=15))\n mappings.append(CsvColumnMapping(columnName=\"xtime\", cslDataType=\"timespan\", ordinal=16))\n mappings.append(CsvColumnMapping(columnName=\"xtextWithNulls\", cslDataType=\"string\", ordinal=17))\n mappings.append(CsvColumnMapping(columnName=\"xdynamicWithNulls\", cslDataType=\"dynamic\", ordinal=18))\n return mappings",
"def __init__(self, csvfile, *args, **kwargs):\n self.encoding = kwargs.pop('encoding', 'utf-8')\n csv.DictReader.__init__(self, csvfile, *args, **kwargs)",
"def loadData(self, aircraftCSV='aircraft.csv'):\n aircraftDict = {}\n \n with open(aircraftCSV, 'r') as f:\n reader = csv.reader(f, delimiter=',')\n for line in reader:\n #if imperial convert to metric\n if line[2] == 'imperial':\n range = float(line[4]) * 8 / 5\n else:\n range = float(line[4])\n aircraftDict[line[0]] = [line[1], line[3], range]\n self.aircraftDict = aircraftDict",
"def import_jammers(csvfile, fieldnames=None):\n\tparsed_jammers = []\n\tif fieldnames is None:\n\t\t# Read fieldnames from first line of csvfile.\n\t\tjammers = csv.DictReader(csvfile) \n\telse:\n\t\t# Fieldnames provided\n\t\t# Skip header line/fieldnames line\n\t\tjammers = csv.DictReader(csvfile, fieldnames)\n\t\tnext(jammers)\n\n\tfor jammer in jammers:\n\t\tif hasattr(csvfile, \"name\") and csvfile.name == \"jammers.csv\":\n\t\t\t# These jammers has registered to the jam site. \n\t\t\tjammer[\"ticket\"] = True\n\t\t# Put it in object yo.\n\t\tjammer = Jammer(**jammer)\n\t\tparsed_jammers.append(jammer)\n\treturn parsed_jammers",
"def fromfile(self, file_like):\n defaults = dict((p.name, p._to_str(p.default_value)) for p in self)\n if future.utils.PY2:\n cfg = configparser.SafeConfigParser(defaults)\n else:\n cfg = configparser.ConfigParser(defaults, inline_comment_prefixes=(';', '#'))\n try:\n cfg.readfp(file_like)\n if cfg.sections() != ['header', 'params']:\n raise configparser.Error('Expected sections not found in model file')\n except configparser.Error as exc:\n filename = getattr(file_like, 'name', '')\n msg = 'Could not construct %s from %s\\n\\nOriginal exception: %s' % \\\n (self.__class__.__name__,\n ('file %r' % (filename,)) if filename else 'file-like object',\n str(exc))\n raise BadModelFile(msg)\n self.header = dict(cfg.items('header'))\n for param in defaults:\n self.header.pop(param.lower())\n for param in self:\n param.value_str = cfg.get('params', param.name)",
"def setUp(self):\n\n self.parsedFile = os.path.join(os.path.dirname(__file__),\"blast-parsed.csv\")\n self.bm = BlastMapper()",
"def create_pokedex(filepath):\n try:\n with open(filepath, 'r') as file:\n reader = csv.DictReader(file)\n pokedex = dict()\n for row in reader:\n pokedex[row[\"Name\"]] = create_entry(row[\"#\"], row[\"Name\"], row[\"Type 1\"], row[\"Type 2\"], row[\"HP\"],\n row[\"Attack\"], row[\"Defense\"], row[\"Sp. Atk\"], row[\"Sp. Def\"],\n row[\"Speed\"], row[\"Generation\"], row[\"Legendary\"])\n\n return pokedex\n\n except FileNotFoundError as e:\n return dict()",
"def load_structure(filename=STRUCTURE_FILENAME):\n\n with open(filename) as f:\n reader = csv.reader(f)\n\n for row in reader:\n structure_entry = {}\n structure_entry['floorID'] = row[1]\n structure_entry['roomID'] = row[2]\n \n if row[3] == 's': # Sensor\n dict_sensor[row[0]] = structure_entry\n elif row[3] == 'a': # Actuator\n dict_ac[row[0]] = structure_entry",
"def make_batman_config(tmin, tmax, tstep, wmin, wmax, wnum, wlog=True, suffix=\"\", path=\".\"):\n params = {}\n params[\"curves_fname\"] = p.join(path, 'batmanCurves{}.csv'.format(suffix))\n params[\"params_fname\"] = p.join(path, 'batmanParams{}.csv'.format(suffix))\n params[\"tmin\"] = tmin\n params[\"tmax\"] = tmax\n params[\"tstep\"] = tstep\n params[\"wmin\"] = wmin\n params[\"wmax\"] = wmax\n params[\"wnum\"] = wnum\n params[\"wlog\"] = wlog\n\n outfile = p.join(path, 'batmanConfig{}.param'.format(suffix))\n with open(outfile, \"w+\") as f:\n json.dump(params, f)\n print(\"Batman config written to {}\".format(outfile))",
"def read_ini(ini_file): \n project = ''\n bd_analysis_time = bd_interval_time = '' \n bd_ripper_analysis_time = bd_ripper_interval_time = '' \n dvd_analysis_time = dvd_interval_time = '' \n file_analysis_time = file_interval_time = '' \n bd_path = dvd_path = file_path = ''\n bd_path_mac = dvd_path_mac = file_path_mac = ''\n params_dict = {}\n \n if os.path.exists(ini_file): \n try:\n config = ConfigParser.ConfigParser()\n config.readfp(open(ini_file))\n except Exception, e:\n initlog('failed to read ini file; %s' % str(e)) \n else: \n try:\n project = (config.get('Project', 'project')) \n bd_analysis_time = int(config.get('BD/3Dcopy','analysis time')) \n bd_interval_time = int(config.get('BD/3Dcopy','interval time')) \n bd_ripper_analysis_time = int(config.get('BD/3Dcopy','ripper analysis time')) \n bd_ripper_interval_time = int(config.get('BD/3Dcopy','ripper interval time')) \t\t\t\t\n dvd_analysis_time = int(config.get('DVD/DVDcopy','analysis time')) \n dvd_interval_time = int(config.get('DVD/DVDcopy','interval time'))\n file_analysis_time = int(config.get('FILE','analysis time')) \n file_interval_time = int(config.get('FILE','interval time'))\n bd_path = config.get('BD/3Dcopy','bd_path')\n dvd_path = config.get('DVD/DVDcopy', 'dvd_path')\n file_path = config.get('FILE','file_path')\n bd_path_mac = config.get('BD/3Dcopy','bd_path_mac')\n dvd_path_mac = config.get('DVD/DVDcopy', 'dvd_path_mac')\n file_path_mac = config.get('FILE','file_path_mac')\n except Exception, e:\n initlog('read ini file error; %s' % str(e))\n else:\n initlog('dvdfab_auto_tool.ini file does not exist')\n params_dict[\"project\"] = project\n params_dict[\"bd_analysis_time\"] = bd_analysis_time\n params_dict[\"bd_interval_time\"] = bd_interval_time\n params_dict[\"bd_ripper_analysis_time\"] = bd_ripper_analysis_time\n params_dict[\"bd_ripper_interval_time\"] = bd_ripper_interval_time\n params_dict[\"dvd_analysis_time\"] = dvd_analysis_time\n params_dict[\"dvd_interval_time\"] = dvd_interval_time\n params_dict[\"file_analysis_time\"] = file_analysis_time\n params_dict[\"file_interval_time\"] = file_interval_time\n params_dict[\"bd_path\"] = bd_path\n params_dict[\"dvd_path\"] = dvd_path\n params_dict[\"file_path\"] = file_path\n params_dict[\"bd_path_mac\"] = bd_path_mac\n params_dict[\"dvd_path_mac\"] = dvd_path_mac\n params_dict[\"file_path_mac\"] = file_path_mac\n return params_dict\n return project, bd_analysis_time, bd_interval_time, bd_ripper_analysis_time, bd_ripper_interval_time, dvd_analysis_time, dvd_interval_time, file_analysis_time, file_interval_time, bd_path, dvd_path, file_path,bd_path_mac,dvd_path_mac,file_path_mac",
"def init_from_filepath(csv_filepath):\n with open(csv_filepath, 'rb') as csv_file:\n filedata = csv.reader(csv_file, delimiter=',')\n instance = IngredientsStore(filedata)\n return instance",
"def apply_config_file(self, filename):\n def extractor(template, options):\n \"\"\"Ignore things that are existing non default values\"\"\"\n for name, val in options:\n normalised = self.normalise_key(name)\n if normalised in self.values and not isinstance(self.values[normalised], Default):\n continue\n else:\n yield name, val\n\n items = json.load(open(filename)).items()\n self.use_options(items, extractor)",
"def create_meta_dict_L1(adcp_meta):\n meta_dict = {}\n with open(adcp_meta) as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n line_count = 0\n next(csv_reader, None) # Skip header row\n for row in csv_reader:\n # extract all metadata from csv file into dictionary -- some items not passed to netCDF file but are extracted anyway\n if row[0] == '' and row[1] == '':\n print('Metadata file contains a blank row; skipping this row !')\n elif row[0] != '' and row[1] == '':\n print('Metadata item in csv file has blank value; skipping this row '\n 'in metadata file !')\n else:\n meta_dict[row[0]] = row[1]\n\n # Add conventions metadata to meta_dict\n meta_dict['deployment_type'] = 'Sub Surface'\n meta_dict['flag_meaning'] = 'no_quality_control, good_value, probably_good_value, probably_bad_value, ' \\\n 'bad_value, changed_value, value_below_detection, value_in_excess, ' \\\n 'interpolated_value, missing_value'\n meta_dict['flag_references'] = 'BODC SeaDataNet'\n meta_dict['flag_values'] = '0, 1, 2, 3, 4, 5, 6, 7, 8, 9'\n meta_dict['keywords'] = 'Oceans > Ocean Circulation > Ocean Currents'\n meta_dict['keywords_vocabulary'] = 'GCMD Science Keywords'\n meta_dict['naming_authority'] = 'BODC, MEDS, CF v72'\n meta_dict['variable_code_reference'] = 'BODC P01'\n meta_dict['Conventions'] = \"CF-1.8\"\n\n return meta_dict",
"def file_setup(outfile):\n\n extant_objids = []\n\n if os.path.exists(outfile):\n print('This file exists.')\n try:\n extant_objids = np.array(pd.read_csv(outfile)['objid']).tolist()\n except:\n print('And nonstandard!')\n # Raise an exception?\n return False\n else:\n # Initialize the file with a header\n with open(outfile, 'wb') as csvfile:\n cols = ['objid', 'flat_counts', 'mcat_bg', 'bg_counts',\n 'flux_bgsub_err', 'cps_mcatbgsub', 'counts',\n 'mag_mcatbgsub', 'cps_err', 'mag_bgsub', 'cps_bgsub',\n 'detys', 'flux_bgsub', 'flux_err', 'mag_err_1',\n 'cps_bgsub_err', 't1_data', 'bg', 'responses', 't_mean',\n 'cps_mcatbgsub_err', 'mag_bgsub_err_1', 'mag_err_2',\n 't0_data', 'racent', 'deccent', 'mag', 'exptime',\n 'bg_flat_counts', 'detxs', 't0', 't1',\n 'mag_mcatbgsub_err_2', 'flux', 'mag_mcatbgsub_err_1',\n 'flags', 'mag_bgsub_err_2', 'detrad', 'cps',\n 'flux_mcatbgsub_err', 'flux_mcatbgsub', 'mcat_expt', 'ra',\n 'dec', 'aper4', 'aper4_err', 'mcat_bg',\n 'aper7', 'aper7_err']\n\n spreadsheet = csv.writer(csvfile, delimiter=',', quotechar='|',\n quoting=csv.QUOTE_MINIMAL)\n spreadsheet.writerow(cols)\n\n return extant_objids",
"def loadCSV(input_file):",
"def __create_config_file__(fileparser):\n fileparser['server'] = {\n 'server': Configuration.server + \" # Server IP\",\n 'port': str(Configuration.port) +\n \" # Values allowed: \" + str(Configuration.port_min) +\n \"..\" + str(Configuration.port_max),\n 'certfile': Configuration.certfile +\n \" # Use an absolute path\",\n 'timeout': str(Configuration.timeout) +\n \" # Timeout of the connection request\"\n }\n fileparser['client'] = {\n 'curve1': Configuration.curve1 +\n \" # Values allowed: secp521r1, sect571r1, secp384r1, etc.\",\n 'cipher1': Configuration.cipher1 +\n \" # Values allowed: aes-128-cbc, aes-256-cbc, etc.\",\n 'curve2': Configuration.curve2 +\n \" # Values allowed: None, secp521r1, sect571r1, secp384r1, etc.\",\n 'cipher2': Configuration.cipher2 +\n \" # Values allowed: None, aes-128-cbc, aes-256-cbc, etc.\",\n 'curve3': Configuration.curve3 +\n \" # Values allowed: None, secp521r1, sect571r1, secp384r1, etc.\",\n 'cipher3': Configuration.cipher3 +\n \" # Values allowed: None, aes-128-cbc, aes-256-cbc, etc.\"\n }\n fileparser['ui'] = {\n 'lock': str(Configuration.lock) +\n \" # Lock screen - Values allowed: 0 or a positive integer\",\n 'colour': str(Configuration.colour) +\n \" # If available use colours (1) or not (0)\",\n 'colourB': Configuration.colourB +\n \" # Colour for editable widgets (button, input box...)\",\n 'colourD': Configuration.colourD +\n \" # Colour for decoration (label, frame...)\",\n 'colourT': Configuration.colourT +\n \" # Colour for titles\",\n 'colourM': Configuration.colourM +\n \" # Colour for messages\"\n }\n with open(Configuration.configfile, 'w') as configfile:\n fileparser.write(configfile)\n os.chmod(Configuration.configfile,\n stat.S_IRUSR | stat.S_IWUSR | stat.S_IREAD | stat.S_IWRITE)",
"def _csv_import(self, imppath):\n \n self.lookup_table = []\n\n with open(imppath, 'r') as csvfile:\n csvreader = csv.reader(csvfile, delimiter=',')\n\n headerorder = []\n\n for i, row in enumerate(csvreader):\n if i == 0:\n headerorder = [s.lower() for s in row]\n\n if rgb.is_hex_color(row[headerorder.index('hexstr')]):\n self.lookup_table.append(DmcColor(hex=rgb.RgbColor(row[headerorder.index('hexstr')].strip()),\n id=row[headerorder.index('dmc')].strip(),\n name=row[headerorder.index('name')].strip()))",
"def from_file(cls, gait_name, gait_directory, robot, gait_version_map, *args):\n gait_map = gait_name\n gait_path = os.path.join(gait_directory, gait_map, gait_name + '.gait')\n if not os.path.isfile(gait_path):\n raise FileNotFoundError(gait_path)\n\n with open(gait_path, 'r') as gait_file:\n gait = yaml.load(gait_file, Loader=yaml.SafeLoader)\n\n return cls.from_dict(robot, gait, gait_directory, gait_version_map, *args)",
"def parse_mapping_file(mapping_file, default, check_file):\n mapping_dict = {\n \"to_add\": {},\n \"to_delete\": {},\n \"to_swap\": {}\n }\n with open(mapping_file) as file:\n for line in file:\n if line.startswith(\"#\"): # ignore comments\n continue\n if line.startswith(\"====\"): # ignore headers\n continue\n field_name = line.split('-->')[1].split(\":\")[0] # get field name\n\n if check_file: # if checking mapping file\n mode = line.split(':')[1].strip(\" \").strip(\"\\n\") # get value from file\n if mode == \"\": # skip blank values\n continue\n if mode == \"DELETE\": # field value to delete = field name\n field_value = field_name\n else:\n field_value = mode.split(\"-\")[1]\n mode = mode.split(\"-\")[0]\n\n if default: # read value from default mapping value\n try:\n mode = default_mapping[field_name][\"mode\"]\n field_value = default_mapping[field_name][\"value\"]\n except KeyError:\n continue\n\n key_list = []\n keys = line.split(\"-->\")[0]\n key_reg = r\"\\[(.*?)\\]\"\n keys = re.findall(key_reg, keys) # get each key in keylist\n for key in keys:\n key_list.append(key.strip(\"[\").strip(\"]\").strip(\"'\")) # get string from key\n if mode.lower() == \"delete\":\n add_or_update_list_HELPER(mapping_dict[\"to_delete\"],\n field_name,\n {\"index\": key_list, \"value\": field_value}\n )\n if mode.lower() == \"swap\": # swap value with field name\n add_or_update_list_HELPER(mapping_dict[\"to_swap\"],\n field_name,\n {\"index\": key_list, \"value\": field_value}\n )\n if mode.lower() == \"add\":\n add_or_update_list_HELPER(mapping_dict[\"to_add\"],\n field_name,\n {\"index\": key_list, \"value\": field_value},\n )\n if mode.lower() == \"rename\": # swap field name with value\n add_or_update_list_HELPER(mapping_dict[\"to_swap\"],\n field_value,\n {\"index\": key_list, \"value\": field_name}\n )\n\n return mapping_dict",
"def create():\n logging.info('\"Create\" task started using config file %s', args.config)\n input_csv = os.path.join(config['input_dir'], config['input_csv'])\n if os.path.exists(input_csv):\n # Store a dictionary of id_field values: node IDs so we can add child nodes.\n node_ids = dict()\n\n field_definitions = get_field_definitions(config)\n with open(input_csv) as csvfile:\n csv_data = csv.DictReader(csvfile, delimiter=config['delimiter'])\n csv_column_headers = csv_data.fieldnames\n\n node_endpoint = config['host'] + '/node?_format=json'\n\n for row in csv_data:\n row = clean_csv_values(row)\n id_field = row[config['id_field']]\n\n # Add required fields.\n node = {\n 'type': [\n {'target_id': config['content_type'],\n 'target_type': 'node_type'}\n ],\n 'title': [\n {'value': row['title']}\n ],\n 'status': [\n {'value': config['published']}\n ]\n }\n\n # If a node with an ID that matches the current item's\n # 'parent_id' value has just been created, make the item\n # a child of the node.\n if 'parent_id' in row.keys() and row['parent_id'] in node_ids:\n row['field_member_of'] = node_ids[row['parent_id']]\n\n # Add custom (non-required) CSV fields.\n required_fields = ['file', config['id_field'], 'title']\n custom_fields = list(\n set(csv_column_headers) - set(required_fields))\n for custom_field in custom_fields:\n if not isinstance(row[custom_field], str):\n continue\n # Skip updating field if value is empty.\n if len(row[custom_field]) == 0:\n continue\n\n # This field can exist in the CSV to create parent/child\n # relationships and is not a Drupal field.\n if custom_field == 'parent_id':\n continue\n\n # 'langcode' is a core Drupal field, but is not considered a \"base field\".\n if custom_field == 'langcode':\n continue\n\n # Execute field preprocessor scripts, if any are configured. Note that these scripts\n # are applied to the entire value from the CSV field and not split field values,\n # e.g., if a field is multivalued, the preprocesor must split it and then reassemble\n # it back into a string before returning it. Note that preprocessor scripts work only\n # on string data and not on binary data like images, etc. and only on custom fields\n # (so not title).\n if 'preprocessors' in config and len(config['preprocessors']) > 0:\n for field, command in config['preprocessors'].items():\n if field in csv_column_headers:\n output, return_code = preprocess_field_data(config['subdelimiter'], row[field], command)\n if return_code == 0:\n preprocessor_input = copy.deepcopy(row[field])\n row[field] = output.decode().strip()\n logging.info('Preprocess command %s executed, taking \"%s\" as input and returning \"%s\".', command, preprocessor_input, output.decode().strip())\n else:\n message = 'Preprocess command ' + command + ' failed with return code ' + str(return_code)\n logging.error(message)\n sys.exit(message)\n\n # Assemble Drupal field structures for entity reference fields from CSV data. 
For\n # taxonomy terms, target_type is 'taxonomy_term'; for nodes, it's 'node_type'.\n if field_definitions[custom_field]['field_type'] == 'entity_reference':\n if field_definitions[custom_field]['target_type'] == 'taxonomy_term':\n target_type = 'taxonomy_term'\n field_vocabs = get_field_vocabularies(config, field_definitions, custom_field)\n if config['subdelimiter'] in row[custom_field]:\n prepared_tids = []\n delimited_values = row[custom_field].split(config['subdelimiter'])\n for delimited_value in delimited_values:\n tid = prepare_term_id(config, field_vocabs, delimited_value)\n tid = str(tid)\n prepared_tids.append(tid)\n row[custom_field] = config['subdelimiter'].join(prepared_tids)\n else:\n row[custom_field] = prepare_term_id(config, field_vocabs, row[custom_field])\n row[custom_field] = str(row[custom_field])\n\n if field_definitions[custom_field]['target_type'] == 'node':\n target_type = 'node_type'\n\n # Cardinality is unlimited.\n if field_definitions[custom_field]['cardinality'] == -1:\n if config['subdelimiter'] in row[custom_field]:\n field_values = []\n subvalues = row[custom_field].split(config['subdelimiter'])\n for subvalue in subvalues:\n field_values.append({'target_id': subvalue, 'target_type': target_type})\n node[custom_field] = field_values\n else:\n node[custom_field] = [\n {'target_id': row[custom_field],\n 'target_type': target_type}]\n # Cardinality has a limit.\n elif field_definitions[custom_field]['cardinality'] > 1:\n if config['subdelimiter'] in row[custom_field]:\n field_values = []\n subvalues = row[custom_field].split(config['subdelimiter'])\n for subvalue in subvalues:\n field_values.append({'target_id': subvalue, 'target_type': target_type})\n node[custom_field] = field_values[:field_definitions[custom_field]['cardinality']]\n log_field_cardinality_violation(custom_field, id_field, field_definitions[custom_field]['cardinality'])\n else:\n node[custom_field] = [\n {'target_id': row[custom_field],\n 'target_type': target_type}]\n # Cardinality is 1.\n else:\n subvalues = row[custom_field].split(config['subdelimiter'])\n node[custom_field] = [\n {'target_id': subvalues[0],\n 'target_type': target_type}]\n if len(subvalues) > 1:\n log_field_cardinality_violation(custom_field, id_field, '1')\n\n # Typed relation fields.\n elif field_definitions[custom_field]['field_type'] == 'typed_relation':\n target_type = field_definitions[custom_field]['target_type']\n # Cardinality is unlimited.\n if field_definitions[custom_field]['cardinality'] == -1:\n if config['subdelimiter'] in row[custom_field]:\n field_values = []\n subvalues = split_typed_relation_string(config, row[custom_field], target_type)\n for subvalue in subvalues:\n field_values.append(subvalue)\n node[custom_field] = field_values\n else:\n field_value = split_typed_relation_string(config, row[custom_field], target_type)\n node[custom_field] = field_value\n # Cardinality has a limit.\n elif field_definitions[custom_field]['cardinality'] > 1:\n if config['subdelimiter'] in row[custom_field]:\n field_values = []\n subvalues = split_typed_relation_string(config, row[custom_field], target_type)\n subvalues = subvalues[:field_definitions[custom_field]['cardinality']]\n if len(subvalues) > field_definitions[custom_field]['cardinality']:\n log_field_cardinality_violation(custom_field, id_field, field_definitions[custom_field]['cardinality'])\n for subvalue in subvalues:\n field_values.append(subvalue)\n node[custom_field] = field_values\n else:\n field_value = split_typed_relation_string(config, 
row[custom_field], target_type)\n node[custom_field] = field_value\n # Cardinality is 1.\n else:\n field_values = split_typed_relation_string(config, row[custom_field], target_type)\n node[custom_field] = field_value[0]\n log_field_cardinality_violation(custom_field, id_field, '1')\n\n # Geolocation fields.\n elif field_definitions[custom_field]['field_type'] == 'geolocation':\n target_type = field_definitions[custom_field]['target_type']\n # Cardinality is unlimited.\n if field_definitions[custom_field]['cardinality'] == -1:\n if config['subdelimiter'] in row[custom_field]:\n field_values = []\n subvalues = split_geolocation_string(config, row[custom_field])\n for subvalue in subvalues:\n field_values.append(subvalue)\n node[custom_field] = field_values\n else:\n field_value = split_geolocation_string(config, row[custom_field])\n node[custom_field] = field_value\n # Cardinality has a limit.\n elif field_definitions[custom_field]['cardinality'] > 1:\n if config['subdelimiter'] in row[custom_field]:\n field_values = []\n subvalues = split_geolocation_string(config, row[custom_field])\n subvalues = subvalues[:field_definitions[custom_field]['cardinality']]\n log_field_cardinality_violation(custom_field, id_field, field_definitions[custom_field]['cardinality'])\n for subvalue in subvalues:\n field_values.append(subvalue)\n node[custom_field] = field_values\n else:\n field_value = split_geolocation_string(config, row[custom_field])\n node[custom_field] = field_value\n # Cardinality is 1.\n else:\n field_values = split_geolocation_string(config, row[custom_field])\n node[custom_field] = field_value[0]\n log_field_cardinality_violation(custom_field, id_field, '1')\n\n # For non-entity reference and non-typed relation fields (text, integer, boolean etc.).\n else:\n # Cardinality is unlimited.\n if field_definitions[custom_field]['cardinality'] == -1:\n if config['subdelimiter'] in row[custom_field]:\n field_values = []\n subvalues = row[custom_field].split(config['subdelimiter'])\n for subvalue in subvalues:\n subvalue = truncate_csv_value(custom_field, id_field, field_definitions[custom_field], subvalue)\n field_values.append({'value': subvalue})\n node[custom_field] = field_values\n else:\n row[custom_field] = truncate_csv_value(custom_field, id_field, field_definitions[custom_field], row[custom_field])\n node[custom_field] = [{'value': row[custom_field]}]\n # Cardinality has a limit.\n elif field_definitions[custom_field]['cardinality'] > 1:\n if config['subdelimiter'] in row[custom_field]:\n field_values = []\n subvalues = row[custom_field].split(config['subdelimiter'])\n subvalues = subvalues[:field_definitions[custom_field]['cardinality']]\n if len(subvalues) > field_definitions[custom_field]['cardinality']:\n log_field_cardinality_violation(custom_field, id_field, field_definitions[custom_field]['cardinality'])\n for subvalue in subvalues:\n subvalue = truncate_csv_value(custom_field, id_field, field_definitions[custom_field], subvalue)\n field_values.append({'value': subvalue})\n node[custom_field] = field_values\n else:\n row[custom_field] = truncate_csv_value(custom_field, id_field, field_definitions[custom_field], row[custom_field])\n node[custom_field] = [{'value': row[custom_field]}]\n # Cardinality is 1.\n else:\n subvalues = row[custom_field].split(config['subdelimiter'])\n first_subvalue = subvalues[0]\n first_subvalue = truncate_csv_value(custom_field, id_field, field_definitions[custom_field], first_subvalue)\n node[custom_field] = [{'value': first_subvalue}]\n if len(subvalues) 
> 1:\n log_field_cardinality_violation(custom_field, id_field, '1')\n\n node_headers = {'Content-Type': 'application/json'}\n node_endpoint = '/node?_format=json'\n node_response = issue_request(config, 'POST', node_endpoint, node_headers, node, None)\n if node_response.status_code == 201:\n node_uri = node_response.headers['location']\n print('Node for \"' + row['title'] + '\" (record ' + id_field + ') created at ' + node_uri + '.')\n logging.info(\"Node for %s (record %s) created at %s.\", row['title'], id_field, node_uri)\n if 'output_csv' in config.keys():\n write_to_output_csv(config, id_field, node_response.text)\n else:\n logging.error(\"Node for CSV record %s not created, HTTP response code was %s.\", id_field, node_response.status_code)\n continue\n\n # Map ID from CSV of newly created node to its node ID so we can use it for linking child nodes, etc.\n if node_response.status_code == 201:\n node_nid = node_uri.rsplit('/', 1)[-1]\n node_ids[id_field] = node_nid\n\n # If there is no media file (and we're not creating paged content), move on to the next CSV row.\n if 'file' in row and len(row['file']) == 0 and config['paged_content_from_directories'] is False:\n print('+No media for ' + node_uri + ' created since its \"file\" field in the CSV is empty.')\n logging.warning(\"No media for %s created since its 'file' field in the CSV is empty.\", node_uri)\n continue\n\n # If there is a media file, add it.\n if 'file' in row:\n file_path = os.path.join(config['input_dir'], row['file'])\n media_type = set_media_type(file_path, config)\n\n if node_response.status_code == 201:\n # If what is identified in the 'file' field is a file, create the media from it.\n if 'file' in row and len(row['file']) != 0 and os.path.isfile(file_path):\n media_response_status_code = create_media(config, row['file'], node_uri)\n allowed_media_response_codes = [201, 204]\n if media_response_status_code in allowed_media_response_codes:\n print('+' + media_type.title() + \" media for \" + row['file'] + \" created.\")\n logging.info(\"%s media for %s created.\", media_type.title(), row['file'])\n\n if 'file' in row and len(row['file']) == 0 and config['paged_content_from_directories'] is False:\n print('+ No file specified in CSV for ' + row['title'])\n logging.info(\"No file specified for %s, so no media created.\", id_field)\n\n if config['paged_content_from_directories'] is True:\n # Console output and logging are done in the create_children_from_directory function.\n create_children_from_directory(config, row, node_nid, row['title'])",
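The cardinality logic repeated across the entity reference, typed relation, geolocation, and plain-field branches of `create` reduces to one pattern: split, log a violation if there are too many values, truncate. A distilled standalone sketch (the helper name is invented):

```python
def apply_cardinality(values, cardinality):
    # cardinality == -1 means unlimited; otherwise keep at most `cardinality`
    # values and report whether anything was dropped.
    if cardinality == -1 or len(values) <= cardinality:
        return values, False
    return values[:cardinality], True

kept, violated = apply_cardinality(['a', 'b', 'c'], 2)
print(kept, violated)  # ['a', 'b'] True
```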
"def from_csv(filename: str) -> List['Parameter']:\n parameters = []\n lines = FileUtiles.csvLoad(filename)\n keys = lines[0]\n for line_idx in range(1, len(lines)):\n values = lines[line_idx]\n parameters.append(Parameter(*((keys[idx], Parameter.eval(values[idx])) for idx in range(len(keys)))))\n return parameters",
"def _setup(self, emma_conf):\n settings = configparser.RawConfigParser()\n settings.read('settings.conf')\n self.root = settings.get(\"Datasets\", \"datasets_path\")\n\n # Assign trace set paths\n if self.format == \"cw\": # .npy\n path = join(self.root, self.id)\n self.trace_set_paths = sorted([join(self.id, f) for f in listdir(path) if isfile(join(path, f)) and '_traces.npy' in f])\n elif self.format == \"sigmf\": # .meta\n self.trace_set_paths = None\n raise NotImplementedError\n elif self.format == \"gnuradio\": # .cfile\n self.trace_set_paths = None\n raise NotImplementedError\n elif self.format == \"ascad\": # ASCAD .h5\n if ':' not in self.id:\n raise EMMAConfException(\"No group specified. Specify the H5 group to use by using a colon, e.g. file:group\")\n file, _, group = self.id.rpartition(\":\")\n path = join(self.root, 'ASCAD/ASCAD_data/ASCAD_databases/%s.h5' % file)\n\n # Make sure we never use training set when attacking or classifying\n self.trace_set_paths = emma.io.io.get_ascad_paths(path, group)\n else:\n raise Exception(\"Unknown input format '%s'\" % self.format)\n\n # Limit trace set paths\n self.trace_set_paths = self.trace_set_paths[0:emma_conf.max_num_tracesets]\n assert(len(self.trace_set_paths) > 0)\n\n # Assign reference signal\n reference_trace_set = emma.io.io.get_trace_set(join(self.root, self.trace_set_paths[0]), self.format, ignore_malformed=False, remote=False) # TODO add parameter to allow choosing reference trace set index. Fixed now to 0.\n\n self.traces_per_set = len(reference_trace_set.traces)\n self.reference_signal = reference_trace_set.traces[self.reference_index].signal",
"def create_pokedex(filepath):\n try:\n file = open(filepath, 'r')\n except FileNotFoundError:\n return {}\n else:\n pokedex = {}\n for line in file:\n stats = line.rstrip().split(\",\")\n if stats[0].isdigit():\n stats.pop(4)\n if stats[11] == \"True\":\n legendary = True\n else:\n legendary = False\n pokedex[stats[1]] = create_entry(int(stats[0]), stats[1], stats[2], stats[3], int(stats[4]), int(stats[5]), int(stats[6]), int(stats[7]), int(stats[8]), int(stats[9]), int(stats[10]), legendary)\n file.close()\n return pokedex",
"def makeConfig (self):\n for line in self.lines :\n ll = line.split ('=', 1)\n if len(ll) < 2 :\n print \"Error in parsing cfg label line: \" , line\n return None\n self.config[(ll[0]).strip()] = ((ll[1]).strip())",
"def process_csv():\n csv_rows = []\n fieldnames = ['site',\n 'latitude',\n 'longitude',\n 'city',\n 'region_code',\n 'country_code',\n 'continent_code',\n 'min_ip_hex',\n 'max_ip_hex',\n 'transit_provider',\n 'min_ip',\n 'max_ip',\n 'ip_prefix',\n 'min_ipv6_hex',\n 'max_ipv6_hex',\n 'min_ipv6',\n 'max_ipv6',\n 'ipv6_prefix']\n\n location_map = build_location_map()\n\n # Read in the CSV file and augment the columns\n with open(INPUT_FILE, 'rb') as csvfile:\n reader = csv.DictReader(csvfile)\n\n for row in reader:\n csv_rows.append(process_row(row, location_map))\n\n # Write the new CSV file with new columns\n with open(OUTPUT_FILE, 'w') as csvfile:\n writer = csv.DictWriter(csvfile, fieldnames=fieldnames)\n writer.writeheader()\n for row in csv_rows:\n writer.writerow(row)\n\n print(\"MLab Sites CSV generated at {0}\".format(OUTPUT_FILE))",
"def load_map_config(self, filename):\n # Load map configuration file\n assert os.path.exists(filename), \"Missing config file {}\".format(filename)\n self.config_root = os.path.dirname(os.path.abspath(filename))\n cfg = XFasterConfig()\n cfg.read(filename)\n\n # dictionary of map frequencies keyed by map tag\n assert \"frequencies\" in cfg, \"Missing [frequencies] section\"\n self.dict_freqs = {\n k: cfg.getfloat(\"frequencies\", k) for k in cfg[\"frequencies\"]\n }\n tagset = set(self.dict_freqs)\n assert len(self.dict_freqs) > 0, \"At least one map tag is required\"\n\n # beam fwhm for each tag, if not supplied in beam_product\n # converted from arcmin to radians\n if \"fwhm\" in cfg:\n self.fwhm = {\n k: np.radians(cfg.getfloat(\"fwhm\", k) / 60.0) for k in cfg[\"fwhm\"]\n }\n assert tagset >= set(self.fwhm), \"Unknown tags in [fwhm]\"\n else:\n self.fwhm = {}\n\n # beam fwhm error for each tag, if not supplied in beam_error_product\n if \"fwhm_err\" in cfg:\n self.fwhm_err = {k: cfg.getfloat(\"fwhm_err\", k) for k in cfg[\"fwhm_err\"]}\n assert tagset >= set(self.fwhm_err), \"Unknown tags in [fwhm_err]\"\n else:\n self.fwhm_err = {}\n\n # make sure beam product files exist\n if \"beam\" in cfg:\n v = cfg[\"beam\"].get(\"beam_product\", None)\n if str(v).lower() != \"none\":\n if not os.path.exists(v):\n v = os.path.join(self.config_root, v)\n assert os.path.exists(v), \"Missing beam product file {}\".format(v)\n self.beam_product = pt.load_compat(v)\n beam_set = set(self.beam_product)\n assert tagset >= beam_set, \"Unknown tags in beam product\"\n else:\n self.beam_product = {}\n\n v = cfg[\"beam\"].get(\"beam_error_product\", None)\n if str(v).lower() != \"none\":\n if not os.path.exists(v):\n v = os.path.join(self.config_root, v)\n assert os.path.exists(v), \"Missing beam error product file {}\".format(v)\n self.beam_error_product = pt.load_compat(v)\n beam_set = set(self.beam_error_product)\n assert tagset >= beam_set, \"Unknown tags in beam error product\"\n else:\n self.beam_error_product = {}\n else:\n self.beam_product = {}\n self.beam_error_product = {}\n\n # make sure all tags are present in either beam products or fwhm tables\n fwhm_set = set(self.fwhm) | set(self.beam_product)\n assert fwhm_set == tagset, \"Missing tags in [fwhm] or beam product\"\n\n if len(self.fwhm_err) or len(self.beam_error_product):\n fwhm_set = set(self.fwhm_err) | set(self.beam_error_product)\n assert (\n fwhm_set == tagset\n ), \"Missing tags in [fwhm_err] or beam error product\"\n\n # fit for the transfer function for each tag?\n if \"transfer\" in cfg:\n self.fit_transfer = {\n k: cfg.getboolean(\"transfer\", k) for k in cfg[\"transfer\"]\n }\n assert tagset == set(self.fit_transfer), \"Missing tags in [transfer]\"\n else:\n # assume true for all tags otherwise\n self.fit_transfer = {k: True for k in self.dict_freqs}",
"def process_settings(self, settings_file):\n int_keys = [ 'first_base_to_keep', 'last_base_to_keep', 'max_reads_to_split', 'minimum_reads_for_inclusion',\n 'pool_5trim', 'pool_3trim', 'min_post_adaptor_length']\n #float_keys = []\n str_keys = ['adaptor_sequence', 'rrna_index', 'genome_index', 'pool_append', 'pool_prepend', 'primer_sequence']\n boolean_keys = ['collapse_identical_reads', 'force_read_resplit', 'force_remapping', 'force_recollapse',\n 'force_recount', 'force_index_rebuild', 'force_retrim', 'trim_adaptor']\n list_str_keys = ['fastq_gz_files', 'sample_names']\n #list_float_keys = ['concentrations', 'input_rna']\n extant_files = ['pool_fasta',]\n config = ConfigParser.ConfigParser()\n config.read(settings_file)\n settings = {}\n for section in config.sections():\n for option in config.options(section):\n settings[option] = config.get(section, option)\n settings[section] = True\n for k in int_keys:\n settings[k] = int(settings[k])\n for k in str_keys:\n settings[k] = settings[k]\n #for k in float_keys:\n # settings[k] = float(settings[k])\n for k in boolean_keys:\n if not settings[k].lower() in ['true', 'false']:\n raise ValueError(\n 'Boolean value %s must be \"true\" or \"false\"' % k)\n settings[k] = settings[k].lower() == 'true'\n #for k in list_float_keys:\n # settings[k] = map(float, simplejson.loads(settings[k]))\n #for k in list_int_keys:\n # settings[k] = map(int, simplejson.loads(settings[k]))\n for k in list_str_keys:\n settings[k] = simplejson.loads(settings[k])\n self.fqdir = settings['fastq_dir']\n self.sample_names = settings['sample_names']\n self.fastq_gz_file_handles = [os.path.join(self.fqdir, fastq_gz_file) for fastq_gz_file in\n settings['fastq_gz_files']]\n for file_handle in self.fastq_gz_file_handles:\n assert tps_utils.file_exists(file_handle)\n for k in extant_files:\n assert tps_utils.file_exists(settings[k])\n self.settings = settings\n self.wdir = settings['working_dir']\n self.rdir = settings['results_dir']\n shutil.copy(settings_file, self.rdir)",
"def from_csv(self, path):\n for model, table in [(self.Dataset, 'dataset'),\n (self.Datarun, 'datarun'),\n (self.Hyperpartition, 'hyperpartition'),\n (self.Classifier, 'classifier')]:\n df = pd.read_csv(os.path.join(path, '%ss.csv' % table))\n\n # parse datetime columns. This is necessary because SQLAlchemy can't\n # interpret strings as datetimes on its own.\n # yes, this is the easiest way to do it\n for c in inspect(model).attrs:\n if type(c) != ColumnProperty:\n continue\n col = c.columns[0]\n if type(col.type) == DateTime:\n df[c.key] = pd.to_datetime(df[c.key],\n infer_datetime_format=True)\n\n for _, r in df.iterrows():\n # replace NaN and NaT with None\n for k, v in list(r.iteritems()):\n if pd.isnull(v):\n r[k] = None\n\n # insert the row into the database\n create_func = getattr(self, 'create_%s' % table)\n create_func(**r)",
"def fromCSV(self, filename):\n with open(filename, newline = '') as csvfile:\n read = csv.reader(csvfile, delimiter = ',')\n param = []\n for row in read:\n param.append(row)\n self.c = complex(param[0][0])\n self.xmin = int(param[1][0])\n self.xmax = int(param[2][0])\n self.xlen = int(param[3][0])\n self.ymin = int(param[4][0])\n self.ymax = int(param[5][0])\n self.ylen = int(param[6][0])\n self.fs = np.vectorize(param[7][0])\n self.plane = np.loadtxt(\"plane.csv\", delimiter = ',', dtype = 'int') #Read plane from plane.csv file ",
"def __init__(self, in_csvfile, out_csvfile, col_name, cell_filler):\r\n self.in_csvfile = in_csvfile\r\n self.out_csvfile = out_csvfile\r\n self.col_name = col_name\r\n self.cell_filler = cell_filler",
"def from_csv(cls, load_folder: Path) -> \"Parameters\":\n serializer = serializer_factory(fmt=SerializerEnum.CSV)\n return serializer.load(class_obj=cls, folder_path=load_folder)",
"def createPPSConfig(ppsConfigFilePath, keyDict):\n out = csv.OutFileBuffer(ppsConfigFilePath)\n out.writeText(\n \"\"\"#######################################################################\n#configuration file (PPS+ GENERATED !!!)\n#please make sure that there is no space before or after \":\"\n#lines starting with character \"#\" are treated as comments\n#please provide complete paths instead of only file or directory names\n#######################################################################\n#directory where processed NCBI data is stored, provide empty directory to create new\n#REUSABLE\\n\"\"\")\n out.writeText('NCBI_PROCESSED_DIR:%s\\n' % keyDict.get('NCBI_PROCESSED_DIR', ''))\n out.writeText(\n \"\"\"#Directory containing NCBI taxonomy in SQlite3 format with file name \"ncbitax_sqlite.db\"\n#provide empty directory to create new database\n#REUSABLE\\n\"\"\")\n out.writeText('NCBI_TAX_DIR:%s\\n' % keyDict.get('NCBI_TAX_DIR', ''))\n out.writeText('#project directory, the directory must be empty\\n')\n out.writeText('PROJECT_DIR:%s\\n' % keyDict.get('PROJECT_DIR', ''))\n out.writeText(\n \"\"\"#############################\n#!!!FOLLOWING ARE OPTIONAL!!!\n#############################\n###### Output space options #####\n#a file containing a tree in newick format (see restrictions in INSTALL.txt)\n#OR a file with ncbi taxon ids (one id per line) to create a tree from\\n\"\"\")\n out.writeText('TREE_FILE:%s\\n' % keyDict.get('TREE_FILE', ''))\n out.writeText(\n \"\"\"#Taxonomic ranks (comma separated, no space) starting at the lowest rank. \\\nPlease make sure that \"root\" is there at the end.\nTAXONOMY_RANKS:species,genus,family,order,class,phylum,superkingdom,root\n#number of minimum genomes a clade must contain to be included in generic model\n#effective only if tree file is not provided\nN_MIN_GENOMES_GENERIC:3\n#action on loss 0:disabled, 1:invert\nLOSS_ACTION:0\n###### Input space options #####\n#a directory with sample specific fasta files (file names must start with appropriate organism/species \\\nncbi taxonomic id)\n#leave empty if you don't have any\\n\"\"\")\n out.writeText('SAMPLE_SPECIFIC_DIR:%s\\n' % keyDict.get('SAMPLE_SPECIFIC_DIR', ''))\n out.writeText(\n \"\"\"#kmer feature space for multiple kmers use kmer_min-kmer_max\nKMER:4-6\n#Fragment lengths for different models (comma separated, no space)\nFRAGMENT_LEN:1000,3000,5000,10000,15000,50000\n#kmer feature\n#use reverse complement for computing kmer features?\nREV_COMPLEMENT:1\n#remove reverse complement features?\nRM_REV_COMPLEMENT:1\n#0:disabled, 1:sequence length, 2:sequence_length-k+1, 3:embedded monomer frequency\nKMER_NORMALIZATION:1\n#Number of examples per training file\nNUMBER_EXAMPLES:10000\n#step size for sample specific data; either a single number (for all fragment lengths) or an array separated with \",\"\nSAMPLE_SPECIFIC_STEP:1000,300,500,1000,1500,5000\n###### Training options #####\n#C values for SVM, if single value is given then models will be build with that value.\n#If comma separated (no space) values are given then cross-validation will be performed.\n#If a single value is provided, all models will be built with it. Our experience shows that in general\n#values less than 1 (e.g. 0.01 and 0.1) do not provide good models.\nC_GRID:1000\n#clean-up the data (sampled_fasta and train_data directories) created after training? 
TRUE/FALSE\nCLEAN_UP_TRAIN:FALSE\n#kernel type 0:linear, 1:polynomial, 2:rbf (on-linear kernels are computationally expensive)\nKERNEL:0\n##polynomial kernel degree\nKERNEL_POLYNOMIAL_DEGREE:2\n##rbf kernel gamma\nKERNEL_RBF_GAMMA:1\n##polynomial kernel s\nKERNEL_POLYNOMIAL_S:1\n###### Predictions options #####\n#number of classifiers to use, keep this odd to avoid ties\nN_CLASSIFIERS:3\n#Create Pie charts for every taxonomic rank TRUE/FALSE (in prediction)\n#slice colors are determined automatically so no color consistency is guaranteed\nPIE_CHARTS:FALSE\n###### Misc options #####\n#should the models be built in parallel (please make sure that you have enough number of\nprocessors and main memory)\\n\"\"\")\n out.writeText('PARALLEL_MODELS:%s\\n' % keyDict.get('PARALLEL_MODELS', 'FALSE'))\n out.writeText(\n \"\"\"#allowed file extensions\nEXTENSIONS:\n#genomes to exclude: file containing one ncbi tax_id per line\\n\"\"\")\n out.writeText('GENOMES_EXCLUDE:%s\\n' % keyDict.get('GENOMES_EXCLUDE', ''))\n out.writeText(\n \"\"\"#if the training data is already there then just build models (TRUE/FALSE)\nONLY_MODELS:FALSE\\n\"\"\")\n out.close()",
"def initFromFile(self,file):\n self.source = file\n file_reader = open(file,\"r\")\n self.isInit = True\n lineCounter = 0\n firstLine = None\n SecondLine = None\n ThirdLine = None\n for line in file_reader:\n if(lineCounter == 0):\n firstLine = line.split()\n self.rowsNumber = int(firstLine[0])\n self.columnsNumber = int(firstLine[1])\n self.routerRangeRadius = int(firstLine[2])\n if(lineCounter == 1):\n SecondLine = line.split()\n self.backBoneCosts = int(SecondLine[0])\n Path.backBoneCost = self.backBoneCosts\n self.routerCosts = int(SecondLine[1])\n self.budget = int(SecondLine[2])\n if(lineCounter == 2):\n ThirdLine = line.split()\n self.firstCell = Cell(int(ThirdLine[0]),int(ThirdLine[1]))\n if(lineCounter>2):\n self.map.append([])\n LINE = line\n columnCounter = 0\n for char in LINE:\n temp = Cell(len(self.map)-1,columnCounter,Cell.getCellType(char))\n self.map[len(self.map)-1].append(temp)\n if(temp.cellType == \"FLOOR\"):\n self.notComputeRouter.append(temp)\n columnCounter += 1\n lineCounter +=1\n self.isInit = True",
"def read_csv(file_name):\n data = {}\n with open(file_name) as f:\n f = MyIter(f)\n try:\n for line in f:\n if not line.strip():\n continue\n if line == 'Points\\n':\n break\n key, val = read_key_value(line, separator=',')\n key = key.lower().replace(' ', '_')\n data[key] = val\n\n x_units, y_units = next(f).split(',')\n data['x_units'], data['y_units'] = x_units.strip(), y_units.strip()\n\n xs, ys = [], []\n for line in f:\n x, y = line.split(',')\n xs.append(float(x.strip()))\n ys.append(float(y.strip()))\n except Exception as e:\n print(f'Error on line {f._index}')\n print(f._line)\n raise e\n\n elong = Elongation(\n np.array(xs), np.array(ys),\n float(data['gauge_length']),\n float(data['sample_width']),\n float(data['sample_thickness'])\n )\n return [elong]",
"def loadConfig():\n lines = []\n config = {}\n here = path.dirname(__file__)\n fn = path.join(here,'manatee.conf')\n try:\n with codecs.open(fn,'rU','utf-8') as conf:\n lines = conf.readlines()\n conf.close()\n except IOError as e:\n print \" Could not open configuration file: %s\" % e\n\n for line in lines:\n try:\n line = line.strip()\n if line:\n values = [x.strip() for x in line.split('=')]\n config[values[0]] = values[1]\n except Exception as e:\n print \"There was an error in the configuration file: %s\" % e\n # TODO: Any strings from the config file that might be displayed or passed into the SQL server need to be validated here.\n# config = validateConfig(config)\n return config",
"def initCSV(self, makeFile, overWrite):\n self.initialized = True\n\n os.chdir(os.path.dirname(os.path.abspath(__file__)))\n\n if os.path.exists(str(self.fileName)):\n\n f = open(str(self.fileName), \"r\")\n\n if not f.read():\n f.close()\n\n f = open(str(self.fileName), \"w\")\n outString = \"\"\n for varName in self.variableDescriptions:\n outString += varName\n outString += \",\"\n\n f.write(outString[0:-1])\n\n f.write('\\n')\n else:\n if overWrite == True:\n f.close()\n\n f = open(str(self.fileName), \"w\")\n outString = \"\"\n for varName in self.variableDescriptions:\n outString += varName\n outString += \",\"\n\n f.write(outString[0:-1])\n\n f.write('\\n')\n if overWrite == False:\n raise OSError(\"csv file is not empty!\")\n\n else:\n if makeFile == True:\n f = open(str(self.fileName), \"w\")\n \n f.close()\n else:\n raise OSError(\"csv file not found!\")",
"def load(cls):\n \n # Loop through procedures and build patient procedure lists:\n procs = csv.reader(file(PROCEDURES_FILE,'U'),dialect='excel-tab')\n header = procs.next() \n for proc in procs:\n cls(dict(zip(header,proc))) # Create a procedure instance ",
"def __init__(self, ini_file):\n self.config = configparser.ConfigParser()\n self.config.read(ini_file)",
"def load_from_file_csv(cls):\n try:\n with open(cls.__name__ + \".csv\", \"r\") as f:\n ld = []\n reader = csv.DictReader(f)\n for row in reader:\n for key, val in row.items():\n row[key] = int(val)\n ld.append(row)\n return [cls.create(**item) for item in ld]\n except FileNotFoundError:\n return []",
"def cell_map_from_csv(self, source_file: str) -> None:\n if source_file[-4:] == '.csv':\n try:\n self._import_source_data(source_file)\n except Exception:\n print(\"Problem with that CSV file. File extension?\")",
"def parse(filehandle):\n for row in csv.DictReader(filehandle):\n yield cccalc.types.Fill(row)",
"def __init__(self,file_path):\n\t\tdata_reader = csv.DictReader(file(file_path,'rU'))\n\t\tfor row in data_reader:\n\t\t\t# we have to turn the strings into floating point numbers.\n\t\t\tc = Compound( name = row['Name'],\n\t\t\t Antoine_params = [float(row['Antoine A']),float(row['Antoine B']),float(row['Antoine C'])],\n\t\t\t mass_density = float(row['Mass Density']),\n\t\t\t MW = float(row['Molecular Weight']),\n\t\t\t #Hvap = float(row['Enthalpy of Vaporization']),\n\t\t\t Cp = float(row['Molar Heat Capacity']) )\n\t\t\t# place it in the dictionary\n\t\t\t#print \"Have just read in \",c\n\t\t\tself[c.name] = c",
"def _parse_dataset_config(self, user_cfg):\n\n # default dataset parameters\n # TODO: move to conf/\n dataset = {\n \"file_type\": \"\",\n \"compression\": None,\n \"encoding\": \"utf-8\",\n \"path\": \"\",\n \"name\": \"\",\n \"xid\": \"x\",\n \"yid\": \"y\",\n \"sep\": \"\",\n \"sheet\": 0,\n \"config_file\": \"\",\n \"index_col\": 0,\n \"metadata\": {\n \"columns\": \"\",\n \"rows\": \"\"\n },\n \"styles\": {\n \"columns\": {\n \"color\": []\n },\n \"rows\": {\n \"color\": []\n }\n },\n \"actions\": []\n }\n\n logging.info(\"Parsing %s config\", user_cfg[\"name\"])\n\n # check for any unsupported settings\n self._detect_unknown_settings(dataset, user_cfg)\n\n # overide default settings with user-provided ones\n # dataset.update(user_cfg)\n dataset = recursive_update(dataset, user_cfg)\n\n # get file extension (excluding .gz suffix, if present)\n ext = pathlib.Path(dataset[\"path\"].lower().replace(\".gz\", \"\")).suffix\n\n # if data source not specified, attempt to guess from file extension\n if dataset[\"file_type\"] == \"\":\n if ext in [\".csv\", \".txt\", \".tsv\", \".tab\"]:\n # comma-separated / tab-delmited\n dataset[\"file_type\"] = \"csv\"\n elif ext in [\".xls\", \".xlsx\"]:\n # excel spreadsheet\n dataset[\"file_type\"] = \"xls\"\n elif ext in [\".feather\"]:\n dataset[\"file_type\"] = \"feather\"\n elif ext in [\".parquet\"]:\n dataset[\"file_type\"] = \"parquet\"\n else:\n msg = \"[ERROR] Config error: could not determine appropriate file_type for {}\"\n sys.exit(msg.format(dataset[\"path\"]))\n\n # determine delimiter for csv/tsv files\n if dataset[\"file_type\"] == \"csv\" and dataset[\"sep\"] == \"\":\n if ext in [\".csv\"]:\n dataset[\"sep\"] = \",\"\n elif ext in [\".tsv\", \".tab\", \".txt\"]:\n dataset[\"sep\"] = \"\\t\"\n\n # compression flag for csv/tsv files\n # for feather/parquet input datasets compression does not need to be specified\n if dataset[\"path\"].endswith(\"gz\"):\n dataset[\"compression\"] = \"gzip\"\n\n # if a str index column value is specified, wrap in quotation marks so that it is handled\n # properly in templates\n if isinstance(dataset[\"index_col\"], str):\n dataset[\"index_col\"] = \"'{}'\".format(dataset[\"index_col\"])\n\n # parse actions section config section\n dataset[\"actions\"] = self._parse_actions_list(\n dataset[\"actions\"], dataset[\"name\"]\n )\n\n # if nothing is specified in config file, generate report plots using all\n # relevant metadata fields as stylistic elements\n self._check_styles(dataset)\n\n # validate dataset config\n self._validate_dataset_config(dataset)\n\n # separate actions from rest of dataset parameters\n dataset_actions = dataset[\"actions\"]\n\n del dataset[\"actions\"]\n\n # add actions to SnakeWrangler instance\n self._wrangler.add_actions(dataset[\"name\"], dataset_actions, **dataset)\n\n # store parsed dataset config\n return dataset",
"def __init__(self, csvfile, fieldnames, *args, **kwargs):\n self.encoding = kwargs.pop('encoding', 'utf-8')\n csv.DictWriter.__init__(self, csvfile, fieldnames, *args, **kwargs)",
"def __init__(self, settings):\n self._read_config(settings)",
"def dataLoad():\n try:\n try: #Python3\n f = open(__file__ + \".csv\",\"rt\")\n except: #Python2\n f = open(__file__ + \".csv\",\"rb\")\n data = f.read().split(',')\n entryCol.entry0.delete(0,END)\n entryCol.entry0.insert(0,data[0])\n entryCol.entry1.delete(0,END)\n entryCol.entry1.insert(0,data[1])\n entryCol.entry2.delete(0,END)\n entryCol.entry2.insert(0,data[2])\n entryCol.entry3.delete(0,END)\n entryCol.entry3.insert(0,data[3])\n botWind.writeN(\"DataLoad: File\")\n except:\n botWind.writeN(\"DataLoad: Default\")",
"def init_from_file(self, filepath, effects_log):\n df = read_input_file(filepath, effects_log, index_col=0)\n\n key = pd.Series(zip(\n df['vehicle_id'],\n df['calendar_year']\n ))\n df.set_index(key, inplace=True)\n\n self._dict = df.to_dict('index')",
"def import_glucose_from_csv(user, csv_file):\n csv_data = []\n reader = csv.reader(csv_file.read().splitlines(), delimiter=',',\n quotechar='\"')\n for row in reader:\n csv_data.append([item.strip() for item in row])\n\n glucose_objects = []\n\n # Check if headers exists. Skip the first entry if true.\n header_check = ['value', 'category', 'date', 'time']\n first_row = [i.lower().strip() for i in csv_data[0]]\n if all(i in first_row for i in header_check):\n csv_data = csv_data[1:]\n\n for row in csv_data:\n # Let's do an extra check to make sure the row is not empty.\n if row:\n try:\n category = Category.objects.get(name__iexact=row[1].strip())\n except ObjectDoesNotExist:\n category = Category.objects.get(name__iexact='No Category'.strip())\n\n # Since we always store the value in mg/dL format in the db, we need\n # to make sure we convert it here if the user's setting is set to\n # mmol/L.\n if user.settings.glucose_unit.name == 'mmol/L':\n value = int(to_mg(row[0]))\n else:\n value = int(row[0])\n\n glucose_objects.append(Glucose(\n user=user,\n value=value,\n category=category,\n record_date=datetime.strptime(row[2], DATE_FORMAT),\n record_time=datetime.strptime(row[3], TIME_FORMAT),\n notes=row[4],\n ))\n\n Glucose.objects.bulk_create(glucose_objects)",
"def readSiteCsv(csv_site_info_directory, csv_site_info):\n\t\n params_dict = {}\n #lon, lat = 0., 0.\n\t\n with open ((csv_site_info_directory + '/' + csv_site_info), newline='') as csvfile:\n sitereader = csv.reader(csvfile, delimiter = ',')\n \n # Skip header row\n sitereader.__next__()\n\n for row in sitereader:\n \n # record params in variable for clarity\n lon = row[0]\n lat = row[1]\n vs30 = float(row[2])\n # using California calc from Campbell & Bozorgnia 2014\n z2pt5 = math.exp(7.089 - 1.144 * math.log(vs30))\n # using calc from Chiou & Youngs 2008\n z1pt0 = math.exp(28.5 - (3.82 / 8.0) * math.log(vs30**8 + 378.7**8))\n\n # Only add key to dictionary if site params can become floats\n if isFloat(lon) & isFloat(lat) & isFloat(vs30) & isFloat(z2pt5) & isFloat(z1pt0):\n if (float(lon) < 180) & (float(lon) > -180) & (float(lat) < 90) & (float(lon) > -90):\n params_dict.update({(lon, lat): (vs30, z1pt0, z2pt5)})\n \n return params_dict",
"def from_csv(self, filename):\n\t\tpoints = np.genfromtxt(filename, delimiter=\",\")\n\t\tassert points.shape[1] == 2\n\n\t\tself.N = points.shape[0]\n\t\tself.points = points\n\t\tself.original_points = points",
"def load_from_file(self):\n\n # if state = 0 the function checks if\n # the file starts with the string CONFIG_VID_PID\n state = 0\n\n try: \n with open(self.filename, 'r') as csvfile:\n reader = csv.reader(csvfile, delimiter=' ')\n for row in reader:\n\n # checks if the file starts with CONFIG_VID_PID\n if state == 0:\n if not (row[0] == 'CONFIG_VID_PID'):\n raise MalformedConfigurationFile\n else:\n # go to next step, i.e., checking the header of the file\n state = state + 1\n \n # checks if the header is of the form 'VID PID'\n elif state == 1:\n if not (row[0] == 'VID' and row[1] == 'PID'):\n raise MalformedConfigurationFile\n else:\n # go to next step, i.e., reading tuples of VIDs and PIDs\n # if they are valid\n state = state + 1\n\n # read and store tuples of VIDs and PIDs, if they are valid\n else:\n # extract VID and PID\n vid = row[0]\n pid = row[1]\n \n # check if the row contains a 4 characters long VID and\n # a 4 characters long PID\n if (len(row[0]) != 4) or (len(row[1]) != 4):\n raise MalformedConfigurationFile\n\n # store (VID, PID) pair\n self.vid_pid_s.append((vid,pid))\n \n except (OSError, IOError) as e:\n if getattr(e, 'errno', 0) == errno.ENOENT:\n print('Error: Configuration file ' + self.filename + ' not found.')\n sys.exit(1)\n except MalformedConfigurationFile:\n print('Error: Malformed configuration file ' + self.filename + '.')\n sys.exit(1)\n\n # if no (VID, PID) tuples were found exit\n if len(self.vid_pid_s) == 0:\n print('Error: No (VID, PID) entries found in ' + self.filename + '.')\n sys.exit(1)",
"def populate(self):\n\n self.create_index()\n self.check_type()\n self.create_mapping()\n\n f = open(self.csv_file, 'rU')\n\n # Read the first line for all the headers\n headers = f.readline().split(',')\n\n # Read the rest of the document\n rows = f.readlines()\n added_counter = 0\n\n actions = []\n for row in rows:\n fields = row.split(',')\n obj = {}\n for header in headers:\n # we call lower-case here because we were originally using\n # analyzed strings in elasticsearch (and they were\n # automatically converted). Code was built based on that so it's\n # easiest to convert for now\n try:\n obj[header.replace('\\n', '')] = float(fields[\n headers.index(header)].replace('\\n', '').lower())\n except ValueError:\n obj[header.replace('\\n', '')] = fields[\n headers.index(header)].replace('\\n', '').lower()\n # check afterwards to replace empty strings with None (which json.dumps hopefully writes to null)\n if obj[header.replace('\\n', '')] == '':\n obj[header.replace('\\n', '')] = None\n try:\n item = {\n '_index': self.es_main_index,\n '_type': self.es_main_type,\n '_source': obj\n }\n\n actions.append(item)\n\n added_counter += 1\n print('%s new records added' % added_counter,\n end='\\r')\n sys.stdout.flush()\n\n if added_counter % self.chunk_size == 0:\n helpers.bulk(self.es, actions)\n actions = []\n\n except ConnectionError:\n print('There was a connection error. Check your Elastic' +\n ' Search setting and make sure Elastic Search is ' +\n 'running.')\n return False\n\n # add the remaining items\n if actions:\n helpers.bulk(self.es, actions)\n\n print('The update is completed. %s new records were added.' %\n added_counter)",
"def __init__(self, csv_file: str = None) -> None:\n super().__init__(csv_file)",
"def __init__(self, csv_file: str = None) -> None:\n super().__init__(csv_file)",
"def _create_default_setting(path):\n try:\n from configparser import ConfigParser\n except ImportError:\n from ConfigParser import ConfigParser # ver. < 3.0\n\n # instantiate\n config = ConfigParser()\n\n # update existing value\n config['Assets Paths'] = {\n 'background': 'assets\\\\images\\\\background.png',\n 'bullet': 'assets\\\\images\\\\bullet.png',\n 'bullet_red': 'assets\\\\images\\\\bullet_red.png',\n 'icon' : 'assets\\\\images\\\\RedInvader.png',\n\n 'ship': 'assets\\\\images\\\\Ship.png',\n 'ship_cr': 'assets\\\\images\\\\ShipCrushedRight.png',\n 'ship_cl': 'assets\\\\images\\\\ShipCrushedLeft.png',\n 'ship_cc': 'assets\\\\images\\\\ShipWhite.png',\n\n 'invadera1': 'assets\\\\images\\\\InvaderA1.png',\n 'invadera2': 'assets\\\\images\\\\InvaderA2.png',\n 'invaderb1': 'assets\\\\images\\\\InvaderB1.png',\n 'invaderb2': 'assets\\\\images\\\\InvaderB2.png',\n 'invaderc1': 'assets\\\\images\\\\InvaderC1.png',\n 'invaderc2': 'assets\\\\images\\\\InvaderC2.png',\n\n }\n config['castle'] = {\n 'castle_location': [\n [1, 1, 1, 1, 1],\n [1, 1, 1, 1, 1],\n [1, 1, 0, 1, 1],\n [1, 1, 0, 1, 1],\n [1, 0, 0, 0, 1],\n ],\n 'start_x': 50,\n 'start_y': 500,\n 'column': 5,\n 'row': 5,\n 'block_l3': (9, 255, 14),\n 'block_l2': (27, 255, 30),\n 'block_l1': (114, 255, 133),\n }\n config['alien'] = {\n 'margin_width': 200,\n 'margin_height': 20,\n 'column': 'd',\n 'Row': 5,\n 'width_x': 10,\n 'width_y': 10,\n 'movement': 10,\n 'alien_column_config': r'{\"0\":{\"path1\":\"InvaderA1\",\"path2\":\"InvaderA2\"},\"1\":{\"path1\":\"InvaderB1\",\"path2\":\"InvaderB2\"},\"2\":{\"path1\":\"InvaderB1\",\"path2\":\"InvaderB2\"},\"3\":{\"path1\":\"InvaderC1\",\"path2\":\"InvaderC2\"},\"4\":{\"path1\":\"InvaderC1\",\"path2\":\"InvaderC2\"}}'\n }\n config['player 1'] = {\n 'margin': 20,\n 'speed': 3\n }\n\n with open(path, 'w') as configfile:\n config.write(configfile)",
"def parse_config(file):\n\n # dictionaries specifying required parameters for various modules\n general = [\"tube_number\", \"tube_radius\", \"collision_objects_filename\"]\n optimizers = {\"nelder_mead\": [\"optimizer_precision\", \"optimize_iterations\"]}\n solvers = {\"rrt\": [\"step_bound\", \"iteration_number\",\n \"tube_lengths\", \"single_tube_control\", \"rotation_max\"],\n \"rrt_star\": [\"step_bound\", \"iteration_number\", \"tube_lengths\", \"rotation_max\"]}\n models = {\"kinematic\": [\"q_dof\", \"delta_x\", \"tube_lengths\", \"strain_bases\"],\n \"static\": [\"q_dof\", \"delta_x\", \"tube_lengths\", \"strain_bases\",\n \"basis_type\", \"degree\"]}\n heuristics = {\"square_obstacle_avg_plus_weighted_goal\": [\"goal_weight\"],\n \"only_goal_distance\": [],\n \"follow_the_leader\": [\"only_tip\"],\n \"follow_the_leader_w_insertion\": [\"only_tip\", \"insertion_weight\"],\n \"follow_the_leader_translation\": [\"only_tip\"]}\n\n # groups together the required parameter dictionaries\n dictionaries = {\"optimizer\": optimizers, \"solver\": solvers,\n \"model\": models, \"heuristic\": heuristics}\n\n # dictionary detailing all of the default parameter values\n defaults = {\"optimizer_type\": \"nelder_mead\",\n \"solver_type\": \"rrt\",\n \"model_type\": \"kinematic\",\n \"heuristic_type\": \"square_obstacle_avg_plus_weighted_goal\",\n \"tube_number\": 2,\n \"tube_radius\": [3, 2],\n \"collision_objects_filename\": \"init_objects.json\",\n \"optimizer_precision\": 0.1,\n \"step_bound\": 3,\n \"tube_lengths\": [60, 50],\n \"iteration_number\": 2000,\n \"rewire_probability\": 0.1,\n \"goal_weight\": 2,\n \"q_dof\": [1, 1],\n \"delta_x\": 1,\n \"single_tube_control\": True,\n \"optimize_iterations\": 50,\n \"only_tip\": True,\n \"insertion_weight\": 10,\n \"strain_bases\": [\"constant\", \"constant\"],\n \"basis_type\": \"last_strain_base\",\n \"degree\": 2,\n \"rotation_max\": 0.1745\n }\n\n with file.open(mode='r') as fid:\n config = yaml.full_load(fid)\n\n if config is None: # for empty config file\n config = dict()\n\n for g in general:\n if g not in config:\n config[g] = defaults.get(g)\n print(f\"{g} not specified in {file.name}. Using default value \"\n f\"{defaults.get(g)} instead.\")\n\n _config_helper(\"optimizer_type\", optimizers, config, file.name, defaults)\n _config_helper(\"solver_type\", solvers, config, file.name, defaults)\n _config_helper(\"model_type\", models, config, file.name, defaults)\n _config_helper(\"heuristic_type\", heuristics, config, file.name, defaults)\n\n config_validation(config)\n\n return config, dictionaries",
"def parse_data_config(path):\n cfg = dict()\n cfg['gpus'] = '0,1,2,3'\n cfg['num_workers'] = '10'\n \n with open(path, 'r') as fp:\n lines = fp.readlines()\n for line in lines:\n line = line.strip()\n if line == '' or line.startswith('#'):\n continue\n key, value = line.split('=')\n cfg[key.strip()] = value.strip()\n \n return cfg",
"def __csv_schema_generator(file):\n try:\n # Parses the first line of the file to get all the headers.\n metadata = str(file.readline().decode('utf-8')).strip().split(',')\n # Will be further implemented in phase 3.\n return SchemaGenerator.__build_schema(metadata)\n except Exception as e:\n logging.error('Failed to parse csv file into schema: ' + str(e))\n raise FailedCreatingSchemaException(\"Failed to create schema from csv file.\")",
"def __init__(self, filename=\"config.ini\"):\n if not os.path.isfile(filename):\n self.set_default_config(filename)\n\n self.config = configparser.ConfigParser()\n self.config.read(filename)\n\n self.filename = filename\n self.database_name = self.config.get('config',\n 'database_name',\n fallback='manga.db')\n self.volume_limit = self.config.getint('config',\n 'volume_limit',\n fallback=128)\n self.series_per_page = self.config.getint('config',\n 'series_per_page',\n fallback=0)\n self.compact_list = self.config.getboolean('config',\n 'compact_list',\n fallback=False)\n self.show_empty_series = self.config.getboolean('config',\n 'show_empty_series',\n fallback=False)\n self.default_to_gui = self.config.getboolean('config',\n 'default_to_gui',\n fallback=True)",
"def buildCurrencyDict(filename): \n currencies = {}\n with open(os.path.join(\"input\", filename), \"rt\", encoding=\"utf8\") as f:\n reader = csv.reader(f)\n for line in reader:\n currencies[line[1]] = Currency(line[1], line[0], float(line[2]))\n return currencies",
"def __init__(self,\n config,\n stream_handle,\n exception_callback):\n\n # Call the superclass constructor\n super(DostaAbcdjmCsppParser, self).__init__(config,\n stream_handle,\n exception_callback,\n DATA_REGEX)",
"def load_ini_file(ini_file_path):\n config = configparser.ConfigParser()\n config.read(ini_file_path)\n cfg = {}\n\n # Load hyperparameters\n cfg[\"hyperparameters\"] = {}\n cfg[\"hyperparameters\"][\"gpu_id\"] = config.getint(\"hyperparameters\", \"gpu_id\")\n cfg[\"hyperparameters\"][\"seed\"] = config.getint(\"hyperparameters\", \"seed\")\n cfg[\"hyperparameters\"][\"optimizer\"] = config.get(\"hyperparameters\", \"optimizer\")\n cfg[\"hyperparameters\"][\"lr\"] = config.getfloat(\"hyperparameters\", \"lr\")\n cfg[\"hyperparameters\"][\"momentum\"] = config.getfloat(\"hyperparameters\", \"momentum\")\n cfg[\"hyperparameters\"][\"clip\"] = config.getfloat(\"hyperparameters\", \"clip\")\n cfg[\"hyperparameters\"][\"dropout\"] = config.getfloat(\"hyperparameters\", \"dropout\")\n cfg[\"hyperparameters\"][\"batch_size\"] = config.getint(\"hyperparameters\", \"batch_size\")\n cfg[\"hyperparameters\"][\"embedding_dim\"] = config.getint(\"hyperparameters\", \"embedding_dim\")\n cfg[\"hyperparameters\"][\"commun_embed_size\"] = config.getint(\"hyperparameters\", \"commun_embed_size\")\n cfg[\"hyperparameters\"][\"num_epochs\"] = config.getint(\"hyperparameters\", \"num_epochs\")\n cfg[\"hyperparameters\"][\"use_one_hot\"] = config.getboolean(\"hyperparameters\", \"use_one_hot\")\n cfg[\"hyperparameters\"][\"max_input_length\"] = config.getint(\"hyperparameters\", \"max_input_length\")\n cfg[\"hyperparameters\"][\"max_num_answers\"] = config.getint(\"hyperparameters\", \"max_num_answers\")\n cfg[\"hyperparameters\"][\"use_dnc_c\"] = config.getboolean(\"hyperparameters\", \"use_dnc_c\") \n cfg[\"hyperparameters\"][\"use_dnc_q\"] = config.getboolean(\"hyperparameters\", \"use_dnc_q\")\n cfg[\"hyperparameters\"][\"share_memory\"] = config.getboolean(\"hyperparameters\", \"share_memory\")\n cfg[\"hyperparameters\"][\"weight_decay\"] = config.getfloat(\"hyperparameters\", \"weight_decay\")\n cfg[\"hyperparameters\"][\"use_clip_grad\"] = config.getboolean(\"hyperparameters\", \"use_clip_grad\")\n cfg[\"hyperparameters\"][\"clip_value\"] = config.getfloat(\"hyperparameters\", \"clip_value\")\n cfg[\"hyperparameters\"][\"lr_reduce_after\"] = config.getint(\"hyperparameters\", \"lr_reduce_after\")\n cfg[\"hyperparameters\"][\"lr_decay_rate\"] = config.getfloat(\"hyperparameters\", \"lr_decay_rate\")\n cfg[\"hyperparameters\"][\"grad_flow_interval\"] = config.getfloat(\"hyperparameters\", \"grad_flow_interval\")\n cfg[\"hyperparameters\"][\"add_noise\"] = config.getboolean(\"hyperparameters\", \"add_noise\")\n cfg[\"hyperparameters\"][\"finetune\"] = config.getboolean(\"hyperparameters\", \"finetune\")\n cfg[\"hyperparameters\"][\"fc_flag\"] = config.getboolean(\"hyperparameters\", \"fc_flag\")\n\n # Load lstm parameters\n cfg[\"lstm\"] = {}\n cfg[\"lstm\"][\"hidden_dim\"] = config.getint(\"lstm\", \"hidden_dim\")\n cfg[\"lstm\"][\"num_layers\"] = config.getint(\"lstm\", \"num_layers\")\n cfg[\"lstm\"][\"dropout\"] = config.getfloat(\"lstm\", \"dropout\")\n\n # Load dnc_q parameters\n cfg[\"dnc_q\"] = {}\n cfg[\"dnc_q\"][\"input_size\"] = config.getint(\"dnc_q\", \"input_size\")\n cfg[\"dnc_q\"][\"output_size\"] = config.getint(\"dnc_q\", \"output_size\")\n cfg[\"dnc_q\"][\"rnn_type\"] = config.get(\"dnc_q\", \"rnn_type\")\n cfg[\"dnc_q\"][\"hidden_dim\"] = config.getint(\"dnc_q\", \"hidden_dim\")\n cfg[\"dnc_q\"][\"memory_type\"] = config.get(\"dnc_q\", \"memory_type\")\n cfg[\"dnc_q\"][\"num_layers\"] = config.getint(\"dnc_q\", \"num_layers\")\n cfg[\"dnc_q\"][\"num_layers_hidden\"] = 
config.getint(\"dnc_q\", \"num_layers_hidden\")\n cfg[\"dnc_q\"][\"n\"] = config.getint(\"dnc_q\", \"n\")\n cfg[\"dnc_q\"][\"w\"] = config.getint(\"dnc_q\", \"w\")\n cfg[\"dnc_q\"][\"r\"] = config.getint(\"dnc_q\", \"r\")\n cfg[\"dnc_q\"][\"s_r\"] = config.getint(\"dnc_q\", \"t_r\")\n cfg[\"dnc_q\"][\"t_r\"] = config.getint(\"dnc_q\", \"s_r\")\n cfg[\"dnc_q\"][\"pass_through_mem\"] = config.getboolean(\"dnc_q\", \"pass_through_mem\")\n cfg[\"dnc_q\"][\"reset_experience\"] = config.getboolean(\"dnc_q\", \"reset_experience\")\n cfg[\"dnc_q\"][\"debug\"] = config.getboolean(\"dnc_q\", \"debug\")\n cfg[\"dnc_q\"][\"lr\"] = config.getfloat(\"dnc_q\", \"lr\")\n cfg[\"dnc_q\"][\"dropout\"] = config.getfloat(\"dnc_q\", \"dropout\")\n\n # Load dnc_c parameters\n cfg[\"dnc_c\"] = {}\n cfg[\"dnc_c\"][\"output_size\"] = config.getint(\"dnc_c\", \"output_size\")\n cfg[\"dnc_c\"][\"rnn_type\"] = config.get(\"dnc_c\", \"rnn_type\")\n cfg[\"dnc_c\"][\"hidden_dim\"] = config.getint(\"dnc_c\", \"hidden_dim\")\n cfg[\"dnc_c\"][\"memory_type\"] = config.get(\"dnc_c\", \"memory_type\")\n cfg[\"dnc_c\"][\"num_layers\"] = config.getint(\"dnc_c\", \"num_layers\")\n cfg[\"dnc_c\"][\"num_layers_hidden\"] = config.getint(\"dnc_c\", \"num_layers_hidden\")\n cfg[\"dnc_c\"][\"n\"] = config.getint(\"dnc_c\", \"n\")\n cfg[\"dnc_c\"][\"w\"] = config.getint(\"dnc_c\", \"w\")\n cfg[\"dnc_c\"][\"r\"] = config.getint(\"dnc_c\", \"r\")\n cfg[\"dnc_c\"][\"s_r\"] = config.getint(\"dnc_c\", \"t_r\")\n cfg[\"dnc_c\"][\"t_r\"] = config.getint(\"dnc_c\", \"s_r\")\n cfg[\"dnc_c\"][\"pass_through_mem\"] = config.getboolean(\"dnc_c\", \"pass_through_mem\")\n cfg[\"dnc_c\"][\"reset_experience\"] = config.getboolean(\"dnc_c\", \"reset_experience\")\n cfg[\"dnc_c\"][\"debug\"] = config.getboolean(\"dnc_c\", \"debug\")\n cfg[\"dnc_c\"][\"lr\"] = config.getfloat(\"dnc_c\", \"lr\")\n cfg[\"dnc_c\"][\"dropout\"] = config.getfloat(\"dnc_c\", \"dropout\")\n cfg[\"dnc_c\"][\"type\"] = config.get(\"dnc_c\", \"type\")\n cfg[\"dnc_c\"][\"nonlinearity\"] = config.get(\"dnc_c\", \"nonlinearity\")\n cfg[\"dnc_c\"][\"concat_out_rv\"] = config.getboolean(\"dnc_c\", \"concat_out_rv\")\n cfg[\"dnc_c\"][\"bidirectional\"] = config.getboolean(\"dnc_c\", \"bidirectional\")\n\n # Load logging paths\n cfg[\"logging\"] = {}\n cfg[\"logging\"][\"tensorboard_dir\"] = config.get(\"logging\", \"tensorboard_dir\")\n cfg[\"logging\"][\"checkpoints_dir\"] = config.get(\"logging\", \"checkpoints_dir\")\n cfg[\"logging\"][\"results_dir\"] = config.get(\"logging\", \"results_dir\")\n cfg[\"logging\"][\"grad_flow_dir\"] = config.get(\"logging\", \"grad_flow_dir\")\n\n # Load paths\n cfg[\"paths\"] = {}\n cfg[\"paths\"][\"input\"] = config.get(\"paths\", \"input\")\n cfg[\"paths\"][\"json_q_path_tr\"] = config.get(\"paths\", \"json_q_path_tr\")\n cfg[\"paths\"][\"json_q_path_val\"] = config.get(\"paths\", \"json_q_path_val\")\n cfg[\"paths\"][\"json_a_path_tr\"] = config.get(\"paths\", \"json_a_path_tr\")\n cfg[\"paths\"][\"json_a_path_val\"] = config.get(\"paths\", \"json_a_path_val\")\n cfg[\"paths\"][\"json_q_path_test\"] = config.get(\"paths\", \"json_q_path_test\")\n cfg[\"paths\"][\"dnc_q\"] = config.get(\"paths\", \"dnc_q\")\n cfg[\"paths\"][\"dnc_c\"] = config.get(\"paths\", \"dnc_c\")\n return cfg",
"def createCfg_analyze(self, jobOptions): \n lines = []\n ##lines.append(\"process.fwliteInput.fileNames = cms.vstring(%s)\" % [ os.path.basename(inputFile) for inputFile in inputFiles ])\n lines.append(\"process.fwliteInput.fileNames = cms.vstring(%s)\" % jobOptions['ntupleFiles'])\n lines.append(\"process.fwliteOutput.fileName = cms.string('%s')\" % os.path.basename(jobOptions['histogramFile']))\n lines.append(\"process.analyze_jetToTauFakeRate.process = cms.string('%s')\" % jobOptions['sample_category'])\n lines.append(\"process.analyze_jetToTauFakeRate.era = cms.string('%s')\" % self.era)\n lines.append(\"process.analyze_jetToTauFakeRate.triggers_1e = cms.vstring(%s)\" % self.triggers_1e)\n lines.append(\"process.analyze_jetToTauFakeRate.use_triggers_1e = cms.bool(%s)\" % (\"1e\" in jobOptions['triggers']))\n lines.append(\"process.analyze_jetToTauFakeRate.triggers_1mu = cms.vstring(%s)\" % self.triggers_1mu)\n lines.append(\"process.analyze_jetToTauFakeRate.use_triggers_1mu = cms.bool(%s)\" % (\"1mu\" in jobOptions['triggers']))\n lines.append(\"process.analyze_jetToTauFakeRate.triggers_1e1mu = cms.vstring(%s)\" % self.triggers_1e1mu)\n lines.append(\"process.analyze_jetToTauFakeRate.use_triggers_1e1mu = cms.bool(%s)\" % (\"1e1mu\" in jobOptions['triggers']))\n lines.append(\"process.analyze_jetToTauFakeRate.chargeSelection = cms.string('%s')\" % jobOptions['charge_selection'])\n lines.append(\"process.analyze_jetToTauFakeRate.jet_minPt = cms.double('%f')\" % jobOptions['jet_minPt'])\n lines.append(\"process.analyze_jetToTauFakeRate.jet_maxPt = cms.double('%f')\" % jobOptions['jet_maxPt'])\n lines.append(\"process.analyze_jetToTauFakeRate.jet_minAbsEta = cms.double('%f')\" % jobOptions['jet_minAbsEta'])\n lines.append(\"process.analyze_jetToTauFakeRate.jet_maxAbsEta = cms.double('%f')\" % jobOptions['jet_maxAbsEta'])\n lines.append(\"process.analyze_jetToTauFakeRate.hadTauSelection_denominator = cms.string('%s')\" % jobOptions['hadTau_selection_denominator'])\n lines.append(\"process.analyze_jetToTauFakeRate.hadTauSelections_numerator = cms.vstring(\")\n for hadTau_selection in jobOptions['hadTau_selections_numerator']:\n lines.append(\" '%s',\" % hadTau_selection)\n lines.append(\")\")\n lines.append(\"process.analyze_jetToTauFakeRate.absEtaBins = cms.vdouble(%s)\" % jobOptions['absEtaBins'])\n lines.append(\"process.analyze_jetToTauFakeRate.use_HIP_mitigation_mediumMuonId = cms.bool(%s)\" % jobOptions['use_HIP_mitigation_mediumMuonId'])\n lines.append(\"process.analyze_jetToTauFakeRate.isMC = cms.bool(%s)\" % jobOptions['is_mc'])\n lines.append(\"process.analyze_jetToTauFakeRate.central_or_shift = cms.string('%s')\" % jobOptions['central_or_shift'])\n lines.append(\"process.analyze_jetToTauFakeRate.lumiScale = cms.double(%f)\" % jobOptions['lumi_scale'])\n lines.append(\"process.analyze_jetToTauFakeRate.apply_genWeight = cms.bool(%s)\" % jobOptions['apply_genWeight'])\n lines.append(\"process.analyze_jetToTauFakeRate.apply_trigger_bits = cms.bool(%s)\" % jobOptions['apply_trigger_bits'])\n create_cfg(self.cfgFile_analyze, jobOptions['cfgFile_modified'], lines)",
"def __init__(self, configfile, ikfile, filterfile):\n \n self.configfile = configfile\n self.filterfile = filterfile\n self.ikfile=ikfile\n self.ckfile=\"test.ck\"\n self.fkfile=\"test.fk\"\n self.sclkfile=\"fakesclk\"\n\n with open(configfile, newline='') as f:\n reader = csv.reader(f,delimiter=' ', skipinitialspace=True)\n type=next(reader)\n if (type[0].lower() == 'polygon'):\n self.x=[]\n self.y=[]\n self.z=[]\n n_lines = 0\n for row in reader:\n n_lines=n_lines+1\n if n_lines==1:\n if row[0]=='fill_factor':\n self.fillfactor = float(row[1])\n if n_lines>1:\n tmpx=np.tan(np.radians(float(row[0])))\n tmpy=np.tan(np.radians(float(row[1])))\n tmpz=1\n self.x.append(tmpx)\n self.y.append(tmpy)\n self.z.append(tmpz)\n self.save_poly(self.ikfile,[0,0,1],self.x,self.y,self.z)\n \n elif (type[0].lower() == 'circle'):\n self.x=float(next(reader)[0])\n self.save_circ(self.ikfile,[0,0,1],self.x)\n self.fillfactor = float(next(reader)[1])\n\n else:\n sys.exit(\"In file %s, instrument FOV is invalid. Only \\\"Circle\\\" or \\\"Polygon\\\" allowed.\" %(configfile))\n \n with open(filterfile, newline='') as f:\n transform = pd.read_csv(f, sep='\\s+', index_col='colors')\n self.transforms = transform",
"def __init__(self, config_file_name=\"config.json\"):\n with open(config_file_name, \"r\") as config:\n f = dict(json.load(config))\n for key, value in f.items():\n setattr(self, key, value)",
"def load(cls):\n \n # Loop through problems and build patient problem lists:\n probs = csv.reader(file(PROBLEMS_FILE,'U'),dialect='excel-tab')\n header = probs.next() \n for prob in probs:\n cls(dict(zip(header,prob))) # Create a problem instance ",
"def createCfg_analyze(self, jobOptions, sample_info):\n\n additionalJobOptions = [\n 'chargeSelection',\n 'jet_minPt',\n 'jet_maxPt',\n 'jet_minAbsEta',\n 'jet_maxAbsEta',\n 'hadTau_selection_tight',\n 'hadTauSelection_denominator',\n 'hadTauSelections_numerator',\n 'trigMatchingOptions',\n 'absEtaBins',\n 'decayModes'\n ]\n\n lines = super(analyzeConfig_jetToTauFakeRate, self).createCfg_analyze(jobOptions, sample_info, additionalJobOptions)\n create_cfg(self.cfgFile_analyze, jobOptions['cfgFile_modified'], lines)",
"def import_csv(self, csvfileobject):\n # Clear previously stored info\n self._tracks = []\n self._selected = None\n\n for row in csvfileobject:\n if row[0] == \"T\":\n track = self.addTrack()\n track.properties = row\n elif row[0] == \"P\":\n period = self.addPeriod([0,1,'-'])\n period.properties = row",
"def make_config():\n # find date of data obtained\n current_pathname = os.path.basename(os.getcwd())\n guess_date = extract_date(current_pathname)\n\n while(True):\n if guess_date is None:\n prompt = 'YYYYMMDD'\n else:\n prompt = guess_date\n\n string = input('Date of observation [{}]: '.format(prompt))\n input_date = extract_date(string)\n if input_date is None:\n if guess_date is None:\n continue\n else:\n input_date = guess_date\n break\n else:\n break\n \n input_datetime = datetime.datetime.strptime(input_date, '%Y-%m-%d')\n\n # create config object\n config = configparser.ConfigParser()\n\n config.add_section('data')\n\n config.set('data', 'telescope', 'Keck-I')\n config.set('data', 'instrument', 'HIRES')\n config.set('data', 'rawpath', 'rawdata')\n #config.set('data', 'statime_key', statime_key)\n #config.set('data', 'exptime_key', exptime_key)\n\n config.add_section('reduce')\n config.set('reduce', 'midpath', 'midproc')\n config.set('reduce', 'figpath', 'images')\n config.set('reduce', 'odspath', 'onedspec')\n config.set('reduce', 'mode', 'normal')\n config.set('reduce', 'oned_suffix', 'ods')\n config.set('reduce', 'fig_format', 'png')\n \n config.add_section('reduce.bias')\n config.set('reduce.bias', 'bias_file', '${reduce:midpath}/bias.fits')\n config.set('reduce.bias', 'cosmic_clip', str(10))\n config.set('reduce.bias', 'maxiter', str(5))\n config.set('reduce.bias', 'smooth', 'yes')\n config.set('reduce.bias', 'smooth_method', 'gaussian')\n config.set('reduce.bias', 'smooth_sigma', str(3))\n config.set('reduce.bias', 'smooth_mode', 'nearest')\n\n config.add_section('reduce.trace')\n config.set('reduce.trace', 'minimum', str(1e-3))\n config.set('reduce.trace', 'scan_step', str(100))\n config.set('reduce.trace', 'separation', '100:84, 1500:45, 3000:14')\n config.set('reduce.trace', 'filling', str(0.2))\n config.set('reduce.trace', 'align_deg', str(2))\n config.set('reduce.trace', 'display', 'no')\n config.set('reduce.trace', 'degree', str(4))\n config.set('reduce.trace', 'file', '${reduce:midpath}/trace.fits')\n\n config.add_section('reduce.flat')\n config.set('reduce.flat', 'file', '${reduce:midpath}/flat.fits')\n\n # write to config file\n filename = 'HIRES.{}.cfg'.format(input_date)\n outfile = open(filename, 'w')\n for section in config.sections():\n maxkeylen = max([len(key) for key in config[section].keys()])\n outfile.write('[{}]'.format(section)+os.linesep)\n fmt = '{{:{}s}} = {{}}'.format(maxkeylen)\n for key, value in config[section].items():\n outfile.write(fmt.format(key, value)+os.linesep)\n outfile.write(os.linesep)\n outfile.close()\n\n print('Config file written to {}'.format(filename))",
"def run_from_file(f):\n #set defaults\n x_loops=1;max_steps=0;display_on=True;max_fps=10;garden_size=13;tako_number=20\n pop_max=40;max_width=1800;max_height=900;collect_data=True;export_all=False\n rand_nets=False;max_gen=0;genetic_mode=\"Plain\";learning_on=False\n seeds=None;garden_mode=\"Diverse Static\";family_detection=None;family_mod=0\n record_inbreeding=True;inbreed_lim=1.1;filename=\"default file\"\n hla_genes=0;binary_health=0;carrier_percentage=40;two_envs=False\n diff_envs=False;migration_rate=0;phen_pref=False\n\n \n atr_dict = {\"x_loops\": x_loops, \"max_steps\": max_steps,\n \"display_on\": display_on, \"max_fps\": max_fps,\n \"garden_size\": garden_size,\n \"tako_number\": tako_number, \"pop_max\": pop_max,\n \"max_width\": max_width, \"max_height\": max_height,\n \"collect_data\": collect_data, \"export_all\": export_all,\n \"rand_nets\": rand_nets, \"max_gen\": max_gen,\n \"genetic_mode\": genetic_mode, \"learning_on\": learning_on,\n \"seeds\": seeds, \"garden_mode\": garden_mode,\n \"family_detection\": family_detection, \"family_mod\": family_mod,\n \"record_inbreeding\": record_inbreeding,\n \"inbreed_lim\": inbreed_lim, \"filename\": filename,\n \"hla_genes\": hla_genes, \"binary_health\": binary_health,\n \"carrier_percentage\": carrier_percentage,\n \"two_envs\": two_envs, \"diff_envs\": diff_envs,\n \"migration_rate\": migration_rate, \"phen_pref\": phen_pref}\n \n ints = [\"x_loops\", \"max_steps\", \"garden_size\", \"tako_number\", \"pop_max\",\n \"max_width\", \"max_height\", \"max_gen\", \"hla_genes\",\n \"binary_health\", \"carrier_percentage\", \"max_fps\"]\n floats = [\"family_mod\", \"inbreed_lim\", \"migration_rate\"]\n strs = [\"genetic_mode\", \"garden_mode\", \"filename\"]\n bools = [\"display_on\", \"collect_data\", \"export_all\", \"rand_nets\",\n \"learning_on\", \"record_inbreeding\", \"two_envs\", \"diff_envs\",\n \"phen_pref\"]\n\n #then sets all user-defined settings from the file f\n with open(f) as exp_file:\n for line in exp_file:\n #comments\n if line[0] == \"#\":\n pass\n #blank line = run what we have, then continue\n #to read the file for a new set of parameters\n elif line == \"\\n\":\n run_experiment(atr_dict[\"x_loops\"], atr_dict[\"max_steps\"],\n atr_dict[\"display_on\"], atr_dict[\"max_fps\"],\n atr_dict[\"garden_size\"],\n atr_dict[\"tako_number\"], atr_dict[\"pop_max\"],\n atr_dict[\"max_width\"], atr_dict[\"max_height\"],\n atr_dict[\"collect_data\"], atr_dict[\"export_all\"],\n atr_dict[\"rand_nets\"], atr_dict[\"max_gen\"],\n atr_dict[\"genetic_mode\"],\n atr_dict[\"learning_on\"],\n atr_dict[\"seeds\"], atr_dict[\"garden_mode\"],\n atr_dict[\"family_detection\"],\n atr_dict[\"family_mod\"],\n atr_dict[\"record_inbreeding\"],\n atr_dict[\"inbreed_lim\"],\n atr_dict[\"hla_genes\"], atr_dict[\"binary_health\"],\n atr_dict[\"carrier_percentage\"],\n atr_dict[\"filename\"],\n atr_dict[\"two_envs\"],\n atr_dict[\"diff_envs\"],\n atr_dict[\"migration_rate\"],\n atr_dict[\"phen_pref\"])\n #reset defaults\n atr_dict = {\"x_loops\": x_loops, \"max_steps\": max_steps,\n \"display_on\": display_on, \"max_fps\": max_fps,\n \"garden_size\": garden_size,\n \"tako_number\": tako_number, \"pop_max\": pop_max,\n \"max_width\": max_width, \"max_height\": max_height,\n \"collect_data\": collect_data, \"export_all\": export_all,\n \"rand_nets\": rand_nets, \"max_gen\": max_gen,\n \"genetic_mode\": genetic_mode, \"learning_on\": learning_on,\n \"seeds\": seeds, \"garden_mode\": garden_mode,\n \"family_detection\": family_detection,\n 
\"family_mod\": family_mod,\n \"record_inbreeding\": record_inbreeding,\n \"inbreed_lim\": inbreed_lim, \"filename\": filename,\n \"hla_genes\": hla_genes, \"binary_health\": binary_health,\n \"carrier_percentage\": carrier_percentage,\n \"two_envs\": two_envs, \"diff_envs\": diff_envs,\n \"migration_rate\": migration_rate, \"phen_pref\": phen_pref}\n else:\n #get rid of newline character\n line = line[:-1]\n line = line.split(\": \")\n if line[0] in ints:\n val = int(line[1])\n elif line[0] in floats:\n val = float(line[1])\n elif line[0] in bools:\n val = True if line[1] == \"True\" else False\n elif line[0] in strs:\n val = line[1]\n elif line[0] == \"family_detection\":\n if line[1] == \"None\":\n val = None\n else:\n val = line[1]\n elif line[0] == \"seeds\":\n val = line[1].split(\" \")\n atr_dict[line[0]] = val\n #run the last one in the file\n run_experiment(atr_dict[\"x_loops\"], atr_dict[\"max_steps\"],\n atr_dict[\"display_on\"], atr_dict[\"max_fps\"],\n atr_dict[\"garden_size\"],\n atr_dict[\"tako_number\"], atr_dict[\"pop_max\"],\n atr_dict[\"max_width\"], atr_dict[\"max_height\"],\n atr_dict[\"collect_data\"], atr_dict[\"export_all\"],\n atr_dict[\"rand_nets\"], atr_dict[\"max_gen\"],\n atr_dict[\"genetic_mode\"],\n atr_dict[\"learning_on\"],\n atr_dict[\"seeds\"], atr_dict[\"garden_mode\"],\n atr_dict[\"family_detection\"],\n atr_dict[\"family_mod\"],\n atr_dict[\"record_inbreeding\"],\n atr_dict[\"inbreed_lim\"], atr_dict[\"hla_genes\"],\n atr_dict[\"binary_health\"], atr_dict[\"carrier_percentage\"],\n atr_dict[\"two_envs\"], atr_dict[\"diff_envs\"],\n atr_dict[\"migration_rate\"], atr_dict[\"phen_pref\"],\n atr_dict[\"filename\"])",
"def _initialize_attributes(self, string_as_file):\n for row in string_as_file:\n first = row[0]\n second = row[1]\n third = row[3]\n match first:\n case 'quadrat':\n self.quadrat = { 'id': second, 'comment': third }\n case 'waypoint':\n self.waypoint = { 'name': second, 'comment': third }",
"def load_from_file_csv(cls):\n list_rectangle = [\"id\", \"width\", \"height\", \"x\", \"y\"]\n list_square = [\"id\", \"size\", \"x\", \"y\"]\n filename = cls.__name__ + \".csv\"\n dictionary = []\n result = []\n\n try:\n with open(filename, encoding=\"utf-8\") as file:\n obj_list = csv.reader(file)\n # read obj_list <_csv.reader object at 0x7fbfe5614b38>\n if cls.__name__ == \"Rectangle\":\n for list in obj_list:\n # create dictionary\n dict = {}\n for key, value in zip(list_rectangle, list):\n dict[key] = int(value)\n # create an object and append to a list\n result.append(cls.create(**dict))\n if cls.__name__ == \"Square\":\n for list in obj_list:\n # create dictionary\n dict = {}\n for key, value in zip(list_square, list):\n dict[key] = int(value)\n # create an object and append to a list\n result.append(cls.create(**dict))\n return result\n except:\n return result",
"def __init__(self, file_path=None, writer=None,\n output_encoding=\"utf-8\", input_encoding=\"utf-8\",\n try_encodings_hard=True, fallback_input_encodings=None,\n from_row=0, from_col=0, ignore_blank_rows=False,\n input_dialect=csv.excel):\n self.file_path = None\n self.output_encoding = output_encoding\n self.input_encoding = input_encoding\n\n # useful to know about this for any future work on encodings: https://docs.python.org/2.4/lib/standard-encodings.html\n if fallback_input_encodings is None and try_encodings_hard:\n fallback_input_encodings = [\"cp1252\", \"cp1251\", \"iso-8859-1\", \"iso-8859-2\", \"windows-1252\", \"windows-1251\", \"mac_roman\"]\n else:\n fallback_input_encodings = []\n self.fallback_input_encodings = fallback_input_encodings\n\n self.from_row = from_row\n self.from_col = from_col\n self.ignore_blank_rows = ignore_blank_rows\n self.input_dialect = input_dialect\n\n # Store the csv contents in a list of tuples, [ (column_header, [contents]) ]\n self.data = []\n\n # Get an open file object from the given file_path or file object\n if file_path is not None:\n if type(file_path) == file:\n self.file_path = file_path.name\n # NOTE: if you have passed in a file object, it MUST work - as in, it must be set to\n # read the right encoding, and everything. We will not try to parse it again if it\n # fails the first time. If it is closed, you will also need to be sure to set the input_encoding.\n # All round - better if you just give us the file path\n self.file_object = file_path\n if self.file_object.closed:\n self.file_object = codecs.open(self.file_object.name, 'r+b', encoding=self.input_encoding)\n\n # explicitly read this file in\n self._read_file(self.file_object)\n else:\n self.file_path = file_path\n if os.path.exists(file_path) and os.path.isfile(file_path):\n self._read_from_path(file_path)\n else:\n # If the file doesn't exist, create it.\n self.file_object = codecs.open(file_path, 'w+b', encoding=self.output_encoding)\n\n elif writer is not None:\n self.file_object = writer"
] | [
"0.60839015",
"0.5907737",
"0.55338466",
"0.548672",
"0.53889763",
"0.53884614",
"0.5330307",
"0.53111887",
"0.5284984",
"0.52468073",
"0.52041686",
"0.5191584",
"0.5108381",
"0.50697786",
"0.5067091",
"0.5060284",
"0.50485945",
"0.50454515",
"0.5044164",
"0.50334924",
"0.5018432",
"0.5008941",
"0.5008442",
"0.5007505",
"0.50072175",
"0.5003448",
"0.4994699",
"0.49932322",
"0.49921677",
"0.49901196",
"0.4983941",
"0.49749562",
"0.4962419",
"0.49527073",
"0.49526107",
"0.4949039",
"0.49322674",
"0.49276924",
"0.49276483",
"0.49191475",
"0.4917394",
"0.49080914",
"0.49048698",
"0.49035197",
"0.4903384",
"0.4894498",
"0.489165",
"0.48903617",
"0.48855573",
"0.48845732",
"0.488147",
"0.48753995",
"0.4874854",
"0.48731965",
"0.48646784",
"0.48562422",
"0.48483914",
"0.48475918",
"0.48445517",
"0.4841805",
"0.48408788",
"0.48406297",
"0.48395923",
"0.4825992",
"0.48236558",
"0.48227423",
"0.48193118",
"0.48168033",
"0.48157474",
"0.48122686",
"0.48077592",
"0.4804118",
"0.4801693",
"0.48008135",
"0.47997782",
"0.4782641",
"0.47825465",
"0.478234",
"0.47745138",
"0.47741854",
"0.47741854",
"0.47734767",
"0.47717786",
"0.47658598",
"0.47636172",
"0.4762444",
"0.47586673",
"0.47555515",
"0.47486",
"0.4746311",
"0.47453618",
"0.47415313",
"0.4735488",
"0.47302964",
"0.47297665",
"0.47291428",
"0.47276947",
"0.47258106",
"0.4723532",
"0.47157326"
] | 0.50761986 | 13 |
Function for rendering HTML code of this element | def html(self):
bop = ('<b>' if self._bold else '')
iop = ('<i>' if self._italic else '')
icl = ('</i>' if self._italic else '')
bcl = ('</b>' if self._bold else '')
txt = escape(self._text)
s = '%s%s%s%s%s' % (bop, iop, txt, icl, bcl)
return '%s' % s | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __html__(self):\n return str(self)",
"def rawHTMLrendered(self):",
"def _repr_html_(self):\n return self.__repr__()",
"def _repr_html_(self):\n return self.__repr__()",
"def __html__(self):\n return self.html",
"def html(self) -> str:\n if self._inner_element:\n return self.start_tag + self._inner_element.html + self.end_tag\n return super().html",
"def _repr_html_(self):\n\n return self._repr__base(rich_output=True)",
"def _repr_html_(self):\n\n return self._repr__base(rich_output=True)",
"def html(self) -> SafeString:\n return format_html(self.__html__())",
"def html(self) -> SafeString:\n return format_html(self.__html__())",
"def render(self, value, context=None):\n if self.raw_html is not None:\n return format_html(self.raw_html)\n else:\n return ''",
"def _repr_html_(self):\n return self.data.to_html()",
"def _repr_html_(self) -> str:\n output_html = self.template_base.render(context=self.context)\n return output_html",
"def html(self) -> str:\n return self._html",
"def render_html(self):\n return self.template.render(content=self.content, **self.styles)",
"def get_inner_html(self):\n\n pass",
"def __html__(self):\n if not self.hasArticle:\n return None\n\n if self.bbcode_is_active:\n return self._bbcodeAsHtml\n\n return self.html",
"def render(self):\n self.rendered = self.value\n return self.rendered",
"def __html__(self) -> str:\n components = [\n self.attributee_html,\n f'\"{self.linked_title}\"',\n self.date.string if self.date else '',\n ]\n return self.components_to_html(components)",
"def _repr_html_(self):\n return util.tree_sequence_html(self)",
"def __html__(self) -> str:\n components = [\n self.attributee_html,\n self.linked_title if self.title else 'untitled document',\n self.date.string if self.date else '',\n self.descriptive_phrase,\n f'archived in {self.collection}' if self.collection else '',\n ]\n return self.components_to_html(components)",
"def _repr_html_(self):\n return self._frame._repr_html_()",
"def get_html(self):\r\n pass",
"def _repr_html_(self):\n return util.tree_html(self)",
"def html(self):\n return self._html",
"def renderCode(self):\n className = type(self).__name__\n raise NotImplementedError('Method renderCode needs to be implemented by ' + className)",
"def render(self):\n start_tag = format_html('<div {}>', mark_safe(' '.join(self.field_attrs)))\n output = [start_tag]\n for widget in self:\n output.append(force_text(widget))\n output.append('</div>')\n return mark_safe('\\n'.join(output))",
"def getHtml(self):\n return self.html",
"def code(self):\n return '{}\\n<script>{}</script>'.format(self.html, self.js)",
"def to_html(self, data=None, **kwargs) -> str:\n html = self.create_container()\n return html",
"def render(self):\n return mark_safe(u'%s' % u'\\n'.join([u'<td class=\"qcradiobutton\">%s</td>'\n % force_unicode(w) for w in self]))",
"def get_html_string(self, **kwargs):\n ...",
"def render(self):\n raise NotImplementedError",
"def get_html(self):\r\n context = {\r\n 'display_name': self.display_name_with_default,\r\n 'element_id': self.element_id,\r\n 'instructions_html': self.instructions,\r\n 'content_html': self._render_content()\r\n }\r\n\r\n return self.system.render_template('annotatable.html', context)",
"def get_outer_html(self):\n\n pass",
"def innerHTML(self) -> str:\n if self._inner_element:\n return self._inner_element.innerHTML\n return super().innerHTML",
"def _repr_html_(self):\n return \"<td><b>{0}</b></td><td>{1}</td>\".format(self.id, self.title)",
"def render(self, value):\r\n return value",
"def render(self):\n return mark_safe(u'\\n'.join([force_unicode(w) for w in self]))",
"def generate_html(self):\n html_text_1 = \"\"\"\n <div class=\"concept\">\n\n \t\t<div class=\"concept-title\">\n\n \t\t\t\t\"\"\" + self.title\n\n html_text_2 = \"\"\"\n \t\t</div>\n\n \t\t<div class=\"concept-description\">\n\n\t\t <p>\n\t\t\t\n \t\t \t\t\"\"\" + self.description + \"\"\" \n \n </p>\"\"\"\n\n html_text_3 = '''\n\n \t\t</div>\n\n </div>'''\n\n return html_text_1 + html_text_2 + html_text_3",
"def value_as_html(self):\n property_name = \"_%s_as_html\" % self.attribute.type\n return getattr(self, property_name, self.value_as_text)",
"def as_html(self):\r\n return mark_safe(' '.join(['%s=\"%s\"' % (k, escape(v if not callable(v) else v()))\r\n for k, v in six.iteritems(self)]))",
"def render(self):\n raise NotImplementedError()",
"def render(self) -> str:\n\n return \"\\n\".join(self.render_iter())",
"def render(self):",
"def get_html_string_representation(self):\n return self.map.get_root().render()",
"def get_html(self, *args, **kwargs):\n return Text(self.get_data(*args, **kwargs), escape=False)",
"def innerHTML(element):\n return element.encode_contents()",
"def render(self):\n try:\n if self.permit():\n return self.renderer.render(self)\n except AttributeError:\n if self.renderer is None:\n raise NotImplementedError(\"Should have implemented a renderer for {0}\".format(self.name))\n else:\n raise\n return ''",
"def _repr_html_(self):\n import jinja2 # noqa\n\n call_result = self._get_call_result()\n\n id_result = str(id(self) + np.random.random()).replace(\".\", \"rr\")\n\n params = {\n \"result\": self,\n \"id_result\": id_result,\n \"call_result\": call_result,\n \"json_result\": json.dumps(self.json_, indent=2),\n }\n return jinja2.Template(RESULT_HTML_TEMPLATE).render(**params)",
"def rawHTML(self):\n #TODO : do checking for scripts and hacks here?\n return mark_safe(self.html)",
"def markup(self):\n return '%s%s%s' % (\n self.options['markup_prefix'],\n self._markup,\n self.options['markup_suffix'],\n )",
"def _repr_html_(self): # pragma: no cover\n return Utils.render_html('extent.html', extent=self)",
"def _repr_html_(self):\n return (\n f'<b>{self.__class__.__name__}</b>'\n f'<br> <b>defined by:</b> {self._str_meta_()}'\n f'<br> <b>with columns:</b> {self._str_colnames()}'\n f'<br> {len(self)} objects'\n f'<br> {self._html_table()}'\n )",
"def to_html(self) -> str:\n return f'''\n <a href=\"{self.link}\"> ({self.source_name}, {self.timestamp.strftime('%Y')}) </a>\n '''",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render_content(self):\n return mark_safe(markdown(self.content))",
"def _render_content(self):\r\n xmltree = etree.fromstring(self.content)\r\n xmltree.tag = 'div'\r\n if 'display_name' in xmltree.attrib:\r\n del xmltree.attrib['display_name']\r\n\r\n index = 0\r\n for el in xmltree.findall('.//annotation'):\r\n self._render_annotation(index, el)\r\n index += 1\r\n\r\n return etree.tostring(xmltree, encoding='unicode')",
"def render(self, **kwargs) -> str:\n return self.renderable(**kwargs).render()",
"def render(self,value):\n self.content += value\n if self._rendernl:\n self.content += self._rendernl",
"def _repr_html_(self):\n if self.container_id():\n return \"<i>This widget is already shown in this notebook</i>\"\n \n container_id = self.id + '_container'\n def set_cointainer_id():\n self.container_id._set(container_id)\n # Set container id, this gets applied in the next event loop\n # iteration, so by the time it gets called in JS, the div that\n # we define below will have been created.\n from ..app import call_later\n call_later(0.1, set_cointainer_id) # todo: always do calls in next iter\n return \"<div class='flx-container' id=%s />\" % container_id",
"def unrendered(self) -> str:",
"def raw_html(self):\n if self._html:\n return self._html\n else:\n return lxml.html.tostring(self.element, encoding=self.encoding)",
"def get_html(self):\r\n return u'This is supposed to be test html.'",
"def _render(self) -> str:\n html = self._template.render(self._transient_context)\n self._transient_context = None\n return html",
"def __repr__(self):\n\n template = \"\"\"\n - inline : {}\n \"\"\"\n\n return super().__repr__() + template.format(self.inline)",
"def __str__(self):\n t = Template(\n \"\"\"\n <h4>$title</h4>\n $imgs\n $footnotes\n <hr/>\"\"\")\n # Return result.\n return t.substitute({\n \"title\": self.title,\n \"imgs\": self.render_images(),\n \"footnotes\": self.render_footnotes()\n })",
"def render(self):\n raise RenderNotImplemented('Render function is not implemented.')",
"def render(self):\n self._render_text = self.content.replace('\\n', '<br>') # deal with new line\n return render_str(\"post.html\", p = self)",
"def __repr__(self) -> str:\n return f\"{self.text}\"",
"def to_html(self) -> str:\n coverage_class = 'zero' if self.called == 0 else 'all'\n return '''<tr id=\"func-{}\" class=\"cov-health-{}\">\n <td><a href=\"#line-{}\">{}</a></td>\n <td>{}</td><td>{}%</td><td>{}%</td>\n </tr>\\n'''.format(\n self.name, coverage_class, self.linenum, self.pretty_name, self.called,\n self.returned, self.blocks\n )",
"def _repr_html_(self):\n # pylint: disable=protected-access\n return self.folium_map._repr_html_()\n # pylint: enable=protected-access",
"def render_html(self, renderer, response_msg=''):\r\n # render ourself as a <span> + our content\r\n tree = etree.Element('span')\r\n\r\n # problem author can make this span display:inline\r\n if self.xml.get('inline', ''):\r\n tree.set('class', 'inline')\r\n\r\n for item in self.xml:\r\n # call provided procedure to do the rendering\r\n item_xhtml = renderer(item)\r\n if item_xhtml is not None:\r\n tree.append(item_xhtml)\r\n tree.tail = self.xml.tail\r\n\r\n # Add a <div> for the message at the end of the response\r\n if response_msg:\r\n tree.append(self._render_response_msg_html(response_msg))\r\n\r\n return tree",
"def __repr__(self):\n return self.text",
"def __repr__(self):\n return self.text",
"def part_render(self, attr, *a, **kw):\r\n style = kw.get('style', 'html')\r\n template = self.template(style)\r\n dt = template.get_def(attr)\r\n return unsafe(dt.render(thing = self, *a, **kw))",
"def _repr_html_(self):\n nb_ticks = 7\n delta_x = math.floor(self.width / (nb_ticks - 1))\n x_ticks = [(i) * delta_x for i in range(0, nb_ticks)]\n delta_val = delta_x * (self.vmax - self.vmin) / self.width\n val_ticks = [round(self.vmin + (i) * delta_val, 1) for i in range(0, nb_ticks)]\n\n return (\n f'<svg height=\"40\" width=\"{self.width}\">'\n + \"\".join(\n [\n (\n '<line x1=\"{i}\" y1=\"15\" x2=\"{i}\" '\n 'y2=\"27\" style=\"stroke:{color};stroke-width:2;\" />'\n ).format(\n i=i * 1,\n color=self.rgba_hex_str(\n self.vmin + (self.vmax - self.vmin) * i / (self.width - 1),\n ),\n )\n for i in range(self.width)\n ],\n )\n + '<text x=\"0\" y=\"38\" style=\"text-anchor:start; font-size:11px; font:Arial\">{}</text>'.format( # noqa\n self.vmin,\n )\n + \"\".join(\n [\n (\n '<text x=\"{}\" y=\"38\"; style=\"text-anchor:middle; font-size:11px; font:Arial\">{}</text>' # noqa\n ).format(x_ticks[i], val_ticks[i])\n for i in range(1, nb_ticks - 1)\n ],\n )\n + '<text x=\"{}\" y=\"38\" style=\"text-anchor:end; font-size:11px; font:Arial\">{}</text>'.format(\n self.width,\n self.vmax,\n )\n + '<text x=\"0\" y=\"12\" style=\"font-size:11px; font:Arial\">{}</text>'.format(\n self.caption,\n )\n + \"</svg>\"\n )",
"def render(self):\r\n super().render()",
"def htmlise(s):\n return '<div><pre class=\"tablecell\">' + html.escape(s) + '</pre></div>'",
"def _render(self) -> None:\n pass",
"def html(self):\n dis = ('disabled' if not self._enabled else '')\n met = ('post' if self._html_post else 'get')\n act = escape(self._action)\n txt = escape(self._text)\n return '<button %s formaction=\"%s\" formmethod=\"%s\">%s</button>' % (dis, act, met, txt)",
"def _repr_(self):\n return repr(self.element())",
"def to_html(self):\n return self.serializer.render(self.formatter.formatMessage(self.oldmsg))",
"def as_html(self): # pragma: no cover\n\n return render_to_string(\n self._meta.template,\n { \"table\": self } )",
"def html(self, children, ordering):\n\n raise NotImplementedError",
"def render(self):\n self._render_text = self.content.replace('\\n', '<br>')\n return TemplateFile.jinja_render_str(\"post.html\", p=self)",
"def to_html(self) -> str:\n source_name = escape(self.source_name)\n (covered, lines) = self.coverage_stats()\n lines_stats = \"{} / {} ({} lines of code)\".format(covered, lines, len(self.source_code))\n (br_covered, br_count, calls_covered, calls_count) = self.branch_stats()\n branch_stats = \"{} / {}\".format(br_covered, br_count)\n call_stats = \"{} / {}\".format(calls_covered, calls_count)\n (fn_covered, fn_count) = self.function_stats()\n fn_stats = \"{} / {}\".format(fn_covered, fn_count)\n\n self.decode_cpp_function_names()\n\n result = [\"\"\"\n <!DOCTYPE html>\n <html>\n <head>\n <meta charset=\"utf-8\">\n <title>Coverage report of file \"\"\" + source_name + \"\"\"</title>\n <style type=\"text/css\">\n /*<![CDATA[*/\n .cov-health-zero td { color: white; }\n .cov-health-zero a { color: #CCCCFF; }\n .cov-health-zero a:visited { color: #FFCCFF; }\n .cov-health-zero:nth-child(odd) td { background-color: #CC0000; }\n .cov-health-zero:nth-child(even) td { background-color: #DD0000; }\n .cov-health-na td { color: silver; }\n .cov-health-na td:nth-child(2) { visibility: hidden; }\n .branch { cursor: help; }\n .branch-taken { color: silver; }\n .branch-taken:hover { color: black; }\n .branch-not-taken { color: red; }\n .branch-not-taken:hover { color: maroon; }\n #source tbody td:last-child, #funcs tbody td:first-child\n { text-align: left; font-family: monospace; white-space: pre; }\n .sortable { border-collapse: collapse; }\n div { width: 100%; overflow: hidden; }\n .sortable td { text-align: right; padding-left: 2em; }\n .sortable tbody tr:nth-child(odd) { background-color: #FFFFCC; }\n .sortable tbody tr:nth-child(even) { background-color: #FFFFDD; }\n #source tbody tr:hover td:last-child { font-weight: bold; }\n #source tbody td:first-child { max-width: 7em; font-size: smaller; word-wrap: break-word; }\n #source tbody td:nth-child(2) { font-size: smaller; color: silver; }\n #summary { float: right; border-collapse: collapse; }\n #summary td { border: 1px solid black; }\n caption { font-weight: bold; }\n /*]]>*/\n </style>\n <script src=\"sorttable.js\"></script>\n </head>\n <body>\n <p><a href=\"index.html\">⇐ Back</a> | Go to line #<input type=\"number\" id=\"goto\" /></p>\n <h1>\"\"\" + source_name + \"\"\"</h1>\n <div>\n <table id=\"summary\">\n <caption>Summary</caption>\n <tr><td>Lines</td><td>\"\"\" + lines_stats + \"\"\"</td></tr>\n <tr><td>Branches</td><td>\"\"\" + branch_stats + \"\"\"</td></tr>\n <tr><td>Calls</td><td>\"\"\" + call_stats + \"\"\"</td></tr>\n <tr><td><a href=\"#functions\">Functions</a></td><td>\"\"\" + fn_stats + \"\"\"</td></tr>\n </ul>\n </table>\n <table class=\"sortable\" id=\"source\">\n <thead><tr><th>Branches</th><th>Cov</th><th>Line</th><th class=\"sorttable_nosort\">Source</th></tr></thead>\n <tbody>\n \"\"\"]\n result.extend(line.to_html() for line in self.source_code)\n result.append(\"\"\"\n </tbody>\n </table>\n </div>\n <h2 id=\"functions\">Functions</h2>\n <div>\n <table class=\"sortable\" id=\"funcs\">\n <thead><tr><th>Function</th><th>Calls</th><th>Ret.</th><th>Blk. Exec.</th></tr></thead>\n <tbody>\"\"\")\n result.extend(func.to_html() for func in self.source_functions)\n result.append(\"\"\"\n </tbody>\n </table>\n </div>\n <script>\n //<![CDATA[\n document.getElementById('goto').onchange = function()\n {\n location = \"#line-\" + this.value;\n }\n //]]>\n </script>\n </body>\n </html>\n \"\"\")\n return '\\n'.join(result)",
"def asHTML(self):\n return self.context.xml_get('xml_content')",
"def _html(self, message):\n content = f\"<html><body><h1>{message}</h1></body></html>\"\n return content.encode(\"utf8\")",
"def get_html(self):\r\n return self.system.render_template('lti.html', self.get_context())",
"def to_html(self, content, request, **parameters):\n raise NotImplementedError",
"def _repr_html_(self):\n import io\n import base64\n from PIL import Image\n\n library_name = \"vedo.assembly.Assembly\"\n help_url = \"https://vedo.embl.es/docs/vedo/assembly.html\"\n\n arr = self.thumbnail(zoom=1.1, elevation=-60)\n\n im = Image.fromarray(arr)\n buffered = io.BytesIO()\n im.save(buffered, format=\"PNG\", quality=100)\n encoded = base64.b64encode(buffered.getvalue()).decode(\"utf-8\")\n url = \"data:image/png;base64,\" + encoded\n image = f\"<img src='{url}'></img>\"\n\n # statisitics\n bounds = \"<br/>\".join(\n [\n vedo.utils.precision(min_x, 4) + \" ... \" + vedo.utils.precision(max_x, 4)\n for min_x, max_x in zip(self.bounds()[::2], self.bounds()[1::2])\n ]\n )\n\n help_text = \"\"\n if self.name:\n help_text += f\"<b> {self.name}:   </b>\"\n help_text += '<b><a href=\"' + help_url + '\" target=\"_blank\">' + library_name + \"</a></b>\"\n if self.filename:\n dots = \"\"\n if len(self.filename) > 30:\n dots = \"...\"\n help_text += f\"<br/><code><i>({dots}{self.filename[-30:]})</i></code>\"\n\n allt = [\n \"<table>\",\n \"<tr>\",\n \"<td>\",\n image,\n \"</td>\",\n \"<td style='text-align: center; vertical-align: center;'><br/>\",\n help_text,\n \"<table>\",\n \"<tr><td><b> nr. of objects </b></td><td>\"\n + str(self.GetNumberOfPaths())\n + \"</td></tr>\",\n \"<tr><td><b> position </b></td><td>\" + str(self.GetPosition()) + \"</td></tr>\",\n \"<tr><td><b> diagonal size </b></td><td>\"\n + vedo.utils.precision(self.diagonal_size(), 5)\n + \"</td></tr>\",\n \"<tr><td><b> bounds </b> <br/> (x/y/z) </td><td>\" + str(bounds) + \"</td></tr>\",\n \"</table>\",\n \"</table>\",\n ]\n return \"\\n\".join(allt)",
"def render(self):\n return render_to_string(\n self.template_name, self.get_context_data(), request=self.request\n )",
"def to_html(self) -> str:\n if self.count:\n class_name = 'branch-taken'\n symbol = '▷' if self.type_ == 'branch' else '○'\n else:\n class_name = 'branch-not-taken'\n symbol = '▶' if self.type_ == 'branch' else '●'\n\n info_text = ' (' + self.info + ')' if self.info else ''\n return '<span class=\"branch {}\" title=\"{} {}{} × {}\">{}</span>'.format(\n class_name, self.type_, self.id_, info_text, self.count, symbol\n )",
"def output_open_html(self):\n text = self.token[\"text\"]\n tag = self.token[\"tag\"]\n if self._parse_block_html and tag not in _pre_tags:\n text = self.inline(text)\n extra = self.token.get(\"extra\") or \"\"\n html = \"<%s%s>%s</%s>\" % (tag, extra, text, tag)\n return self.renderer.block_html(html)"
] | [
"0.76847166",
"0.7620541",
"0.7579551",
"0.7579551",
"0.7545735",
"0.7409317",
"0.7385336",
"0.7385336",
"0.7304668",
"0.7304668",
"0.72542566",
"0.7233775",
"0.7104469",
"0.7065664",
"0.70531815",
"0.7014759",
"0.69775945",
"0.6977155",
"0.69219375",
"0.6887626",
"0.6865015",
"0.68577087",
"0.6832544",
"0.6793131",
"0.6786746",
"0.6755257",
"0.6700565",
"0.6677593",
"0.6667174",
"0.66605365",
"0.66569227",
"0.6654718",
"0.6650187",
"0.6647357",
"0.66471726",
"0.6644631",
"0.66151345",
"0.657141",
"0.6570393",
"0.6562595",
"0.6546755",
"0.65458214",
"0.65410066",
"0.6525329",
"0.64996165",
"0.64961267",
"0.6492899",
"0.64831465",
"0.6472499",
"0.64690435",
"0.64618444",
"0.64429647",
"0.6442671",
"0.64419985",
"0.64315027",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.64285254",
"0.64199877",
"0.6414929",
"0.6407741",
"0.63980436",
"0.6386511",
"0.63800627",
"0.6378482",
"0.6371753",
"0.6363004",
"0.6348087",
"0.6346591",
"0.6306697",
"0.6293999",
"0.62862784",
"0.628417",
"0.62665707",
"0.626092",
"0.626092",
"0.62424207",
"0.62371033",
"0.6219805",
"0.62149316",
"0.6206116",
"0.61986023",
"0.61945295",
"0.6184101",
"0.6172638",
"0.6152366",
"0.61520904",
"0.6150414",
"0.6139339",
"0.6136774",
"0.61351085",
"0.6126126",
"0.61231214",
"0.6119979",
"0.61137086",
"0.61115175"
] | 0.7432328 | 5 |
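Note: the positive document in the row above renders an inline-text element, wrapping escaped text in optional <b>/<i> tags. The sketch below is illustrative only and not part of the dataset; the class name Text, its constructor, and the use of Python's standard html.escape are assumptions made to give the method a runnable context.

    # Hypothetical minimal element class around the html() method shown above.
    # Only the html() body mirrors the row; everything else is assumed.
    from html import escape

    class Text:
        def __init__(self, text, bold=False, italic=False):
            self._text = text
            self._bold = bold
            self._italic = italic

        def html(self):
            # Escape the raw text first, then wrap it in the optional style tags.
            bop = '<b>' if self._bold else ''
            iop = '<i>' if self._italic else ''
            icl = '</i>' if self._italic else ''
            bcl = '</b>' if self._bold else ''
            return '%s%s%s%s%s' % (bop, iop, escape(self._text), icl, bcl)

    print(Text('a < b', bold=True).html())  # prints: <b>a &lt; b</b>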
Function for rendering HTML code of this element | def html(self):
dis = ('disabled' if not self._enabled else '')
met = ('post' if self._html_post else 'get')
act = escape(self._action)
txt = escape(self._text)
return '<button %s formaction="%s" formmethod="%s">%s</button>' % (dis, act, met, txt) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __html__(self):\n return str(self)",
"def rawHTMLrendered(self):",
"def _repr_html_(self):\n return self.__repr__()",
"def _repr_html_(self):\n return self.__repr__()",
"def __html__(self):\n return self.html",
"def html(self):\n bop = ('<b>' if self._bold else '')\n iop = ('<i>' if self._italic else '')\n icl = ('</i>' if self._italic else '')\n bcl = ('</b>' if self._bold else '')\n txt = escape(self._text)\n s = '%s%s%s%s%s' % (bop, iop, txt, icl, bcl)\n return '%s' % s",
"def html(self) -> str:\n if self._inner_element:\n return self.start_tag + self._inner_element.html + self.end_tag\n return super().html",
"def _repr_html_(self):\n\n return self._repr__base(rich_output=True)",
"def _repr_html_(self):\n\n return self._repr__base(rich_output=True)",
"def html(self) -> SafeString:\n return format_html(self.__html__())",
"def html(self) -> SafeString:\n return format_html(self.__html__())",
"def render(self, value, context=None):\n if self.raw_html is not None:\n return format_html(self.raw_html)\n else:\n return ''",
"def _repr_html_(self):\n return self.data.to_html()",
"def _repr_html_(self) -> str:\n output_html = self.template_base.render(context=self.context)\n return output_html",
"def html(self) -> str:\n return self._html",
"def render_html(self):\n return self.template.render(content=self.content, **self.styles)",
"def get_inner_html(self):\n\n pass",
"def __html__(self):\n if not self.hasArticle:\n return None\n\n if self.bbcode_is_active:\n return self._bbcodeAsHtml\n\n return self.html",
"def render(self):\n self.rendered = self.value\n return self.rendered",
"def __html__(self) -> str:\n components = [\n self.attributee_html,\n f'\"{self.linked_title}\"',\n self.date.string if self.date else '',\n ]\n return self.components_to_html(components)",
"def _repr_html_(self):\n return util.tree_sequence_html(self)",
"def __html__(self) -> str:\n components = [\n self.attributee_html,\n self.linked_title if self.title else 'untitled document',\n self.date.string if self.date else '',\n self.descriptive_phrase,\n f'archived in {self.collection}' if self.collection else '',\n ]\n return self.components_to_html(components)",
"def _repr_html_(self):\n return self._frame._repr_html_()",
"def get_html(self):\r\n pass",
"def _repr_html_(self):\n return util.tree_html(self)",
"def html(self):\n return self._html",
"def renderCode(self):\n className = type(self).__name__\n raise NotImplementedError('Method renderCode needs to be implemented by ' + className)",
"def render(self):\n start_tag = format_html('<div {}>', mark_safe(' '.join(self.field_attrs)))\n output = [start_tag]\n for widget in self:\n output.append(force_text(widget))\n output.append('</div>')\n return mark_safe('\\n'.join(output))",
"def getHtml(self):\n return self.html",
"def code(self):\n return '{}\\n<script>{}</script>'.format(self.html, self.js)",
"def to_html(self, data=None, **kwargs) -> str:\n html = self.create_container()\n return html",
"def render(self):\n return mark_safe(u'%s' % u'\\n'.join([u'<td class=\"qcradiobutton\">%s</td>'\n % force_unicode(w) for w in self]))",
"def get_html_string(self, **kwargs):\n ...",
"def render(self):\n raise NotImplementedError",
"def get_html(self):\r\n context = {\r\n 'display_name': self.display_name_with_default,\r\n 'element_id': self.element_id,\r\n 'instructions_html': self.instructions,\r\n 'content_html': self._render_content()\r\n }\r\n\r\n return self.system.render_template('annotatable.html', context)",
"def get_outer_html(self):\n\n pass",
"def innerHTML(self) -> str:\n if self._inner_element:\n return self._inner_element.innerHTML\n return super().innerHTML",
"def _repr_html_(self):\n return \"<td><b>{0}</b></td><td>{1}</td>\".format(self.id, self.title)",
"def render(self, value):\r\n return value",
"def render(self):\n return mark_safe(u'\\n'.join([force_unicode(w) for w in self]))",
"def generate_html(self):\n html_text_1 = \"\"\"\n <div class=\"concept\">\n\n \t\t<div class=\"concept-title\">\n\n \t\t\t\t\"\"\" + self.title\n\n html_text_2 = \"\"\"\n \t\t</div>\n\n \t\t<div class=\"concept-description\">\n\n\t\t <p>\n\t\t\t\n \t\t \t\t\"\"\" + self.description + \"\"\" \n \n </p>\"\"\"\n\n html_text_3 = '''\n\n \t\t</div>\n\n </div>'''\n\n return html_text_1 + html_text_2 + html_text_3",
"def value_as_html(self):\n property_name = \"_%s_as_html\" % self.attribute.type\n return getattr(self, property_name, self.value_as_text)",
"def as_html(self):\r\n return mark_safe(' '.join(['%s=\"%s\"' % (k, escape(v if not callable(v) else v()))\r\n for k, v in six.iteritems(self)]))",
"def render(self):\n raise NotImplementedError()",
"def render(self) -> str:\n\n return \"\\n\".join(self.render_iter())",
"def render(self):",
"def get_html_string_representation(self):\n return self.map.get_root().render()",
"def get_html(self, *args, **kwargs):\n return Text(self.get_data(*args, **kwargs), escape=False)",
"def innerHTML(element):\n return element.encode_contents()",
"def render(self):\n try:\n if self.permit():\n return self.renderer.render(self)\n except AttributeError:\n if self.renderer is None:\n raise NotImplementedError(\"Should have implemented a renderer for {0}\".format(self.name))\n else:\n raise\n return ''",
"def _repr_html_(self):\n import jinja2 # noqa\n\n call_result = self._get_call_result()\n\n id_result = str(id(self) + np.random.random()).replace(\".\", \"rr\")\n\n params = {\n \"result\": self,\n \"id_result\": id_result,\n \"call_result\": call_result,\n \"json_result\": json.dumps(self.json_, indent=2),\n }\n return jinja2.Template(RESULT_HTML_TEMPLATE).render(**params)",
"def rawHTML(self):\n #TODO : do checking for scripts and hacks here?\n return mark_safe(self.html)",
"def markup(self):\n return '%s%s%s' % (\n self.options['markup_prefix'],\n self._markup,\n self.options['markup_suffix'],\n )",
"def _repr_html_(self): # pragma: no cover\n return Utils.render_html('extent.html', extent=self)",
"def _repr_html_(self):\n return (\n f'<b>{self.__class__.__name__}</b>'\n f'<br> <b>defined by:</b> {self._str_meta_()}'\n f'<br> <b>with columns:</b> {self._str_colnames()}'\n f'<br> {len(self)} objects'\n f'<br> {self._html_table()}'\n )",
"def to_html(self) -> str:\n return f'''\n <a href=\"{self.link}\"> ({self.source_name}, {self.timestamp.strftime('%Y')}) </a>\n '''",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render_content(self):\n return mark_safe(markdown(self.content))",
"def _render_content(self):\r\n xmltree = etree.fromstring(self.content)\r\n xmltree.tag = 'div'\r\n if 'display_name' in xmltree.attrib:\r\n del xmltree.attrib['display_name']\r\n\r\n index = 0\r\n for el in xmltree.findall('.//annotation'):\r\n self._render_annotation(index, el)\r\n index += 1\r\n\r\n return etree.tostring(xmltree, encoding='unicode')",
"def render(self, **kwargs) -> str:\n return self.renderable(**kwargs).render()",
"def render(self,value):\n self.content += value\n if self._rendernl:\n self.content += self._rendernl",
"def _repr_html_(self):\n if self.container_id():\n return \"<i>This widget is already shown in this notebook</i>\"\n \n container_id = self.id + '_container'\n def set_cointainer_id():\n self.container_id._set(container_id)\n # Set container id, this gets applied in the next event loop\n # iteration, so by the time it gets called in JS, the div that\n # we define below will have been created.\n from ..app import call_later\n call_later(0.1, set_cointainer_id) # todo: always do calls in next iter\n return \"<div class='flx-container' id=%s />\" % container_id",
"def unrendered(self) -> str:",
"def raw_html(self):\n if self._html:\n return self._html\n else:\n return lxml.html.tostring(self.element, encoding=self.encoding)",
"def get_html(self):\r\n return u'This is supposed to be test html.'",
"def _render(self) -> str:\n html = self._template.render(self._transient_context)\n self._transient_context = None\n return html",
"def __repr__(self):\n\n template = \"\"\"\n - inline : {}\n \"\"\"\n\n return super().__repr__() + template.format(self.inline)",
"def __str__(self):\n t = Template(\n \"\"\"\n <h4>$title</h4>\n $imgs\n $footnotes\n <hr/>\"\"\")\n # Return result.\n return t.substitute({\n \"title\": self.title,\n \"imgs\": self.render_images(),\n \"footnotes\": self.render_footnotes()\n })",
"def render(self):\n raise RenderNotImplemented('Render function is not implemented.')",
"def render(self):\n self._render_text = self.content.replace('\\n', '<br>') # deal with new line\n return render_str(\"post.html\", p = self)",
"def __repr__(self) -> str:\n return f\"{self.text}\"",
"def to_html(self) -> str:\n coverage_class = 'zero' if self.called == 0 else 'all'\n return '''<tr id=\"func-{}\" class=\"cov-health-{}\">\n <td><a href=\"#line-{}\">{}</a></td>\n <td>{}</td><td>{}%</td><td>{}%</td>\n </tr>\\n'''.format(\n self.name, coverage_class, self.linenum, self.pretty_name, self.called,\n self.returned, self.blocks\n )",
"def _repr_html_(self):\n # pylint: disable=protected-access\n return self.folium_map._repr_html_()\n # pylint: enable=protected-access",
"def render_html(self, renderer, response_msg=''):\r\n # render ourself as a <span> + our content\r\n tree = etree.Element('span')\r\n\r\n # problem author can make this span display:inline\r\n if self.xml.get('inline', ''):\r\n tree.set('class', 'inline')\r\n\r\n for item in self.xml:\r\n # call provided procedure to do the rendering\r\n item_xhtml = renderer(item)\r\n if item_xhtml is not None:\r\n tree.append(item_xhtml)\r\n tree.tail = self.xml.tail\r\n\r\n # Add a <div> for the message at the end of the response\r\n if response_msg:\r\n tree.append(self._render_response_msg_html(response_msg))\r\n\r\n return tree",
"def __repr__(self):\n return self.text",
"def __repr__(self):\n return self.text",
"def part_render(self, attr, *a, **kw):\r\n style = kw.get('style', 'html')\r\n template = self.template(style)\r\n dt = template.get_def(attr)\r\n return unsafe(dt.render(thing = self, *a, **kw))",
"def _repr_html_(self):\n nb_ticks = 7\n delta_x = math.floor(self.width / (nb_ticks - 1))\n x_ticks = [(i) * delta_x for i in range(0, nb_ticks)]\n delta_val = delta_x * (self.vmax - self.vmin) / self.width\n val_ticks = [round(self.vmin + (i) * delta_val, 1) for i in range(0, nb_ticks)]\n\n return (\n f'<svg height=\"40\" width=\"{self.width}\">'\n + \"\".join(\n [\n (\n '<line x1=\"{i}\" y1=\"15\" x2=\"{i}\" '\n 'y2=\"27\" style=\"stroke:{color};stroke-width:2;\" />'\n ).format(\n i=i * 1,\n color=self.rgba_hex_str(\n self.vmin + (self.vmax - self.vmin) * i / (self.width - 1),\n ),\n )\n for i in range(self.width)\n ],\n )\n + '<text x=\"0\" y=\"38\" style=\"text-anchor:start; font-size:11px; font:Arial\">{}</text>'.format( # noqa\n self.vmin,\n )\n + \"\".join(\n [\n (\n '<text x=\"{}\" y=\"38\"; style=\"text-anchor:middle; font-size:11px; font:Arial\">{}</text>' # noqa\n ).format(x_ticks[i], val_ticks[i])\n for i in range(1, nb_ticks - 1)\n ],\n )\n + '<text x=\"{}\" y=\"38\" style=\"text-anchor:end; font-size:11px; font:Arial\">{}</text>'.format(\n self.width,\n self.vmax,\n )\n + '<text x=\"0\" y=\"12\" style=\"font-size:11px; font:Arial\">{}</text>'.format(\n self.caption,\n )\n + \"</svg>\"\n )",
"def render(self):\r\n super().render()",
"def htmlise(s):\n return '<div><pre class=\"tablecell\">' + html.escape(s) + '</pre></div>'",
"def _render(self) -> None:\n pass",
"def _repr_(self):\n return repr(self.element())",
"def to_html(self):\n return self.serializer.render(self.formatter.formatMessage(self.oldmsg))",
"def as_html(self): # pragma: no cover\n\n return render_to_string(\n self._meta.template,\n { \"table\": self } )",
"def html(self, children, ordering):\n\n raise NotImplementedError",
"def render(self):\n self._render_text = self.content.replace('\\n', '<br>')\n return TemplateFile.jinja_render_str(\"post.html\", p=self)",
"def to_html(self) -> str:\n source_name = escape(self.source_name)\n (covered, lines) = self.coverage_stats()\n lines_stats = \"{} / {} ({} lines of code)\".format(covered, lines, len(self.source_code))\n (br_covered, br_count, calls_covered, calls_count) = self.branch_stats()\n branch_stats = \"{} / {}\".format(br_covered, br_count)\n call_stats = \"{} / {}\".format(calls_covered, calls_count)\n (fn_covered, fn_count) = self.function_stats()\n fn_stats = \"{} / {}\".format(fn_covered, fn_count)\n\n self.decode_cpp_function_names()\n\n result = [\"\"\"\n <!DOCTYPE html>\n <html>\n <head>\n <meta charset=\"utf-8\">\n <title>Coverage report of file \"\"\" + source_name + \"\"\"</title>\n <style type=\"text/css\">\n /*<![CDATA[*/\n .cov-health-zero td { color: white; }\n .cov-health-zero a { color: #CCCCFF; }\n .cov-health-zero a:visited { color: #FFCCFF; }\n .cov-health-zero:nth-child(odd) td { background-color: #CC0000; }\n .cov-health-zero:nth-child(even) td { background-color: #DD0000; }\n .cov-health-na td { color: silver; }\n .cov-health-na td:nth-child(2) { visibility: hidden; }\n .branch { cursor: help; }\n .branch-taken { color: silver; }\n .branch-taken:hover { color: black; }\n .branch-not-taken { color: red; }\n .branch-not-taken:hover { color: maroon; }\n #source tbody td:last-child, #funcs tbody td:first-child\n { text-align: left; font-family: monospace; white-space: pre; }\n .sortable { border-collapse: collapse; }\n div { width: 100%; overflow: hidden; }\n .sortable td { text-align: right; padding-left: 2em; }\n .sortable tbody tr:nth-child(odd) { background-color: #FFFFCC; }\n .sortable tbody tr:nth-child(even) { background-color: #FFFFDD; }\n #source tbody tr:hover td:last-child { font-weight: bold; }\n #source tbody td:first-child { max-width: 7em; font-size: smaller; word-wrap: break-word; }\n #source tbody td:nth-child(2) { font-size: smaller; color: silver; }\n #summary { float: right; border-collapse: collapse; }\n #summary td { border: 1px solid black; }\n caption { font-weight: bold; }\n /*]]>*/\n </style>\n <script src=\"sorttable.js\"></script>\n </head>\n <body>\n <p><a href=\"index.html\">⇐ Back</a> | Go to line #<input type=\"number\" id=\"goto\" /></p>\n <h1>\"\"\" + source_name + \"\"\"</h1>\n <div>\n <table id=\"summary\">\n <caption>Summary</caption>\n <tr><td>Lines</td><td>\"\"\" + lines_stats + \"\"\"</td></tr>\n <tr><td>Branches</td><td>\"\"\" + branch_stats + \"\"\"</td></tr>\n <tr><td>Calls</td><td>\"\"\" + call_stats + \"\"\"</td></tr>\n <tr><td><a href=\"#functions\">Functions</a></td><td>\"\"\" + fn_stats + \"\"\"</td></tr>\n </ul>\n </table>\n <table class=\"sortable\" id=\"source\">\n <thead><tr><th>Branches</th><th>Cov</th><th>Line</th><th class=\"sorttable_nosort\">Source</th></tr></thead>\n <tbody>\n \"\"\"]\n result.extend(line.to_html() for line in self.source_code)\n result.append(\"\"\"\n </tbody>\n </table>\n </div>\n <h2 id=\"functions\">Functions</h2>\n <div>\n <table class=\"sortable\" id=\"funcs\">\n <thead><tr><th>Function</th><th>Calls</th><th>Ret.</th><th>Blk. Exec.</th></tr></thead>\n <tbody>\"\"\")\n result.extend(func.to_html() for func in self.source_functions)\n result.append(\"\"\"\n </tbody>\n </table>\n </div>\n <script>\n //<![CDATA[\n document.getElementById('goto').onchange = function()\n {\n location = \"#line-\" + this.value;\n }\n //]]>\n </script>\n </body>\n </html>\n \"\"\")\n return '\\n'.join(result)",
"def asHTML(self):\n return self.context.xml_get('xml_content')",
"def _html(self, message):\n content = f\"<html><body><h1>{message}</h1></body></html>\"\n return content.encode(\"utf8\")",
"def get_html(self):\r\n return self.system.render_template('lti.html', self.get_context())",
"def to_html(self, content, request, **parameters):\n raise NotImplementedError",
"def _repr_html_(self):\n import io\n import base64\n from PIL import Image\n\n library_name = \"vedo.assembly.Assembly\"\n help_url = \"https://vedo.embl.es/docs/vedo/assembly.html\"\n\n arr = self.thumbnail(zoom=1.1, elevation=-60)\n\n im = Image.fromarray(arr)\n buffered = io.BytesIO()\n im.save(buffered, format=\"PNG\", quality=100)\n encoded = base64.b64encode(buffered.getvalue()).decode(\"utf-8\")\n url = \"data:image/png;base64,\" + encoded\n image = f\"<img src='{url}'></img>\"\n\n # statisitics\n bounds = \"<br/>\".join(\n [\n vedo.utils.precision(min_x, 4) + \" ... \" + vedo.utils.precision(max_x, 4)\n for min_x, max_x in zip(self.bounds()[::2], self.bounds()[1::2])\n ]\n )\n\n help_text = \"\"\n if self.name:\n help_text += f\"<b> {self.name}:   </b>\"\n help_text += '<b><a href=\"' + help_url + '\" target=\"_blank\">' + library_name + \"</a></b>\"\n if self.filename:\n dots = \"\"\n if len(self.filename) > 30:\n dots = \"...\"\n help_text += f\"<br/><code><i>({dots}{self.filename[-30:]})</i></code>\"\n\n allt = [\n \"<table>\",\n \"<tr>\",\n \"<td>\",\n image,\n \"</td>\",\n \"<td style='text-align: center; vertical-align: center;'><br/>\",\n help_text,\n \"<table>\",\n \"<tr><td><b> nr. of objects </b></td><td>\"\n + str(self.GetNumberOfPaths())\n + \"</td></tr>\",\n \"<tr><td><b> position </b></td><td>\" + str(self.GetPosition()) + \"</td></tr>\",\n \"<tr><td><b> diagonal size </b></td><td>\"\n + vedo.utils.precision(self.diagonal_size(), 5)\n + \"</td></tr>\",\n \"<tr><td><b> bounds </b> <br/> (x/y/z) </td><td>\" + str(bounds) + \"</td></tr>\",\n \"</table>\",\n \"</table>\",\n ]\n return \"\\n\".join(allt)",
"def render(self):\n return render_to_string(\n self.template_name, self.get_context_data(), request=self.request\n )",
"def to_html(self) -> str:\n if self.count:\n class_name = 'branch-taken'\n symbol = '▷' if self.type_ == 'branch' else '○'\n else:\n class_name = 'branch-not-taken'\n symbol = '▶' if self.type_ == 'branch' else '●'\n\n info_text = ' (' + self.info + ')' if self.info else ''\n return '<span class=\"branch {}\" title=\"{} {}{} × {}\">{}</span>'.format(\n class_name, self.type_, self.id_, info_text, self.count, symbol\n )",
"def output_open_html(self):\n text = self.token[\"text\"]\n tag = self.token[\"tag\"]\n if self._parse_block_html and tag not in _pre_tags:\n text = self.inline(text)\n extra = self.token.get(\"extra\") or \"\"\n html = \"<%s%s>%s</%s>\" % (tag, extra, text, tag)\n return self.renderer.block_html(html)"
] | [
"0.76847166",
"0.7620541",
"0.7579551",
"0.7579551",
"0.7545735",
"0.7432328",
"0.7409317",
"0.7385336",
"0.7385336",
"0.7304668",
"0.7304668",
"0.72542566",
"0.7233775",
"0.7104469",
"0.7065664",
"0.70531815",
"0.7014759",
"0.69775945",
"0.6977155",
"0.69219375",
"0.6887626",
"0.6865015",
"0.68577087",
"0.6832544",
"0.6793131",
"0.6786746",
"0.6755257",
"0.6700565",
"0.6677593",
"0.6667174",
"0.66605365",
"0.66569227",
"0.6654718",
"0.6650187",
"0.6647357",
"0.66471726",
"0.6644631",
"0.66151345",
"0.657141",
"0.6570393",
"0.6562595",
"0.6546755",
"0.65458214",
"0.65410066",
"0.6525329",
"0.64996165",
"0.64961267",
"0.6492899",
"0.64831465",
"0.6472499",
"0.64690435",
"0.64618444",
"0.64429647",
"0.6442671",
"0.64419985",
"0.64315027",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.64285254",
"0.64199877",
"0.6414929",
"0.6407741",
"0.63980436",
"0.6386511",
"0.63800627",
"0.6378482",
"0.6371753",
"0.6363004",
"0.6348087",
"0.6346591",
"0.6306697",
"0.6293999",
"0.62862784",
"0.628417",
"0.62665707",
"0.626092",
"0.626092",
"0.62424207",
"0.62371033",
"0.6219805",
"0.62149316",
"0.6206116",
"0.61945295",
"0.6184101",
"0.6172638",
"0.6152366",
"0.61520904",
"0.6150414",
"0.6139339",
"0.6136774",
"0.61351085",
"0.6126126",
"0.61231214",
"0.6119979",
"0.61137086",
"0.61115175"
] | 0.61986023 | 86 |
Function for rendering HTML code of this element | def html(self):
lbl = escape(self._label)
dis = ('disabled' if not self._enabled else '')
typ = ('password' if self._password else 'text')
nam = escape(self._name)
val = escape(self._value)
return '%s <input name="%s" %s type="%s" value="%s" size="%i">' % (lbl, nam, dis, typ, val, self._size) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __html__(self):\n return str(self)",
"def rawHTMLrendered(self):",
"def _repr_html_(self):\n return self.__repr__()",
"def _repr_html_(self):\n return self.__repr__()",
"def __html__(self):\n return self.html",
"def html(self):\n bop = ('<b>' if self._bold else '')\n iop = ('<i>' if self._italic else '')\n icl = ('</i>' if self._italic else '')\n bcl = ('</b>' if self._bold else '')\n txt = escape(self._text)\n s = '%s%s%s%s%s' % (bop, iop, txt, icl, bcl)\n return '%s' % s",
"def html(self) -> str:\n if self._inner_element:\n return self.start_tag + self._inner_element.html + self.end_tag\n return super().html",
"def _repr_html_(self):\n\n return self._repr__base(rich_output=True)",
"def _repr_html_(self):\n\n return self._repr__base(rich_output=True)",
"def html(self) -> SafeString:\n return format_html(self.__html__())",
"def html(self) -> SafeString:\n return format_html(self.__html__())",
"def render(self, value, context=None):\n if self.raw_html is not None:\n return format_html(self.raw_html)\n else:\n return ''",
"def _repr_html_(self):\n return self.data.to_html()",
"def _repr_html_(self) -> str:\n output_html = self.template_base.render(context=self.context)\n return output_html",
"def html(self) -> str:\n return self._html",
"def render_html(self):\n return self.template.render(content=self.content, **self.styles)",
"def get_inner_html(self):\n\n pass",
"def __html__(self):\n if not self.hasArticle:\n return None\n\n if self.bbcode_is_active:\n return self._bbcodeAsHtml\n\n return self.html",
"def render(self):\n self.rendered = self.value\n return self.rendered",
"def __html__(self) -> str:\n components = [\n self.attributee_html,\n f'\"{self.linked_title}\"',\n self.date.string if self.date else '',\n ]\n return self.components_to_html(components)",
"def _repr_html_(self):\n return util.tree_sequence_html(self)",
"def __html__(self) -> str:\n components = [\n self.attributee_html,\n self.linked_title if self.title else 'untitled document',\n self.date.string if self.date else '',\n self.descriptive_phrase,\n f'archived in {self.collection}' if self.collection else '',\n ]\n return self.components_to_html(components)",
"def _repr_html_(self):\n return self._frame._repr_html_()",
"def get_html(self):\r\n pass",
"def _repr_html_(self):\n return util.tree_html(self)",
"def html(self):\n return self._html",
"def renderCode(self):\n className = type(self).__name__\n raise NotImplementedError('Method renderCode needs to be implemented by ' + className)",
"def render(self):\n start_tag = format_html('<div {}>', mark_safe(' '.join(self.field_attrs)))\n output = [start_tag]\n for widget in self:\n output.append(force_text(widget))\n output.append('</div>')\n return mark_safe('\\n'.join(output))",
"def getHtml(self):\n return self.html",
"def code(self):\n return '{}\\n<script>{}</script>'.format(self.html, self.js)",
"def to_html(self, data=None, **kwargs) -> str:\n html = self.create_container()\n return html",
"def render(self):\n return mark_safe(u'%s' % u'\\n'.join([u'<td class=\"qcradiobutton\">%s</td>'\n % force_unicode(w) for w in self]))",
"def get_html_string(self, **kwargs):\n ...",
"def render(self):\n raise NotImplementedError",
"def get_html(self):\r\n context = {\r\n 'display_name': self.display_name_with_default,\r\n 'element_id': self.element_id,\r\n 'instructions_html': self.instructions,\r\n 'content_html': self._render_content()\r\n }\r\n\r\n return self.system.render_template('annotatable.html', context)",
"def get_outer_html(self):\n\n pass",
"def innerHTML(self) -> str:\n if self._inner_element:\n return self._inner_element.innerHTML\n return super().innerHTML",
"def _repr_html_(self):\n return \"<td><b>{0}</b></td><td>{1}</td>\".format(self.id, self.title)",
"def render(self, value):\r\n return value",
"def render(self):\n return mark_safe(u'\\n'.join([force_unicode(w) for w in self]))",
"def generate_html(self):\n html_text_1 = \"\"\"\n <div class=\"concept\">\n\n \t\t<div class=\"concept-title\">\n\n \t\t\t\t\"\"\" + self.title\n\n html_text_2 = \"\"\"\n \t\t</div>\n\n \t\t<div class=\"concept-description\">\n\n\t\t <p>\n\t\t\t\n \t\t \t\t\"\"\" + self.description + \"\"\" \n \n </p>\"\"\"\n\n html_text_3 = '''\n\n \t\t</div>\n\n </div>'''\n\n return html_text_1 + html_text_2 + html_text_3",
"def value_as_html(self):\n property_name = \"_%s_as_html\" % self.attribute.type\n return getattr(self, property_name, self.value_as_text)",
"def as_html(self):\r\n return mark_safe(' '.join(['%s=\"%s\"' % (k, escape(v if not callable(v) else v()))\r\n for k, v in six.iteritems(self)]))",
"def render(self):\n raise NotImplementedError()",
"def render(self) -> str:\n\n return \"\\n\".join(self.render_iter())",
"def render(self):",
"def get_html_string_representation(self):\n return self.map.get_root().render()",
"def get_html(self, *args, **kwargs):\n return Text(self.get_data(*args, **kwargs), escape=False)",
"def innerHTML(element):\n return element.encode_contents()",
"def render(self):\n try:\n if self.permit():\n return self.renderer.render(self)\n except AttributeError:\n if self.renderer is None:\n raise NotImplementedError(\"Should have implemented a renderer for {0}\".format(self.name))\n else:\n raise\n return ''",
"def _repr_html_(self):\n import jinja2 # noqa\n\n call_result = self._get_call_result()\n\n id_result = str(id(self) + np.random.random()).replace(\".\", \"rr\")\n\n params = {\n \"result\": self,\n \"id_result\": id_result,\n \"call_result\": call_result,\n \"json_result\": json.dumps(self.json_, indent=2),\n }\n return jinja2.Template(RESULT_HTML_TEMPLATE).render(**params)",
"def rawHTML(self):\n #TODO : do checking for scripts and hacks here?\n return mark_safe(self.html)",
"def markup(self):\n return '%s%s%s' % (\n self.options['markup_prefix'],\n self._markup,\n self.options['markup_suffix'],\n )",
"def _repr_html_(self): # pragma: no cover\n return Utils.render_html('extent.html', extent=self)",
"def _repr_html_(self):\n return (\n f'<b>{self.__class__.__name__}</b>'\n f'<br> <b>defined by:</b> {self._str_meta_()}'\n f'<br> <b>with columns:</b> {self._str_colnames()}'\n f'<br> {len(self)} objects'\n f'<br> {self._html_table()}'\n )",
"def to_html(self) -> str:\n return f'''\n <a href=\"{self.link}\"> ({self.source_name}, {self.timestamp.strftime('%Y')}) </a>\n '''",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render(self):\n pass",
"def render_content(self):\n return mark_safe(markdown(self.content))",
"def _render_content(self):\r\n xmltree = etree.fromstring(self.content)\r\n xmltree.tag = 'div'\r\n if 'display_name' in xmltree.attrib:\r\n del xmltree.attrib['display_name']\r\n\r\n index = 0\r\n for el in xmltree.findall('.//annotation'):\r\n self._render_annotation(index, el)\r\n index += 1\r\n\r\n return etree.tostring(xmltree, encoding='unicode')",
"def render(self, **kwargs) -> str:\n return self.renderable(**kwargs).render()",
"def render(self,value):\n self.content += value\n if self._rendernl:\n self.content += self._rendernl",
"def _repr_html_(self):\n if self.container_id():\n return \"<i>This widget is already shown in this notebook</i>\"\n \n container_id = self.id + '_container'\n def set_cointainer_id():\n self.container_id._set(container_id)\n # Set container id, this gets applied in the next event loop\n # iteration, so by the time it gets called in JS, the div that\n # we define below will have been created.\n from ..app import call_later\n call_later(0.1, set_cointainer_id) # todo: always do calls in next iter\n return \"<div class='flx-container' id=%s />\" % container_id",
"def unrendered(self) -> str:",
"def raw_html(self):\n if self._html:\n return self._html\n else:\n return lxml.html.tostring(self.element, encoding=self.encoding)",
"def get_html(self):\r\n return u'This is supposed to be test html.'",
"def _render(self) -> str:\n html = self._template.render(self._transient_context)\n self._transient_context = None\n return html",
"def __repr__(self):\n\n template = \"\"\"\n - inline : {}\n \"\"\"\n\n return super().__repr__() + template.format(self.inline)",
"def __str__(self):\n t = Template(\n \"\"\"\n <h4>$title</h4>\n $imgs\n $footnotes\n <hr/>\"\"\")\n # Return result.\n return t.substitute({\n \"title\": self.title,\n \"imgs\": self.render_images(),\n \"footnotes\": self.render_footnotes()\n })",
"def render(self):\n raise RenderNotImplemented('Render function is not implemented.')",
"def render(self):\n self._render_text = self.content.replace('\\n', '<br>') # deal with new line\n return render_str(\"post.html\", p = self)",
"def __repr__(self) -> str:\n return f\"{self.text}\"",
"def to_html(self) -> str:\n coverage_class = 'zero' if self.called == 0 else 'all'\n return '''<tr id=\"func-{}\" class=\"cov-health-{}\">\n <td><a href=\"#line-{}\">{}</a></td>\n <td>{}</td><td>{}%</td><td>{}%</td>\n </tr>\\n'''.format(\n self.name, coverage_class, self.linenum, self.pretty_name, self.called,\n self.returned, self.blocks\n )",
"def _repr_html_(self):\n # pylint: disable=protected-access\n return self.folium_map._repr_html_()\n # pylint: enable=protected-access",
"def render_html(self, renderer, response_msg=''):\r\n # render ourself as a <span> + our content\r\n tree = etree.Element('span')\r\n\r\n # problem author can make this span display:inline\r\n if self.xml.get('inline', ''):\r\n tree.set('class', 'inline')\r\n\r\n for item in self.xml:\r\n # call provided procedure to do the rendering\r\n item_xhtml = renderer(item)\r\n if item_xhtml is not None:\r\n tree.append(item_xhtml)\r\n tree.tail = self.xml.tail\r\n\r\n # Add a <div> for the message at the end of the response\r\n if response_msg:\r\n tree.append(self._render_response_msg_html(response_msg))\r\n\r\n return tree",
"def __repr__(self):\n return self.text",
"def __repr__(self):\n return self.text",
"def part_render(self, attr, *a, **kw):\r\n style = kw.get('style', 'html')\r\n template = self.template(style)\r\n dt = template.get_def(attr)\r\n return unsafe(dt.render(thing = self, *a, **kw))",
"def _repr_html_(self):\n nb_ticks = 7\n delta_x = math.floor(self.width / (nb_ticks - 1))\n x_ticks = [(i) * delta_x for i in range(0, nb_ticks)]\n delta_val = delta_x * (self.vmax - self.vmin) / self.width\n val_ticks = [round(self.vmin + (i) * delta_val, 1) for i in range(0, nb_ticks)]\n\n return (\n f'<svg height=\"40\" width=\"{self.width}\">'\n + \"\".join(\n [\n (\n '<line x1=\"{i}\" y1=\"15\" x2=\"{i}\" '\n 'y2=\"27\" style=\"stroke:{color};stroke-width:2;\" />'\n ).format(\n i=i * 1,\n color=self.rgba_hex_str(\n self.vmin + (self.vmax - self.vmin) * i / (self.width - 1),\n ),\n )\n for i in range(self.width)\n ],\n )\n + '<text x=\"0\" y=\"38\" style=\"text-anchor:start; font-size:11px; font:Arial\">{}</text>'.format( # noqa\n self.vmin,\n )\n + \"\".join(\n [\n (\n '<text x=\"{}\" y=\"38\"; style=\"text-anchor:middle; font-size:11px; font:Arial\">{}</text>' # noqa\n ).format(x_ticks[i], val_ticks[i])\n for i in range(1, nb_ticks - 1)\n ],\n )\n + '<text x=\"{}\" y=\"38\" style=\"text-anchor:end; font-size:11px; font:Arial\">{}</text>'.format(\n self.width,\n self.vmax,\n )\n + '<text x=\"0\" y=\"12\" style=\"font-size:11px; font:Arial\">{}</text>'.format(\n self.caption,\n )\n + \"</svg>\"\n )",
"def render(self):\r\n super().render()",
"def htmlise(s):\n return '<div><pre class=\"tablecell\">' + html.escape(s) + '</pre></div>'",
"def _render(self) -> None:\n pass",
"def html(self):\n dis = ('disabled' if not self._enabled else '')\n met = ('post' if self._html_post else 'get')\n act = escape(self._action)\n txt = escape(self._text)\n return '<button %s formaction=\"%s\" formmethod=\"%s\">%s</button>' % (dis, act, met, txt)",
"def _repr_(self):\n return repr(self.element())",
"def to_html(self):\n return self.serializer.render(self.formatter.formatMessage(self.oldmsg))",
"def as_html(self): # pragma: no cover\n\n return render_to_string(\n self._meta.template,\n { \"table\": self } )",
"def html(self, children, ordering):\n\n raise NotImplementedError",
"def render(self):\n self._render_text = self.content.replace('\\n', '<br>')\n return TemplateFile.jinja_render_str(\"post.html\", p=self)",
"def to_html(self) -> str:\n source_name = escape(self.source_name)\n (covered, lines) = self.coverage_stats()\n lines_stats = \"{} / {} ({} lines of code)\".format(covered, lines, len(self.source_code))\n (br_covered, br_count, calls_covered, calls_count) = self.branch_stats()\n branch_stats = \"{} / {}\".format(br_covered, br_count)\n call_stats = \"{} / {}\".format(calls_covered, calls_count)\n (fn_covered, fn_count) = self.function_stats()\n fn_stats = \"{} / {}\".format(fn_covered, fn_count)\n\n self.decode_cpp_function_names()\n\n result = [\"\"\"\n <!DOCTYPE html>\n <html>\n <head>\n <meta charset=\"utf-8\">\n <title>Coverage report of file \"\"\" + source_name + \"\"\"</title>\n <style type=\"text/css\">\n /*<![CDATA[*/\n .cov-health-zero td { color: white; }\n .cov-health-zero a { color: #CCCCFF; }\n .cov-health-zero a:visited { color: #FFCCFF; }\n .cov-health-zero:nth-child(odd) td { background-color: #CC0000; }\n .cov-health-zero:nth-child(even) td { background-color: #DD0000; }\n .cov-health-na td { color: silver; }\n .cov-health-na td:nth-child(2) { visibility: hidden; }\n .branch { cursor: help; }\n .branch-taken { color: silver; }\n .branch-taken:hover { color: black; }\n .branch-not-taken { color: red; }\n .branch-not-taken:hover { color: maroon; }\n #source tbody td:last-child, #funcs tbody td:first-child\n { text-align: left; font-family: monospace; white-space: pre; }\n .sortable { border-collapse: collapse; }\n div { width: 100%; overflow: hidden; }\n .sortable td { text-align: right; padding-left: 2em; }\n .sortable tbody tr:nth-child(odd) { background-color: #FFFFCC; }\n .sortable tbody tr:nth-child(even) { background-color: #FFFFDD; }\n #source tbody tr:hover td:last-child { font-weight: bold; }\n #source tbody td:first-child { max-width: 7em; font-size: smaller; word-wrap: break-word; }\n #source tbody td:nth-child(2) { font-size: smaller; color: silver; }\n #summary { float: right; border-collapse: collapse; }\n #summary td { border: 1px solid black; }\n caption { font-weight: bold; }\n /*]]>*/\n </style>\n <script src=\"sorttable.js\"></script>\n </head>\n <body>\n <p><a href=\"index.html\">⇐ Back</a> | Go to line #<input type=\"number\" id=\"goto\" /></p>\n <h1>\"\"\" + source_name + \"\"\"</h1>\n <div>\n <table id=\"summary\">\n <caption>Summary</caption>\n <tr><td>Lines</td><td>\"\"\" + lines_stats + \"\"\"</td></tr>\n <tr><td>Branches</td><td>\"\"\" + branch_stats + \"\"\"</td></tr>\n <tr><td>Calls</td><td>\"\"\" + call_stats + \"\"\"</td></tr>\n <tr><td><a href=\"#functions\">Functions</a></td><td>\"\"\" + fn_stats + \"\"\"</td></tr>\n </ul>\n </table>\n <table class=\"sortable\" id=\"source\">\n <thead><tr><th>Branches</th><th>Cov</th><th>Line</th><th class=\"sorttable_nosort\">Source</th></tr></thead>\n <tbody>\n \"\"\"]\n result.extend(line.to_html() for line in self.source_code)\n result.append(\"\"\"\n </tbody>\n </table>\n </div>\n <h2 id=\"functions\">Functions</h2>\n <div>\n <table class=\"sortable\" id=\"funcs\">\n <thead><tr><th>Function</th><th>Calls</th><th>Ret.</th><th>Blk. Exec.</th></tr></thead>\n <tbody>\"\"\")\n result.extend(func.to_html() for func in self.source_functions)\n result.append(\"\"\"\n </tbody>\n </table>\n </div>\n <script>\n //<![CDATA[\n document.getElementById('goto').onchange = function()\n {\n location = \"#line-\" + this.value;\n }\n //]]>\n </script>\n </body>\n </html>\n \"\"\")\n return '\\n'.join(result)",
"def asHTML(self):\n return self.context.xml_get('xml_content')",
"def _html(self, message):\n content = f\"<html><body><h1>{message}</h1></body></html>\"\n return content.encode(\"utf8\")",
"def get_html(self):\r\n return self.system.render_template('lti.html', self.get_context())",
"def to_html(self, content, request, **parameters):\n raise NotImplementedError",
"def _repr_html_(self):\n import io\n import base64\n from PIL import Image\n\n library_name = \"vedo.assembly.Assembly\"\n help_url = \"https://vedo.embl.es/docs/vedo/assembly.html\"\n\n arr = self.thumbnail(zoom=1.1, elevation=-60)\n\n im = Image.fromarray(arr)\n buffered = io.BytesIO()\n im.save(buffered, format=\"PNG\", quality=100)\n encoded = base64.b64encode(buffered.getvalue()).decode(\"utf-8\")\n url = \"data:image/png;base64,\" + encoded\n image = f\"<img src='{url}'></img>\"\n\n # statisitics\n bounds = \"<br/>\".join(\n [\n vedo.utils.precision(min_x, 4) + \" ... \" + vedo.utils.precision(max_x, 4)\n for min_x, max_x in zip(self.bounds()[::2], self.bounds()[1::2])\n ]\n )\n\n help_text = \"\"\n if self.name:\n help_text += f\"<b> {self.name}:   </b>\"\n help_text += '<b><a href=\"' + help_url + '\" target=\"_blank\">' + library_name + \"</a></b>\"\n if self.filename:\n dots = \"\"\n if len(self.filename) > 30:\n dots = \"...\"\n help_text += f\"<br/><code><i>({dots}{self.filename[-30:]})</i></code>\"\n\n allt = [\n \"<table>\",\n \"<tr>\",\n \"<td>\",\n image,\n \"</td>\",\n \"<td style='text-align: center; vertical-align: center;'><br/>\",\n help_text,\n \"<table>\",\n \"<tr><td><b> nr. of objects </b></td><td>\"\n + str(self.GetNumberOfPaths())\n + \"</td></tr>\",\n \"<tr><td><b> position </b></td><td>\" + str(self.GetPosition()) + \"</td></tr>\",\n \"<tr><td><b> diagonal size </b></td><td>\"\n + vedo.utils.precision(self.diagonal_size(), 5)\n + \"</td></tr>\",\n \"<tr><td><b> bounds </b> <br/> (x/y/z) </td><td>\" + str(bounds) + \"</td></tr>\",\n \"</table>\",\n \"</table>\",\n ]\n return \"\\n\".join(allt)",
"def render(self):\n return render_to_string(\n self.template_name, self.get_context_data(), request=self.request\n )",
"def to_html(self) -> str:\n if self.count:\n class_name = 'branch-taken'\n symbol = '▷' if self.type_ == 'branch' else '○'\n else:\n class_name = 'branch-not-taken'\n symbol = '▶' if self.type_ == 'branch' else '●'\n\n info_text = ' (' + self.info + ')' if self.info else ''\n return '<span class=\"branch {}\" title=\"{} {}{} × {}\">{}</span>'.format(\n class_name, self.type_, self.id_, info_text, self.count, symbol\n )",
"def output_open_html(self):\n text = self.token[\"text\"]\n tag = self.token[\"tag\"]\n if self._parse_block_html and tag not in _pre_tags:\n text = self.inline(text)\n extra = self.token.get(\"extra\") or \"\"\n html = \"<%s%s>%s</%s>\" % (tag, extra, text, tag)\n return self.renderer.block_html(html)"
] | [
"0.76847166",
"0.7620541",
"0.7579551",
"0.7579551",
"0.7545735",
"0.7432328",
"0.7409317",
"0.7385336",
"0.7385336",
"0.7304668",
"0.7304668",
"0.72542566",
"0.7233775",
"0.7104469",
"0.7065664",
"0.70531815",
"0.7014759",
"0.69775945",
"0.6977155",
"0.69219375",
"0.6887626",
"0.6865015",
"0.68577087",
"0.6832544",
"0.6793131",
"0.6786746",
"0.6755257",
"0.6700565",
"0.6677593",
"0.6667174",
"0.66605365",
"0.66569227",
"0.6654718",
"0.6650187",
"0.6647357",
"0.66471726",
"0.6644631",
"0.66151345",
"0.657141",
"0.6570393",
"0.6562595",
"0.6546755",
"0.65458214",
"0.65410066",
"0.6525329",
"0.64996165",
"0.64961267",
"0.6492899",
"0.64831465",
"0.6472499",
"0.64690435",
"0.64618444",
"0.64429647",
"0.6442671",
"0.64419985",
"0.64315027",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.6429977",
"0.64285254",
"0.64199877",
"0.6414929",
"0.6407741",
"0.63980436",
"0.6386511",
"0.63800627",
"0.6378482",
"0.6371753",
"0.6363004",
"0.6348087",
"0.6346591",
"0.6306697",
"0.6293999",
"0.62862784",
"0.628417",
"0.62665707",
"0.626092",
"0.626092",
"0.62424207",
"0.62371033",
"0.6219805",
"0.62149316",
"0.6206116",
"0.61986023",
"0.61945295",
"0.6184101",
"0.6172638",
"0.6152366",
"0.61520904",
"0.6150414",
"0.6139339",
"0.6136774",
"0.61351085",
"0.6126126",
"0.61231214",
"0.6119979",
"0.61137086",
"0.61115175"
] | 0.0 | -1 |
Method to get the credentials from ~/.mofplusrc | def credentials_from_rc(self):
mprc_filename = os.environ["HOME"]+'/.mofplusrc'
with open(mprc_filename, 'r') as mprc:
username = mprc.readline().split()[0]
pw = mprc.readline().split()[0]
return username, pw | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_credentials():\n #home_dir = os.path.expanduser('~')\n home_dir = os.path.expanduser('/home/pi/')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir, self.CRED_FILENAME)\r\n \r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n home_dir = os.path.expanduser(\"~\")\n credential_dir = os.path.join(home_dir, \".credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, \"autoto.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, self.auth_flags)\n print(\"Storing credentials to \" + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser(os.getcwd())\n credential_dir = os.path.join(home_dir, '.credentials')\n print(credential_dir)\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n # normal, sane way of doing this that really shouldn't be changed\n #home_dir = os.path.expanduser('~')\n #credential_dir = os.path.join(home_dir, '.credentials')\n #if not os.path.exists(credential_dir):\n # os.makedirs(credential_dir)\n #credential_path = os.path.join(credential_dir,'calendar-python-quickstart.json')\n\n # stupid hacky way that I came up with to fix an issue with running this app as root\n credential_path = os.path.join('./credentials','calendar-python-quickstart.json') \n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(config['client secret file'], SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'calendar-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'bis-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'reseller-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'credentialv_modify.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'drive-python-quickstart.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'thejam_calendar.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def credentials():\n\n username = os.environ.get('OS_USERNAME')\n password = os.environ.get('OS_PASSWORD')\n tenant_name = (os.environ.get('OS_TENANT_NAME') or\n os.environ.get('OS_PROJECT_NAME'))\n auth_url = os.environ.get('OS_AUTH_URL')\n\n config = configparser.RawConfigParser()\n if config.read(_CREDS_FILE):\n username = username or config.get('admin', 'user')\n password = password or config.get('admin', 'pass')\n tenant_name = tenant_name or config.get('admin', 'tenant')\n auth_url = auth_url or config.get('auth', 'uri')\n\n return {\n 'username': username,\n 'password': password,\n 'tenant_name': tenant_name,\n 'uri': auth_url\n }",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sally.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-showtime.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n print('Storing credentials to ' + credential_path)\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'homework_logger-gmail-api.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\n flow.user_agent = self.APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'grader.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, tools.argparser.parse_args(args=[]))\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'gmail-python-spam-filter.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(args.clientSecretFile, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = './ignore' #os.path.expanduser('./')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = './ignore' #os.path.expanduser('./')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def getCredentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'client_secret_OCR.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n print(\"Current folder: \" + os.getcwd())\n flow = client.flow_from_clientsecrets(\n \"../../\" + CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_creds():\n\tcredentials = None\n\tif os.path.exists('token.pickle'):\n\t\twith open('token.pickle', 'rb') as token:\n\t\t\tcredentials = pickle.load(token)\n\t# If there are no (valid) credentials available, let the user log in.\n\tif not credentials or not credentials.valid:\n\t\tif credentials and credentials.expired and credentials.refresh_token:\n\t\t\tcredentials.refresh(Request())\n\t\telse:\n\t\t\tflow = InstalledAppFlow.from_client_secrets_file('config/sa.json', SCOPES)\n\t\t\tcredentials = flow.run_local_server(port=0)\n\t\t# Save the credentials for the next run\n\t\twith open('token.pickle', 'wb') as token:\n\t\t\tpickle.dump(credentials, token)\n\treturn credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'fb-drive.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def credentials(self):\n return CurrentProject().config.credentials[self.key]",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(prefs_file):\n with open(prefs_file, \"rb\") as pl:\n if six.PY2:\n prefs = plistlib.readPlist(pl)\n else:\n prefs = plistlib.load(pl)\n\n try:\n jamf_url = prefs[\"JSS_URL\"]\n except KeyError:\n jamf_url = \"\"\n try:\n jamf_user = prefs[\"API_USERNAME\"]\n except KeyError:\n jamf_user = \"\"\n try:\n jamf_password = prefs[\"API_PASSWORD\"]\n except KeyError:\n jamf_password = \"\"\n return jamf_url, jamf_user, jamf_password",
"def get_credentials():\n\thome_dir = os.path.expanduser('~')\n\tcredential_dir = os.path.join(home_dir, '.credentials')\n\tif not os.path.exists(credential_dir):\n\t\tos.makedirs(credential_dir)\n\tcredential_path = os.path.join(credential_dir, \n\t\t\t\t\t\t\t\t\t'facebook_updater.json')\n\t\t\t\t\t\t\t\t\t\n\tstore = oauth2client.file.Storage(credential_path)\n\tcredentials = store.get()\n\tif not credentials or credentials.invalid:\n\t\tflow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n\t\tflow.user_agent = APPLICATION_NAME\n\t\tif flags:\n\t\t\tcredentials = tools.run_flow(flow, store, flags)\n\t\tprint ('Storing credentials to ' + credential_path)\n\treturn credentials",
"def get_creds():\n with open(CREDS_PATH, 'r') as creds_file:\n creds = json.load(creds_file)\n return creds['uname'], creds['pword']",
"def get_credentials(self):\r\n \r\n try:\r\n import argparse\r\n #flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\r\n if self.noauth == True:\r\n flags = tools.argparser.parse_args(args=['--noauth_local_webserver'])\r\n else:\r\n flags = tools.argparser.parse_args(args=[])\r\n except ImportError:\r\n flags = None \r\n \r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'sheets.googleapis.com-allstarbot.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n secret = Path(self.CLIENT_SECRET_FILE)\r\n if secret.exists():\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n else:\r\n print(\"client_secret.json not found, using env vars\")\r\n if not os.environ.get('client_id') or not os.environ.get('client_secret'): \r\n print(\"env vars client_id and client_secret not found. canceling\")\r\n raise Exception(\"client secret error\")\r\n else:\r\n flow = OAuth2WebServerFlow(\r\n os.environ.get('client_id'),\r\n os.environ.get('client_secret'),\r\n self.SCOPES) \r\n \r\n flow.params['access_type'] = 'offline'\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'clockwise.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'google-photos-stats.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, flags)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(\n credential_dir, 'sheets.googleapis.com-python-quickstart.json'\n )\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(resource_path(CLIENT_SECRET_FILE), SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n credential_dir = os.path.realpath('.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path) # stores the users credentials --> TODO: put in database\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n\n credentials = tools.run_flow(flow, store, flags)\n\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n try:\n import argparse\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n except ImportError:\n flags = None\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(GoogleGsuiteAPI.CLIENT_SECRET_FILE, GoogleGsuiteAPI.SCOPES)\n flow.user_agent = GoogleGsuiteAPI.APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n #home_dir = os.path.expanduser('~')\n home_dir = (HOME_DIR)\n credential_dir = os.path.join(home_dir, '.credentials')\n print(\"Credentials folder: \",credential_dir)\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n home_dir = os.path.expanduser('~')\n # credential_dir = os.path.join(home_dir, '.credentials')\n credential_dir = '.credentials'\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(GoogleDocsConverter.CLIENT_SECRET_FILE, GoogleDocsConverter.SCOPES)\n flow.user_agent = GoogleDocsConverter.APPLICATION_NAME\n if self.flags:\n credentials = tools.run_flow(flow, store, self.flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n #\n # Why is this not read from the yaml file?\n path = Path(path_expand(self.credential_file)).resolve()\n if not os.path.exists(path):\n os.makedirs(path)\n\n credentials_path = (path / 'google-drive-credentials.json').resolve()\n print(credentials_path)\n\n store = Storage(credentials_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n flow.user_agent = self.application_name\n #\n # SHOUDL THE FLAGS NOT BE SET IN THE YAML FILE OR DOCOPTS OFTHE COMMAND?\n #\n if self.flags:\n credentials = tools.run_flow(flow, store, self.flags)\n\n return credentials",
"def get_credentials():\n store = Storage(CREDENTIAL_PATH)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, None)\n return credentials",
"def _config_credentials_get():\n user = input(\"username:\")\n password = getpass.getpass()\n url = input(\"url:\")\n return user, password, url",
"def get_credentials():\n # Get the credential\n if os.path.exists(os.getenv(\"GCP_AUTOMATION_CONFIG\")):\n credential_location = os.getenv(\"GCP_AUTOMATION_CONFIG\")\n with open(credential_location) as f:\n credential_location = json.load(f)\n credential = credential_location['Config'][0]['Authentication']\n log.info(f\"Retrieved credentail location as {credential}\")\n else:\n raise ValueError(\"Error in get_credentials function when calling 'GCP_AUTOMATION_CONFIG'\")\n\n # Construct the credentials request\n try:\n # Turn provided string into a filepath\n credentials = service_account.Credentials.from_service_account_file(\n filename=credential,\n scopes=[\"https://www.googleapis.com/auth/cloud-platform\"],\n )\n log.info(\"Credentials object constructed from service account file\")\n return credentials\n except Exception as e:\n return e",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'admin-directory_v1-NestedGroupSync.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print 'Storing credentials to' + credential_path\n return credentials",
"def __get_credentials_from_config(self):\n cr = ConfigFileReader()\n\n self.username = cr.get_value(Config.EDUROAM_USER)\n debug(\"Username set to : \" + self.username)\n self.password = cr.get_value(Config.EDUROAM_PWD)",
"def GetUserCredentials():\n email = options.email\n if email is None:\n email = GetEmail(\"Email (login for uploading to %s)\" % options.server)\n password = getpass.getpass(\"Password for %s: \" % email)\n return (email, password)",
"def _get_credentials(flags):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-visualizerhelptext.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n if getattr(self, 'credentials', None):\n return self.credentials\n\n scopes = settings.SCOPES\n client_secret_file = settings.CLIENT_SECRET_FILE\n application_name = 'Google Sheets API Python Quickstart'\n\n home_dir = os.path.expanduser(settings.CREDENTIALS_DIRECTORY)\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(client_secret_file, scopes)\n flow.user_agent = application_name\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n # print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n credential_dir = os.getcwd()\n credential_path = os.path.join(credential_dir,\n 'smarking_error_check.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def _get_credentials(self):\n if self.config_file:\n with open(self.config_file) as f:\n config_str = f.read()\n credentials_dict = json.loads(config_str)\n self.credentials = credentials_dict[self.account][self.auth_type]\n else:\n self.credentials = {\n \"account\": os.environ.get('SNOWSQL_ACCOUNT'),\n \"user\": os.environ.get('SNOWSQL_USER'),\n \"password\": os.environ.get('SNOWSQL_PWD')\n }",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.logSheets.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n #### DONT EDIT.\n SCOPES = ['https://spreadsheets.google.com/feeds',\n 'https://www.googleapis.com/auth/drive']\n CLIENT_SECRET_FILE = 'client_secret.json'\n APPLICATION_NAME = 'reporter'\n ####\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'reporter_creds.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store)\n return credentials",
"def _get_credentials(self):\n\n scopes = 'https://www.googleapis.com/auth/drive'\n client_secret_file = '%s/config/client_secret.json' % PROJECT_DIR\n application_name = 'Drive API Quickstart'\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n\n credential_path = os.path.join(credential_dir, 'drive-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(client_secret_file, scopes)\n flow.user_agent = application_name\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n\n return credentials",
"def get_credentials():\n store = Storage(CLIENT_CREDENTIALS_FILE)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + CLIENT_CREDENTIALS_FILE)\n return credentials",
"def GetUserCredentials(self):\r\n # Create a local alias to the email variable to avoid Python's crazy\r\n # scoping rules.\r\n global keyring\r\n email = self.email\r\n if email is None:\r\n email = GetEmail(\"Email (login for uploading to %s)\" % self.server)\r\n password = None\r\n if keyring and not email in self.accounts_seen:\r\n try:\r\n password = keyring.get_password(self.host, email)\r\n except:\r\n # Sadly, we have to trap all errors here as\r\n # gnomekeyring.IOError inherits from object. :/\r\n print \"Failed to get password from keyring\"\r\n keyring = None\r\n if password is not None:\r\n print \"Using password from system keyring.\"\r\n self.accounts_seen.add(email)\r\n else:\r\n password = getpass.getpass(\"Password for %s: \" % email)\r\n if keyring:\r\n answer = raw_input(\"Store password in system keyring?(y/N) \").strip()\r\n if answer == \"y\":\r\n keyring.set_password(self.host, email, password)\r\n self.accounts_seen.add(email)\r\n return (email, password)",
"def get_credentials(self):\n return self.credentials",
"def get_credentials( flags=None ):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def auth(self):\n return self.creds(\"[email protected]\", cookie=\"USERTOKEN: authcookie\")",
"def get_creds():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('inputs/token.pickle'):\n with open('inputs/token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'inputs/credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('inputs/token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n return creds",
"def _GetCredentials():\n return service_account.Credentials.from_service_account_file(\n KEY_FILE, scopes=_SCOPES)",
"def newcred(self):\n return {'login': input('username: '),\n 'password': getpass.getpass()}",
"def get_smb_credentials(prefs_file):\n with open(prefs_file, \"rb\") as pl:\n if six.PY2:\n prefs = plistlib.readPlist(pl)\n else:\n prefs = plistlib.load(pl)\n\n try:\n smb_url = prefs[\"SMB_URL\"]\n except KeyError:\n smb_url = \"\"\n try:\n smb_user = prefs[\"SMB_USERNAME\"]\n except KeyError:\n smb_user = \"\"\n try:\n smb_password = prefs[\"SMB_PASSWORD\"]\n except KeyError:\n smb_password = \"\"\n return smb_url, smb_user, smb_password",
"def get_credentials(commandline_flags=None):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if commandline_flags:\n credentials = tools.run_flow(flow, store, commandline_flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def _get_root_credentials():\n orthomcl_credentials_file = resource_filename(__name__, 'credentials/orthomcl.cfg')\n\n # Copy template config file to actual search path when file can not be found\n if not os.path.exists(orthomcl_credentials_file):\n shutil.copy(orthomcl_credentials_file + '.sample', orthomcl_credentials_file)\n log.info('Copied .sample file to %s', orthomcl_credentials_file)\n\n # Parse configuration file\n config = SafeConfigParser()\n config.read(orthomcl_credentials_file)\n host = config.get('mysql', 'host')\n port = config.getint('mysql', 'port')\n user = config.get('mysql', 'user')\n passwd = config.get('mysql', 'pass')\n\n # Fall back to environment value for password when available\n if passwd == 'pass' and 'mysql_password' in os.environ:\n passwd = os.environ['mysql_password']\n\n return Credentials(host, port, user, passwd)",
"def get_credentials():\n credentials_path = os.path.join(CREDENTIALS_DIR, CREDENTIALS_FILE)\n store = oauth2client.file.Storage(credentials_path)\n credentials = store.locked_get()\n\n if not credentials or credentials.invalid:\n client_secret_path = os.path.join(CREDENTIAL_DIR, CLIENT_SECRET_FILE)\n flow = client.flow_from_clientsecrets(client_secret_path, \n scope='https://www.googleapis.com/auth/admin.directory.resource.calendar',\n redirect_uri='urn:ietf:wg:oauth:2.0:oob')\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n\n print(\"Storing credentials to: \" + credentials_path)\n\n\n return credentials",
"def _get_credential(self):\n creds = None\n\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', self.config['SCOPES'])\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n self.service = build('drive', 'v3', credentials=creds)",
"def get_credentials(self):\n try:\n with open(self.credentials_file, 'r') as fh_credentials:\n credentials_dict = json.loads(fh_credentials.read())\n return credentials_dict\n except IOError:\n self.reset_credentials()\n with open(self.credentials_file, 'r') as fh_credentials:\n return json.loads(fh_credentials.read())",
"def get_credentials():\n\n SCOPES = 'https://www.googleapis.com/auth/gmail.readonly '\n\n while not os.path.exists(args.clientSecretFile):\n logging.fatal(\"Client secrets file does not exist: %s . You probably need to download this from the Google API console.\", args.clientSecretFile)\n sleep(10)\n\n credentials = None\n\n if os.path.exists(args.credentialsPath):\n credentials = Credentials.from_authorized_user_file(args.credentialsPath, SCOPES)\n\n if not credentials or not credentials.valid:\n flow = InstalledAppFlow.from_client_secrets_file(args.clientSecretFile, SCOPES)\n flow.user_agent = 'prometheus-gmail-exporter'\n\n credentials = flow.run_local_server(port=args.oauthBindPort, bind_addr = args.oauthBindAddr, host = args.oauthHost)\n #credentials = flow.run_local_server()\n\n logging.info(\"Storing credentials to %s\", args.credentialsPath)\n\n with open(args.credentialsPath, 'w', encoding='utf8') as token:\n token.write(credentials.to_json())\n\n\n return credentials",
"def get_credentials(path='~/.pgpass', db=DB):\n\n # Load credentials from path\n with open(os.path.expanduser(path), 'r') as file:\n host, port, _, user, password = file.read().strip().split(':')\n \n return host, port, user, password, db",
"def get_credentials() -> client.Credentials:\n\n credential_path = os.path.join(HOME_DIR, \"google-credentials.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(os.path.join(HOME_DIR, CLIENT_SECRET_FILE), SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n # This attempts to open an authorization page in the default web browser, and asks the user\n # to grant the bot access to their data. If the user grants permission, the run_flow()\n # function returns new credentials.\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print(\"Storing credentials to \" + credential_path)",
"def credentials_from_cmd(self):\n username = raw_input(\"Email:\")\n pw = getpass.getpass()\n return username, pw",
"def get_credentials(account):\n credential_dir = os.path.join(HOME_DIR, META_DIR, account, \"credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'pyDrive.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self, **kwargs):\n creds_file = os.path.join(kwargs['user_dir'], 'credentials.json')\n\n # Getting credentials from Storage\n store = file.Storage(creds_file)\n creds = store.get()\n\n # Validating or refreshing credentials, if necessary\n if creds is None or creds.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n creds = tools.run_flow(flow, store)\n elif creds.access_token_expired:\n creds.refresh(httplib2.Http())\n else:\n pass\n\n return creds",
"def getCredentials(self):\n if self.result(): # Accepted?\n username = self.username_le.text()\n password = \"\"\n if self.askpassword:\n password = self.password_le.text()\n\n return username, password\n\n raise CredentialDialogReject()",
"def get_credentials(provider, filename):\n\n import configparser\n from getpass import getpass\n cp = configparser.ConfigParser()\n cp.read(filename)\n provider = 'switch'\n return (cp.get(provider, 'project') + ':' + cp.get(provider, 'username'), getpass(), cp.get(provider, 'region'),\n cp.get(provider, 'keypair'), cp.get(provider, 'secgrp'))",
"def get_credentials(self):\n credential_path = 'annette/data/gmail-credentials.json'\n\n store = Storage(credential_path)\n credentials = store.get()\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file, self.scopes)\n flow.user_agent = self.application_name\n credentials = tools.run_flow(flow, store, flags)\n _utils.logger.debug('Storing credentials to ' + credential_path)\n\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('gmail', 'v1', http=http, cache_discovery=False)\n return service"
] | [
"0.739547",
"0.7365536",
"0.73126113",
"0.73126113",
"0.7311609",
"0.7309989",
"0.7300369",
"0.7288538",
"0.72769576",
"0.72184205",
"0.71966267",
"0.71909285",
"0.71909285",
"0.71909285",
"0.71909285",
"0.71909285",
"0.7185963",
"0.71790165",
"0.71647274",
"0.71396244",
"0.71333355",
"0.7099693",
"0.709013",
"0.7079833",
"0.706036",
"0.7058538",
"0.7058538",
"0.7058538",
"0.7058538",
"0.70563006",
"0.7049346",
"0.7049346",
"0.70405847",
"0.7029963",
"0.70280796",
"0.70280796",
"0.7025201",
"0.70192415",
"0.7006583",
"0.69966793",
"0.69869316",
"0.69866264",
"0.69807357",
"0.6975652",
"0.69658357",
"0.6961238",
"0.6956743",
"0.6931868",
"0.6930298",
"0.6911417",
"0.6885136",
"0.6882803",
"0.6882803",
"0.6882803",
"0.6882803",
"0.6877156",
"0.68757623",
"0.68623924",
"0.6855999",
"0.68471974",
"0.68469745",
"0.68469745",
"0.6846673",
"0.68406063",
"0.68188244",
"0.68122035",
"0.679408",
"0.6779516",
"0.6766271",
"0.67619413",
"0.6756963",
"0.6753811",
"0.67413646",
"0.67400444",
"0.6722073",
"0.67207795",
"0.6719508",
"0.6700156",
"0.6675082",
"0.6660046",
"0.66545224",
"0.6630009",
"0.6624818",
"0.66120774",
"0.6597216",
"0.6588898",
"0.65724075",
"0.65577364",
"0.65347534",
"0.6482794",
"0.64821416",
"0.64748675",
"0.6465308",
"0.64476365",
"0.6427367",
"0.641554",
"0.6414604",
"0.6376088",
"0.63666177",
"0.6350414"
] | 0.857187 | 0 |
Method to get the credentials from the command line | def credentials_from_cmd(self):
username = raw_input("Email:")
pw = getpass.getpass()
return username, pw | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _config_credentials_get():\n user = input(\"username:\")\n password = getpass.getpass()\n url = input(\"url:\")\n return user, password, url",
"def get_credentials(options, environment):\n if options[\"--username\"] or options[\"--auth\"]:\n if not options[\"--username\"]:\n options[\"<username>\"] = lib.prompt(\n \"Please enter the username for %s...\" % environment\n )\n if not options[\"--password\"]:\n options[\"<password>\"] = lib.prompt(\n \"Please enter the password for %s...\" % environment, secret=True\n )\n return options",
"def get_credentials(commandline_flags=None):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if commandline_flags:\n credentials = tools.run_flow(flow, store, commandline_flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n username = input(\"Username: \")\n password = getpass.getpass(prompt='Password: ')\n return username, password",
"def get_creds_from_args(args):\n if args.prefs:\n (jamf_url, jamf_user, jamf_password) = get_credentials(args.prefs)\n else:\n jamf_url = \"\"\n jamf_user = \"\"\n jamf_password = \"\"\n\n # CLI arguments override any values from a prefs file\n if args.url:\n jamf_url = args.url\n elif not jamf_url:\n jamf_url = input(\"Enter Jamf Pro Server URL : \")\n if args.user:\n jamf_user = args.user\n elif not jamf_user:\n jamf_user = input(\n \"Enter a Jamf Pro user with API rights to upload a package : \"\n )\n if args.password:\n jamf_password = args.password\n elif not jamf_password:\n jamf_password = getpass.getpass(\n \"Enter the password for '{}' : \".format(jamf_user)\n )\n\n # encode the username and password into a basic auth b64 encoded string so that we can get the session token\n enc_creds = encode_creds(jamf_user, jamf_password)\n\n return jamf_url, jamf_user, jamf_password, enc_creds",
"def get_args():\n parser = build_arg_parser()\n\n args = parser.parse_args()\n\n return prompt_for_password(args)",
"def GetUserCredentials():\n email = options.email\n if email is None:\n email = GetEmail(\"Email (login for uploading to %s)\" % options.server)\n password = getpass.getpass(\"Password for %s: \" % email)\n return (email, password)",
"def get_credentials():\n try:\n import argparse\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n except ImportError:\n flags = None\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(GoogleGsuiteAPI.CLIENT_SECRET_FILE, GoogleGsuiteAPI.SCOPES)\n flow.user_agent = GoogleGsuiteAPI.APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def getconfig(self):\n self.cmdargs.parse_args(self.args)\n config = self._getconfig(self.sources)\n\n if self.needlogin:\n config.credentials = { \n k: getattr(config, self.credentialKey[k].name)\n for k in self.authenticatorInfo.getCredentialKeys(config.auth)\n }\n\n config._freeze_varnames()\n return (self.client, config)",
"def _get_credentials(flags):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-visualizerhelptext.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def credentials():\n\n username = os.environ.get('OS_USERNAME')\n password = os.environ.get('OS_PASSWORD')\n tenant_name = (os.environ.get('OS_TENANT_NAME') or\n os.environ.get('OS_PROJECT_NAME'))\n auth_url = os.environ.get('OS_AUTH_URL')\n\n config = configparser.RawConfigParser()\n if config.read(_CREDS_FILE):\n username = username or config.get('admin', 'user')\n password = password or config.get('admin', 'pass')\n tenant_name = tenant_name or config.get('admin', 'tenant')\n auth_url = auth_url or config.get('auth', 'uri')\n\n return {\n 'username': username,\n 'password': password,\n 'tenant_name': tenant_name,\n 'uri': auth_url\n }",
"def get_args(self):\n args = self._parser.parse_args()\n return self._prompt_for_password(args)",
"def get_credentials(self):\r\n \r\n try:\r\n import argparse\r\n #flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\r\n if self.noauth == True:\r\n flags = tools.argparser.parse_args(args=['--noauth_local_webserver'])\r\n else:\r\n flags = tools.argparser.parse_args(args=[])\r\n except ImportError:\r\n flags = None \r\n \r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'sheets.googleapis.com-allstarbot.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n secret = Path(self.CLIENT_SECRET_FILE)\r\n if secret.exists():\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n else:\r\n print(\"client_secret.json not found, using env vars\")\r\n if not os.environ.get('client_id') or not os.environ.get('client_secret'): \r\n print(\"env vars client_id and client_secret not found. canceling\")\r\n raise Exception(\"client secret error\")\r\n else:\r\n flow = OAuth2WebServerFlow(\r\n os.environ.get('client_id'),\r\n os.environ.get('client_secret'),\r\n self.SCOPES) \r\n \r\n flow.params['access_type'] = 'offline'\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_credentials( flags=None ):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def getcreds():\n global user\n global password\n if not user:\n user = input(\"Please enter your username:\\n\")\n if not password:\n password = getpass.getpass(\"Please enter password:\\n\")",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'grader.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, tools.argparser.parse_args(args=[]))\n print('Storing credentials to ' + credential_path)\n return credentials",
"def credentials(self) -> Optional[pulumi.Input['CredentialsArgs']]:\n return pulumi.get(self, \"credentials\")",
"def get_credentials():\n # normal, sane way of doing this that really shouldn't be changed\n #home_dir = os.path.expanduser('~')\n #credential_dir = os.path.join(home_dir, '.credentials')\n #if not os.path.exists(credential_dir):\n # os.makedirs(credential_dir)\n #credential_path = os.path.join(credential_dir,'calendar-python-quickstart.json')\n\n # stupid hacky way that I came up with to fix an issue with running this app as root\n credential_path = os.path.join('./credentials','calendar-python-quickstart.json') \n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(config['client secret file'], SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'bis-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_creds():\n with open(CREDS_PATH, 'r') as creds_file:\n creds = json.load(creds_file)\n return creds['uname'], creds['pword']",
"def get_credentials(self):\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir, self.CRED_FILENAME)\r\n \r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_credentials():\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-showtime.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n print('Storing credentials to ' + credential_path)\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n #home_dir = os.path.expanduser('~')\n home_dir = os.path.expanduser('/home/pi/')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser(os.getcwd())\n credential_dir = os.path.join(home_dir, '.credentials')\n print(credential_dir)\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'drive-python-quickstart.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def _get_credentials(self):\n if self.config_file:\n with open(self.config_file) as f:\n config_str = f.read()\n credentials_dict = json.loads(config_str)\n self.credentials = credentials_dict[self.account][self.auth_type]\n else:\n self.credentials = {\n \"account\": os.environ.get('SNOWSQL_ACCOUNT'),\n \"user\": os.environ.get('SNOWSQL_USER'),\n \"password\": os.environ.get('SNOWSQL_PWD')\n }",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sally.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def credentials_from_rc(self):\n mprc_filename = os.environ[\"HOME\"]+'/.mofplusrc'\n with open(mprc_filename, 'r') as mprc:\n username = mprc.readline().split()[0]\n pw = mprc.readline().split()[0]\n return username, pw",
"def get_credentials(provider, filename):\n\n import configparser\n from getpass import getpass\n cp = configparser.ConfigParser()\n cp.read(filename)\n provider = 'switch'\n return (cp.get(provider, 'project') + ':' + cp.get(provider, 'username'), getpass(), cp.get(provider, 'region'),\n cp.get(provider, 'keypair'), cp.get(provider, 'secgrp'))",
"def newcred(self):\n return {'login': input('username: '),\n 'password': getpass.getpass()}",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n store = Storage(CREDENTIAL_PATH)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, None)\n return credentials",
"def main(username, pw):\n pass",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(args.clientSecretFile, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n home_dir = os.path.expanduser(\"~\")\n credential_dir = os.path.join(home_dir, \".credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, \"autoto.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, self.auth_flags)\n print(\"Storing credentials to \" + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'reseller-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'credentialv_modify.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_args():\n\n parser = argparse.ArgumentParser(description=\"Get DC, Clusters, Hosts and VM in JSON.\")\n parser.add_argument('-H', '--host', nargs=1, required=True, help='The vCenter to connect to',\n dest='host', type=str)\n parser.add_argument('-p', '--password', nargs=1, required=False,\n help='The password with which to connect to the VC. If not specified, the user is prompted at runtime for a password',\n dest='password', type=str)\n parser.add_argument('-u', '--user', nargs=1, required=True, help='The username with which to connect to the host',\n dest='username', type=str)\n args = parser.parse_args()\n return args",
"def cfg_credentials(context):\n arguments = {\n '--config': context.config_file,\n 'authorize': False,\n 'account_summary': False\n }\n pychex_cli = PychexCli(arguments)\n pychex_cli.read_config()\n # Check that the values pulled from the read_config method match what we\n # know\n print(pychex_cli.username)\n assert pychex_cli.username == context.username\n assert pychex_cli.security_image_path == context.security_image_path\n assert pychex_cli.password == context.password\n # Check that the unencrypted values are not present\n with open(arguments['--config']) as cfg:\n cfg_txt = cfg.read()\n assert cfg_txt.find(context.username) == -1\n assert cfg_txt.find(context.security_image_path) == -1\n assert cfg_txt.find(context.password) == -1",
"def getCredentials(self):\n if self.result(): # Accepted?\n username = self.username_le.text()\n password = \"\"\n if self.askpassword:\n password = self.password_le.text()\n\n return username, password\n\n raise CredentialDialogReject()",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n return self.credentials",
"def get_credentials():\n store = Storage(CLIENT_CREDENTIALS_FILE)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + CLIENT_CREDENTIALS_FILE)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'thejam_calendar.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n credential_path = 'annette/data/gmail-credentials.json'\n\n store = Storage(credential_path)\n credentials = store.get()\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file, self.scopes)\n flow.user_agent = self.application_name\n credentials = tools.run_flow(flow, store, flags)\n _utils.logger.debug('Storing credentials to ' + credential_path)\n\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('gmail', 'v1', http=http, cache_discovery=False)\n return service",
"def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'calendar-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def _get_credentials(self):\n\n scopes = 'https://www.googleapis.com/auth/drive'\n client_secret_file = '%s/config/client_secret.json' % PROJECT_DIR\n application_name = 'Drive API Quickstart'\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n\n credential_path = os.path.join(credential_dir, 'drive-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(client_secret_file, scopes)\n flow.user_agent = application_name\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n\n return credentials",
"def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'gmail-python-spam-filter.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials",
"def get_creds(\n config: Config=default):\n config_path = config.credential_path\n scopes = config.scopes\n\n logger.info('loading token')\n logger.debug(f'config_path: {config_path}')\n config_path = Path(config_path).expanduser()\n store = file.Storage(config_path/'token.json')\n creds = store.get()\n\n if not creds or creds.invalid:\n # Ask the user to give the correct permissions.\n logger.info('loading credentials')\n flow = client.flow_from_clientsecrets(\n config_path/'client_id.json',\n scopes)\n\n arguments = sys.argv\n sys.argv = sys.argv[0:1]\n # This line is why we need to remove the arguments from sys.argv\n # If you find a better way to get it to work, i'm buying it\n creds = tools.run_flow(flow, store)\n sys.argv = arguments\n\n return creds",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n\n SCOPES = 'https://www.googleapis.com/auth/gmail.readonly '\n\n while not os.path.exists(args.clientSecretFile):\n logging.fatal(\"Client secrets file does not exist: %s . You probably need to download this from the Google API console.\", args.clientSecretFile)\n sleep(10)\n\n credentials = None\n\n if os.path.exists(args.credentialsPath):\n credentials = Credentials.from_authorized_user_file(args.credentialsPath, SCOPES)\n\n if not credentials or not credentials.valid:\n flow = InstalledAppFlow.from_client_secrets_file(args.clientSecretFile, SCOPES)\n flow.user_agent = 'prometheus-gmail-exporter'\n\n credentials = flow.run_local_server(port=args.oauthBindPort, bind_addr = args.oauthBindAddr, host = args.oauthHost)\n #credentials = flow.run_local_server()\n\n logging.info(\"Storing credentials to %s\", args.credentialsPath)\n\n with open(args.credentialsPath, 'w', encoding='utf8') as token:\n token.write(credentials.to_json())\n\n\n return credentials",
"def get_credentials(self):\n #\n # Why is this not read from the yaml file?\n path = Path(path_expand(self.credential_file)).resolve()\n if not os.path.exists(path):\n os.makedirs(path)\n\n credentials_path = (path / 'google-drive-credentials.json').resolve()\n print(credentials_path)\n\n store = Storage(credentials_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n flow.user_agent = self.application_name\n #\n # SHOUDL THE FLAGS NOT BE SET IN THE YAML FILE OR DOCOPTS OFTHE COMMAND?\n #\n if self.flags:\n credentials = tools.run_flow(flow, store, self.flags)\n\n return credentials",
"def credentials(self) -> pulumi.Input['ContainerRegistryBasicCredentialsArgs']:\n return pulumi.get(self, \"credentials\")",
"def get_credentials():\n home_dir = './ignore' #os.path.expanduser('./')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = './ignore' #os.path.expanduser('./')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def credentials(self):\n return CurrentProject().config.credentials[self.key]",
"def get_credentials(service_name=\"dataforSeo\", uname=\"[email protected]\"):\n pw = keyring.get_password(service_name, uname)\n return [uname, pw]",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'clockwise.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(args, my_dirname):\n\n credential_dir = os.path.join(my_dirname, '.credentials')\n if not os.path.exists(credential_dir):\n os.mkdir(credential_dir)\n credential_path = os.path.join(credential_dir, 'sheets.googleapis.com-cotus-checker.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n try:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, args)\n print('Storing credentials to ' + credential_path)\n except (oauth2client.clientsecrets.InvalidClientSecretsError, json.decoder.JSONDecodeError):\n pass\n return credentials",
"def auth(self):\n return self.creds(\"[email protected]\", cookie=\"USERTOKEN: authcookie\")",
"def get_cli_arguments(self):\n pass",
"def get_credentials(path='~/.pgpass', db=DB):\n\n # Load credentials from path\n with open(os.path.expanduser(path), 'r') as file:\n host, port, _, user, password = file.read().strip().split(':')\n \n return host, port, user, password, db",
"def get_credentials(self):\n home_dir = os.path.expanduser('~')\n # credential_dir = os.path.join(home_dir, '.credentials')\n credential_dir = '.credentials'\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(GoogleDocsConverter.CLIENT_SECRET_FILE, GoogleDocsConverter.SCOPES)\n flow.user_agent = GoogleDocsConverter.APPLICATION_NAME\n if self.flags:\n credentials = tools.run_flow(flow, store, self.flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials(self):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'homework_logger-gmail-api.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\n flow.user_agent = self.APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n # Get the credential\n if os.path.exists(os.getenv(\"GCP_AUTOMATION_CONFIG\")):\n credential_location = os.getenv(\"GCP_AUTOMATION_CONFIG\")\n with open(credential_location) as f:\n credential_location = json.load(f)\n credential = credential_location['Config'][0]['Authentication']\n log.info(f\"Retrieved credentail location as {credential}\")\n else:\n raise ValueError(\"Error in get_credentials function when calling 'GCP_AUTOMATION_CONFIG'\")\n\n # Construct the credentials request\n try:\n # Turn provided string into a filepath\n credentials = service_account.Credentials.from_service_account_file(\n filename=credential,\n scopes=[\"https://www.googleapis.com/auth/cloud-platform\"],\n )\n log.info(\"Credentials object constructed from service account file\")\n return credentials\n except Exception as e:\n return e",
"def get_credentials_from_file(credentials_file):\n # Change the scope username and password variables to global\n global username\n global password\n try:\n # Open and reads the credentials.pwd file and save the lines in the username and password\n with open(os.path.dirname(__file__) + credentials_file) as credential_file:\n credentials = credential_file.readlines()\n username = credentials[0].strip()\n password = credentials[1].strip()\n\n credential_file.close()\n except FileNotFoundError as error:\n print(error)\n sys.exit(1)",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def getCredentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def getCredentials(flags=None):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = oafile.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n try:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n except oauth2client.clientsecrets.InvalidClientSecretsError:\n raise oauth2client.clientsecrets.InvalidClientSecretsError(\n \"The client secret file couldn't be found. Go to: https://developers.google.com/drive/v3/web/quickstart/python\")\n\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def readopts(self):\n parser = OptionParser()\n parser.add_option(\"--dbname\", action=\"store\", type=\"string\", dest=\"dbname\", default=None)\n\n parser.add_option(\"--user\",\n action=\"store\",\n type=\"string\",\n dest=\"user\",\n default=None)\n\n parser.add_option(\"--password\",\n action=\"store\",\n type=\"string\",\n dest=\"password\",\n default=None)\n\n parser.add_option(\"--host\",\n action=\"store\",\n type=\"string\",\n dest=\"host\",\n default=None)\n\n parser.add_option(\"--port\",\n action=\"store\",\n type=\"string\",\n dest=\"port\",\n default=None)\n\n (options, args) = parser.parse_args()\n\n if options.dbname is None:\n print \"dbname is mandatory\"\n exit(1)\n\n conf = \"dbname=%s\" % options.dbname\n for parm in ['user', 'password', 'host', 'port']:\n if options.__dict__[parm] is not None:\n conf = \"%s %s=%s\" % (conf, parm, options.__dict__[parm])\n return conf",
"def get_credentials(server: str) -> Tuple[str, int, str]:\n\n try:\n host, port, passwd = Credentials.from_string(server)\n except InvalidCredentials:\n try:\n host, port, passwd = CONFIG.servers[server]\n except KeyError:\n LOGGER.error('No such server: %s.', server)\n exit(2)\n\n if passwd is None:\n try:\n passwd = getpass('Password: ')\n except (KeyboardInterrupt, EOFError):\n print()\n LOGGER.error('Aborted by user.')\n exit(3)\n\n return (host, port, passwd)",
"def check_for_credential_file(self):\r\n if 'AWS_CREDENTIAL_FILE' in os.environ:\r\n path = os.environ['AWS_CREDENTIAL_FILE']\r\n path = os.path.expanduser(path)\r\n path = os.path.expandvars(path)\r\n if os.path.isfile(path):\r\n fp = open(path)\r\n lines = fp.readlines()\r\n fp.close()\r\n for line in lines:\r\n if line[0] != '#':\r\n if '=' in line:\r\n name, value = line.split('=', 1)\r\n if name.strip() == 'AWSAccessKeyId':\r\n if 'aws_access_key_id' not in self.args:\r\n value = value.strip()\r\n self.args['aws_access_key_id'] = value\r\n elif name.strip() == 'AWSSecretKey':\r\n if 'aws_secret_access_key' not in self.args:\r\n value = value.strip()\r\n self.args['aws_secret_access_key'] = value\r\n else:\r\n print 'Warning: unable to read AWS_CREDENTIAL_FILE'",
"def get_credentials():\n credential_dir = os.getcwd()\n credential_path = os.path.join(credential_dir,\n 'smarking_error_check.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def _set_credentials(args):\n if hasattr(args, 'username') and hasattr(args, 'apikey') \\\n and args.username and args.apikey:\n config.update({'username': args.username})\n config.update({'apikey': args.apikey})\n elif os.path.exists(os.path.expanduser('~/.jarvice.cfg')):\n CParser = configparser.ConfigParser()\n CParser.read([os.path.expanduser('~/.jarvice.cfg'), ])\n config.update({'username': CParser.get('auth', 'username')})\n config.update({'apikey': CParser.get('auth', 'apikey')})\n else:\n sys.stderr.write(\"username and apikey must be passed as arguments \" \n \"or set in ~/.jarvice.cfg\")\n sys.exit(1)",
"def get_credentials():\n credential_dir = os.path.realpath('.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path) # stores the users credentials --> TODO: put in database\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n\n credentials = tools.run_flow(flow, store, flags)\n\n print('Storing credentials to ' + credential_path)\n return credentials",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'admin-directory_v1-NestedGroupSync.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print 'Storing credentials to' + credential_path\n return credentials",
"def test_getcredentials_from_env(netrc):\n netrc.side_effect = FileNotFoundError(\"\")\n server = KasServer()\n assert server._username == USERNAME\n assert server._password == PASSWORD",
"def credential(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"credential\")",
"def GetUserCredentials(self):\r\n # Create a local alias to the email variable to avoid Python's crazy\r\n # scoping rules.\r\n global keyring\r\n email = self.email\r\n if email is None:\r\n email = GetEmail(\"Email (login for uploading to %s)\" % self.server)\r\n password = None\r\n if keyring and not email in self.accounts_seen:\r\n try:\r\n password = keyring.get_password(self.host, email)\r\n except:\r\n # Sadly, we have to trap all errors here as\r\n # gnomekeyring.IOError inherits from object. :/\r\n print \"Failed to get password from keyring\"\r\n keyring = None\r\n if password is not None:\r\n print \"Using password from system keyring.\"\r\n self.accounts_seen.add(email)\r\n else:\r\n password = getpass.getpass(\"Password for %s: \" % email)\r\n if keyring:\r\n answer = raw_input(\"Store password in system keyring?(y/N) \").strip()\r\n if answer == \"y\":\r\n keyring.set_password(self.host, email, password)\r\n self.accounts_seen.add(email)\r\n return (email, password)",
"def __get_credentials_from_config(self):\n cr = ConfigFileReader()\n\n self.username = cr.get_value(Config.EDUROAM_USER)\n debug(\"Username set to : \" + self.username)\n self.password = cr.get_value(Config.EDUROAM_PWD)",
"def get_credentials(self, **kwargs):\n creds_file = os.path.join(kwargs['user_dir'], 'credentials.json')\n\n # Getting credentials from Storage\n store = file.Storage(creds_file)\n creds = store.get()\n\n # Validating or refreshing credentials, if necessary\n if creds is None or creds.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n creds = tools.run_flow(flow, store)\n elif creds.access_token_expired:\n creds.refresh(httplib2.Http())\n else:\n pass\n\n return creds",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'client_secret_OCR.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n print(\"Current folder: \" + os.getcwd())\n flow = client.flow_from_clientsecrets(\n \"../../\" + CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def _add_cred_variables(self):\n self.credentialKey = {}\n authInfo = None\n if self.client:\n try:\n authInfo = self.client.getAuthenticatorInfo()\n except VersionMethodError:\n pass\n authArgOpts = dict(help=\"authentication plugin\")\n if authInfo:\n self.authenticatorInfo = AuthenticatorInfo(authInfo)\n authArgOpts['choices'] = self.authenticatorInfo.getAuthNames()\n else:\n self.authenticatorInfo = LegacyAuthenticatorInfo()\n\n var = self.add_variable('auth', (\"-a\", \"--auth\"), authArgOpts,\n envvar='ICAT_AUTH')\n var.postprocess = _post_auth\n for key in self.authenticatorInfo.getCredentialKeys(hide=False):\n self._add_credential_key(key)\n hidden = self.authenticatorInfo.getCredentialKeys(hide=True)\n if hidden:\n var = self.add_variable('promptPass', (\"-P\", \"--prompt-pass\"), \n dict(help=\"prompt for the password\", \n action='store_const', const=True), \n type=boolean, default=False)\n var.postprocess = _post_promptPass\n for key in hidden:\n self._add_credential_key(key, hide=True)",
"def get_creds():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('inputs/token.pickle'):\n with open('inputs/token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'inputs/credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('inputs/token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n return creds",
"def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'google-photos-stats.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, flags)\n print('Storing credentials to ' + credential_path)\n return credentials",
"def read_command_line_arguments() -> Tuple[\n LocalConfig, AuthConfig, Optional[List[str]]\n]:\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"config\", type=str, help=\"Path to the main config file\"\n )\n parser.add_argument(\n \"auth\", type=str, help=\"Path to the authentication config file\"\n )\n parser.add_argument(\n \"--execute-now\",\n type=str,\n help=\"\"\"A set of channel names to execute immediately, or none to\n determine automatically based on the current time.\"\"\",\n nargs=\"*\",\n choices=notification_channels.keys(),\n )\n args = parser.parse_args()\n\n config_file = read_local_config(args.config)\n auth_file = read_local_auth(args.auth)\n\n return config_file, auth_file, args.execute_now"
] | [
"0.7299473",
"0.7106037",
"0.6985046",
"0.6973925",
"0.6960516",
"0.6940761",
"0.6813968",
"0.67990017",
"0.6796827",
"0.6795659",
"0.6782839",
"0.67600983",
"0.6745715",
"0.6667258",
"0.6666305",
"0.66566455",
"0.662464",
"0.66160524",
"0.65844256",
"0.6557866",
"0.65372974",
"0.6520266",
"0.65107536",
"0.6488358",
"0.6486944",
"0.6486944",
"0.64834213",
"0.6466547",
"0.6453333",
"0.6443119",
"0.6438202",
"0.6434004",
"0.6429035",
"0.64234316",
"0.6414504",
"0.6414504",
"0.63976866",
"0.6394447",
"0.63826126",
"0.63785636",
"0.6377502",
"0.63731056",
"0.636102",
"0.6348426",
"0.6334085",
"0.63302755",
"0.63302755",
"0.63302755",
"0.63302755",
"0.63302755",
"0.6322522",
"0.63205427",
"0.63186115",
"0.6306715",
"0.6300275",
"0.6298954",
"0.6279193",
"0.62621635",
"0.62586194",
"0.6254924",
"0.6254924",
"0.6254924",
"0.6254924",
"0.62407964",
"0.62387925",
"0.62303424",
"0.622938",
"0.622938",
"0.622291",
"0.62158567",
"0.6209483",
"0.6199801",
"0.6193652",
"0.6187446",
"0.61851025",
"0.6175183",
"0.6169814",
"0.6150177",
"0.6146438",
"0.6138967",
"0.6130333",
"0.6117864",
"0.6110972",
"0.61064637",
"0.6085028",
"0.60832506",
"0.60578954",
"0.6056485",
"0.6045192",
"0.6042995",
"0.60341316",
"0.6033234",
"0.6031355",
"0.6025983",
"0.60250795",
"0.5990754",
"0.5987756",
"0.5981032",
"0.59793454",
"0.5960329"
] | 0.74783903 | 0 |
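Nearly all of the get_credentials negatives above rely on the deprecated oauth2client Storage/run_flow pattern. For reference, a minimal sketch of the equivalent token-caching flow on the maintained google-auth and google-auth-oauthlib packages; the file names (token.json, client_secret.json) and the Gmail scope are illustrative placeholders, not values taken from any snippet above:

import os
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow

SCOPES = ["https://www.googleapis.com/auth/gmail.readonly"]  # placeholder scope

def get_credentials(token_path="token.json", secrets_path="client_secret.json"):
    creds = None
    if os.path.exists(token_path):
        creds = Credentials.from_authorized_user_file(token_path, SCOPES)
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())  # silently refresh an expired access token
        else:
            flow = InstalledAppFlow.from_client_secrets_file(secrets_path, SCOPES)
            creds = flow.run_local_server(port=0)  # browser consent on a temporary localhost port
        with open(token_path, "w", encoding="utf8") as token:
            token.write(creds.to_json())  # cache the token for the next run
    return creds

run_local_server(port=0) replaces the tools.run_flow/argparser dance seen in the snippets above by binding a throwaway localhost port for the OAuth redirect.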
Method to check if the connection to MFP is alive | def check_connection(self):
try:
        self.mfp.add(2, 2)  # trivial RPC call used as a liveness probe
logger.info("Connection to user API established")
except xmlrpclib.ProtocolError:
logger.error("Not possible to connect to MOF+. Check your credentials")
exit()
return | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_alive():\n\n ## ---------------------------------------------------------------\n \n cmd = dict()\n cmd[\"type_\"] = \"is_alive\"\n cmd[\"name_\"] = \"\"\n\n s = socket.socket(\n socket.AF_INET,\n socket.SOCK_STREAM\n )\n try:\n s.connect((getml.host, getml.port))\n except ConnectionRefusedError:\n return False\n\n comm.send_string(s, json.dumps(cmd))\n\n s.close()\n\n return True",
"def __CheckConnectStatus(self):\r\n if not self.tn:\r\n print \"Connection is down!\"\r\n return False\r\n else:\r\n print \"Connection is alive!\"\r\n return True",
"def nat_waitforconn_alive():\r\n return NAT_STATE_DATA[\"mux\"] != None and NAT_STATE_DATA[\"mux\"].isAlive()",
"def status_check(self):\n try:\n client = self.connect()\n client.sys.is_initialized() # make an actual network connection\n return True\n except:\n return False",
"def is_alive(self):\n pass",
"def check_availability(self):\n\t\tif not self.connection_is_usable:\n\t\t\treturn False\n\t\twith self.client_lock:\n\t\t\tif self.stream is None:\n\t\t\t\treturn False\n\t\t\tif self.last_ping is None or self.last_ping.age() >= self.ping_max_age:\n\t\t\t\tself.last_ping = SendPing(self, self.ping_timeout)\n\t\t\tlast_ping = self.last_ping\n\t\treturn last_ping.answered(self.ping_timeout)",
"def is_alive(self) -> bool:\n\n\n try:\n self.sock.settimeout(2)\n except OSError:\n\n return False\n\n try:\n self.talk('/system/identity/print')\n\n except (socket.timeout, IndexError, BrokenPipeError):\n\n self.close()\n return False\n\n self.sock.settimeout(None)\n return True",
"def isAlive(self):\r\n # Just use connectionInit, that is our internal variable\r\n return self.connectionInit",
"def is_alive(self):\n ret = subprocess.call(\n shlex.split(\"ping -c 1 -W 2 %s\" % self.ip_address),\n stdout=open('/dev/null', 'w'),\n stderr=subprocess.STDOUT,\n )\n \n if ret == 0:\n return True\n else:\n return False",
"def is_alive(self):\n return True",
"def check_status(self):\n try:\n self.server.ping()\n return True\n except Exception as e:\n return False",
"async def check_connection_status(self):\n while True:\n if not self.connected:\n self.log.error(\"Lost connection to spa, attempting reconnect.\")\n await self.connect()\n await asyncio.sleep(10)\n continue\n if (self.lastupd + 5 * self.sleep_time) < time.time():\n self.log.error(\"Spa stopped responding, requesting panel config.\")\n await self.send_panel_req(0, 1)\n await asyncio.sleep(self.sleep_time)",
"def is_alive(self):\n if self.status == 1:\n return True\n else:\n return False",
"def is_alive(self) -> bool:\n self.check_is_alive()\n return self.__is_alive",
"def check_connection(self):\n return False",
"def is_alive(self):",
"def isConnected(self):\n if self._session is None:\n return False\n return self._session.isalive() is True",
"def check_connection(self):\n pass",
"def check( self ):\n\n if ( self.alive is not None ) \\\n and ( time.time() > ( self.alive + self.timeout ) ):\n return False\n return True",
"def is_connected():\r\n global connection\r\n if connection is None:\r\n return False\r\n else:\r\n return True",
"def is_alive(self):\n conn = HTTPConnection(self.browser.host, self.browser.port)\n conn.request(\"HEAD\", \"/invalid\")\n res = conn.getresponse()\n return res.status == 404",
"def is_connected():\n import socket\n try:\n host = socket.gethostbyname(\"www.gov.uk\")\n socket.create_connection((host, 80), 2)\n return True\n except:\n pass\n return False",
"def isConnected():",
"def is_alive(self, site):\n try:\n return requests.get(site).status_code == 200\n except Exception:\n pass",
"def is_alive(self):\n return self._is_alive",
"def check_connectivity(self):\n return self.connected",
"def is_connected(self):\n return self._connection and self._connection.is_open",
"def health_check(self):\n headers = {\"NDS-Proxy-Ping\": \"NPP\"}\n url = \"http://{host}:{port}/upm\".format(host=self.upm_host, port=self.upm_port)\n is_available, http_code = http_utilities.get(url, headers=headers)\n\n if http_code == 200:\n self.log.info(\"The UPM is available\")\n return True\n else:\n self.log.error(\"The UPM is not available\")\n return False",
"def is_connected(self):\n if self.server: return True\n return False",
"def ping(conn: psycopg2.connect) -> bool:\n\n is_alive = False\n with conn.cursor() as cur:\n cur.execute(\"SELECT 1\")\n if cur.description is not None:\n fetched = cur.fetchall()\n try:\n is_alive = fetched[0][0] == 1\n except IndexError:\n pass\n return is_alive",
"def verify_connection(self):\n return self.device.verify_connection()",
"def is_connected(self):\r\n return self.__socket is not None",
"def is_alive(self):\n return self.alive",
"def is_alive(self):\n return self.alive",
"def is_connected():\n \n try:\n socket.create_connection((\"www.google.com\", 80))\n return True\n except OSError:\n pass\n return False",
"def is_connected(self):\n return \"_connection\" in self.__dict__",
"def __check_ping(self):\n if not self.communications.ping():\n self.communications.ping(True)",
"def is_alive(self):\n if self.device is None:\n return {'is_alive': False}\n try:\n # SSH\n # Try sending ASCII null byte to maintain the connection alive\n null = chr(0)\n self.device.write_channel(null)\n return {\n 'is_alive': self.device.remote_conn.transport.is_active()\n }\n except (socket.error, EOFError, OSError):\n # If unable to send, we can tell for sure that the connection is unusable\n return {'is_alive': False}",
"def CheckWirelessConnectingMessage(self):\n if not self.wifi.connecting_thread == None:\n stat = self.wifi.connecting_thread.GetStatus()\n return stat\n else:\n return False",
"def isconnected(self) -> bool:",
"def is_connected(self):\n\t\tif self._connection is None:\n\t\t\treturn False\n\n\t\treturn True",
"def is_connected(self):\n\t\treturn call_sdk_function('PrlSrv_IsConnected', self.handle)",
"def isAlive(self):\n return self.is_alive()",
"def CheckIfConnecting(self):\n if self.CheckIfWiredConnecting() or self.CheckIfWirelessConnecting():\n return True\n else:\n return False",
"def is_alive(self):\n if (self._s.fileno()>0 and self._running and self._listen):\n return True\n else:\n return False",
"def CheckWiredConnectingMessage(self):\n if self.wired.connecting_thread:\n return self.wired.connecting_thread.GetStatus()\n else:\n return False",
"def ready(self):\n\n if not self.running:\n return False\n\n try:\n response = requests.get(\n 'http://{}:{}/v1/kv/health'.format(\n self.running_host,\n self.running_port\n )\n )\n except requests.ConnectionError:\n return False\n\n if response.status_code == 404:\n return True\n elif response.status_code == 500:\n return False\n else:\n return False",
"def check_conn():\n try:\n urllib2.urlopen(\"http://www.google.com\", timeout=5)\n return True\n except urllib2.URLError:\n pass\n return False",
"def check_port(self):\r\n\t\treturn(self.connect.is_open)",
"def is_open(self):\n\t\treturn self.conn.open",
"def check_ping(self):\n # If we're still connecting, deny the connection\n if self.state == self.STATE_CONNECTING:\n if self.duration() > self.main_factory.websocket_connect_timeout:\n self.serverReject()\n elif self.state == self.STATE_OPEN:\n if (time.time() - self.last_data) > self.main_factory.ping_interval:\n self._sendAutoPing()\n self.last_data = time.time()",
"def check_heartbeat(self):\n return True",
"def is_connected(self):\n return self._socket is not None",
"def _is_alive(self) -> bool:\n\n if self._on:\n return True\n\n try:\n os.kill(self.proc.pid, 0)\n except (OSError, ProcessLookupError):\n return False\n\n return True",
"def is_connected(self):\n if self._socket:\n return True\n else:\n return False",
"def get_connected(self) -> bool:\n try:\n return self._background_process.is_alive()\n except AttributeError:\n return False",
"def checkServerThread(self):\r\n\r\n # check if the server is alive\r\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\r\n\r\n result = 1\r\n try:\r\n result = sock.connect_ex((\"dealookup.com\", 80))\r\n except:\r\n result = 1 \r\n\r\n # server is not live \r\n if result != 0:\r\n result = 1\r\n\r\n self.checkResultSignal.emit(result)",
"def is_connected(self):\n\t\treturn bool(call_sdk_function('PrlVmDev_IsConnected', self.handle))",
"def is_alive(self):\n return hasattr(self, 'alive') and self.alive",
"def is_alive(self):\n return hasattr(self, 'alive') and self.alive",
"def is_connected(self):\n if not self._host:\n if self._database:\n return True\n else:\n return False\n else:\n return self._factory.is_connected()",
"def _CheckConnect(self):\n try:\n resp = requests.get(self._target_url, timeout=2)\n if resp.headers['Maximum-Bytes']:\n self._max_bytes = int(resp.headers['Maximum-Bytes'])\n return resp.status_code == 200\n except requests.exceptions.ConnectionError:\n return False\n except Exception as e:\n self.exception('Unexpected test connect failure: %s', str(e))\n return False",
"def isonline():\n\n conn = httplib.HTTPConnection(\"www.google.com\", timeout=5)\n try:\n conn.request(\"HEAD\", \"/\")\n conn.close()\n return True\n except:\n conn.close()\n return False",
"def isconnected(self) -> bool:\n ...",
"def ready(self):\n\n if not self.running:\n return False\n\n try:\n response = requests.get(\n 'http://{}:{}'.format(\n self.running_host,\n self.running_port\n )\n )\n except requests.ConnectionError:\n return False\n\n if response.status_code == 404:\n return True\n elif response.status_code == 500:\n return False\n else:\n return False",
"def _IsReady(self):\n if self.ip_address is None:\n self._GetIpAddress()\n if self.ip_address is not None:\n url = 'http://%s' % (self.ip_address)\n r = requests.get(url)\n if r.status_code == 200:\n return True\n return False",
"def is_alive(self) -> bool:\n return self._main_thread.is_alive()",
"def alive(self):\n return self._thread.is_alive()",
"def running(self):\n\n return can_connect_to(APIConsumer.host, APIConsumer.port)",
"def connection_open(self):\n return self.conn_status == self.CONN_OPEN",
"def ping(self):\n return True",
"def ping(self):\n return True",
"def is_connected(self, test=False):\n return self._server.is_connected()",
"def connected(self):\n\n if self._connection:\n if self._connection.is_closed == True:\n return False\n else:\n return True\n else:\n return False",
"def isConnected(self):\n\n return self._connection is not None",
"def is_connected(self):\n return self.hub.is_connected and self.client.is_running",
"def isConnected(self):\n return self.__cooperationClient.hasConnections()",
"def is_alive(self):\n return (self.read_name() != '')",
"def alive(self):\n return self._process.is_alive()",
"def _check_for_life_signs(self):\n self._lock.acquire()\n if not self._running.is_set():\n return False\n try:\n if self._writes_since_check == 0:\n self.send_heartbeat()\n if self._reads_since_check == 0:\n self._threshold += 1\n if self._threshold >= 2:\n self._running.set()\n message = (\n 'Connection dead, no heartbeat or data received in >= '\n '%ds' % (\n self._interval * 2\n )\n )\n why = AMQPConnectionError(message)\n if self._exceptions is None:\n raise why\n self._exceptions.append(why)\n return False\n else:\n self._threshold = 0\n finally:\n self._reads_since_check = 0\n self._writes_since_check = 0\n self._lock.release()\n if self._timer:\n self._start_new_timer()\n return True",
"def reconnecting(self) -> bool:",
"def check(self):\n return self.connected",
"def is_alive(self):\n try:\n return self.get_life() > 0\n except KeyError:\n return True",
"def is_open(self):\n result = None\n if(self._type_connection == \"COM\"):\n result = self._connection.is_open\n else:\n msg = \"99, No se puede validar la conexion para {}.\".format(self._type_connection)\n raise ValueError(msg)\n\n return result",
"def mwa_available():\n try:\n urllib2.urlopen(pref('ServerURL'), timeout=1)\n return True\n except urllib2.HTTPError, e:\n if str(e.code) == \"401\":\n return True\n else:\n return False\n except urllib2.URLError as err: \n return False",
"def wifi_connectivity_verify(self):\n self.sendline(\"iw %s link\" % self.iface_wifi)\n matched = self.expect([\"Connected\", \"Not connected\", pexpect.TIMEOUT])\n if matched == 0:\n return True\n else:\n return False",
"def available(self) -> bool:\n return self.thermostat[\"runtime\"][\"connected\"]",
"def is_open(self):\n return self.connection.isOpen()",
"def is_server_alive(self):\n self.log.info('Checking if the server is available via SSH')\n is_available = ssh_utilities.is_server_alive(remote_host=self.upm_host,\n remote_username=self.upm_username,\n remote_password=self.upm_password)\n if not is_available:\n message = 'The server is not available via SSH'\n assert False, message\n self.log.info('The server is available via SSH')\n return True",
"def is_connected(self):\n if self.connected and self.connack_rec:\n return 1\n return 0",
"def available(self):\n from pyhs3 import STATE_LISTENING\n return self._connection.api.state == STATE_LISTENING",
"def alive(p):\n return p.is_alive()",
"def ServerIsReady( self ):\n return self.ServerIsHealthy()",
"def online(self):\n return False",
"def check_socket(self):\n return self.__send_command(cmd=\"PING\")",
"def is_connected(self) -> bool:",
"def is_alive(self) -> bool:\n if self._thread is None:\n return False\n return self._thread.is_alive()",
"def _is_connection_stale(self):\n\n if time.time() - self.last_ping > HEART_BEAT_PING_TIME:\n self._ping()\n\n return (time.time() - self.last_pong) > HEART_BEAT_PING_TIME + HEART_BEAT_PONG_TIME",
"def is_connected(self) -> bool:\n return (\n self._last_seen is not None\n and (dt_util.utcnow() - self._last_seen)\n < self._router.consider_home_interval\n )",
"def central_server_alive(cls, timeout=1):\n central_server_address, _ = cls.get_central_address()\n\n try:\n requests.get(central_server_address, timeout=timeout, verify=False)\n except (Timeout, ConnectionError):\n return False\n\n return True"
] | [
"0.7821559",
"0.7482397",
"0.74798656",
"0.7405552",
"0.7365014",
"0.73151857",
"0.7309089",
"0.72766185",
"0.7247258",
"0.7214135",
"0.7205073",
"0.71816295",
"0.7174634",
"0.71540594",
"0.7139404",
"0.7105354",
"0.71004045",
"0.7088418",
"0.707445",
"0.7042662",
"0.7011688",
"0.7008315",
"0.6983461",
"0.69778585",
"0.69776165",
"0.69519377",
"0.6933308",
"0.6923262",
"0.69153625",
"0.69057024",
"0.69045943",
"0.6894903",
"0.6878601",
"0.6878601",
"0.68698615",
"0.6861249",
"0.68509734",
"0.68373394",
"0.6833134",
"0.6808572",
"0.6802412",
"0.68022233",
"0.67943406",
"0.67716575",
"0.67655176",
"0.6764556",
"0.67261416",
"0.67167044",
"0.6716004",
"0.67012125",
"0.6700439",
"0.6694178",
"0.6690929",
"0.66898465",
"0.66847056",
"0.6681742",
"0.6669915",
"0.6666445",
"0.6658107",
"0.6658107",
"0.66447794",
"0.6640657",
"0.6631116",
"0.66139996",
"0.661072",
"0.6602702",
"0.6602547",
"0.65961456",
"0.65959954",
"0.6595707",
"0.6572985",
"0.6572985",
"0.6569922",
"0.6554582",
"0.6553252",
"0.65454316",
"0.65439266",
"0.6540861",
"0.65408355",
"0.65324455",
"0.65305936",
"0.6522622",
"0.6515601",
"0.651495",
"0.6513985",
"0.65123254",
"0.6511929",
"0.6510295",
"0.6502943",
"0.6501459",
"0.6500716",
"0.64967227",
"0.6495994",
"0.6493523",
"0.6487018",
"0.64800614",
"0.64788914",
"0.6478709",
"0.64754605",
"0.64711434"
] | 0.69009495 | 31 |
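The matched document above calls exit() inside a library method, which kills the whole interpreter on a failed probe. A minimal Python 3 sketch of the same liveness check that reports a boolean instead, assuming an xmlrpc.client.ServerProxy handle like the original's self.mfp (xmlrpc.client is the Python 3 spelling of xmlrpclib, and add(2, 2) mirrors the original's test call):

import logging
import xmlrpc.client

logger = logging.getLogger(__name__)

def check_connection(mfp) -> bool:
    """Probe the MOF+ XML-RPC API with a cheap call and report liveness."""
    try:
        mfp.add(2, 2)  # any inexpensive server-side method works as a ping
    except (xmlrpc.client.ProtocolError, OSError) as err:
        logger.error("Unable to connect to MOF+: %s", err)
        return False
    logger.info("Connection to user API established")
    return True

Returning False lets the caller decide whether to retry, re-authenticate, or abort, rather than hard-coding exit() at the lowest layer.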
Prints the MFP banner | def print_banner(self):
print ":##::::'##::'#######::'########:::::::::::::::'###::::'########::'####:\n\
:###::'###:'##.... ##: ##.....::::'##::::::::'## ##::: ##.... ##:. ##::\n\
:####'####: ##:::: ##: ##::::::::: ##:::::::'##:. ##:: ##:::: ##:: ##::\n\
:## ### ##: ##:::: ##: ######:::'######::::'##:::. ##: ########::: ##::\n\
:##. #: ##: ##:::: ##: ##...::::.. ##.::::: #########: ##.....:::: ##::\n\
:##:.:: ##: ##:::: ##: ##::::::::: ##:::::: ##.... ##: ##::::::::: ##::\n\
:##:::: ##:. #######:: ##:::::::::..::::::: ##:::: ##: ##::::::::'####:\n\
:..:::::..:::.......:::..:::::::::::::::::::..:::::..::..:::::::::....:" | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def print_banner(message):\n\n print(\"#############################################################################\")\n print(message)",
"def banner():\n print \"\"\" \n _____ __ \n |_ _|_ _ ___ / _| __ _ \n | |/ _` / __| |_ / _` |\n | | (_| \\__ \\ _| (_| |\n |_|\\__,_|___/_| \\__,_|\n \n \"\"\"\n print \"Welcome to use am-auto-start!\"\n print \"For more infomation --> www.tasfa.cn!!\"\n print \"<--------------------------------------------------->\"",
"def Banner():\n main_banner = pyfiglet.figlet_format(\" UTM NAT\", font = \"slant\")\n sub_banner1 = pyfiglet.figlet_format(\"tool\", font = \"isometric1\")\n sub_banner2 = \" -Generate a CSV file of Sophos UTM NAT statements-\"\n sub_banner3 = \" via REST API using the power of Python\"\n\n print()\n print('=' * 62)\n print(main_banner)\n print(sub_banner1)\n print()\n print(sub_banner2)\n print(sub_banner3)\n print()\n print('=' * 62)\n print()",
"def present_banner():\n writer(BANNER, FORMAT[\"BANNER\"])\n writer(\" \" * 30 + f\"version {VERSION}\")",
"def show_banner():\n print(\"\"\"\n _ _ _ _ _____ _______\n| | | | / \\ | | |_ _\\ \\ / / ____|\n| |_| | / _ \\ | | | | \\ \\ / /| _|\n| _ |/ ___ \\| |___ | | \\ V / | |___\n|_| |_/_/ \\_\\_____|___| \\_/ |_____|\n\n\nA super fast asynchronous http and https prober, to check who is (h)alive.\nDeveloped by gnc\n \"\"\")",
"def _print_banner(out_file, banner_text):\n banner_separator = \"\".ljust(len(banner_text), \"=\")\n\n out_file.write(\"\\n{}\\n{}\\n{}\\n\".format(\n banner_separator,\n banner_text,\n banner_separator))",
"def banner():\n print(\"\\033[32m\")\n print(\" ___ _ ___ _ _ _\")\n print(\" | _ )_ _ _ _| |_ ___ | __| |_ ___ _ _ _ _ __ _| | | | ___ ___ _ __\")\n print(\" | _ \\ '_| || | _/ -_) | _|| _/ -_) '_| ' \\/ _` | | | |__/ _ \\/ _ \\ '_ \\\\\")\n print(\" |___/_| \\_,_|\\__\\___| |___|\\__\\___|_| |_||_\\__,_|_|_|____\\___/\\___/ .__/\")\n print(\" |___| |_|\")\n print(\"\\033[0m\")",
"def print_banner(text):\n print(Figlet(font='smslant').renderText(text))",
"def print_banner(description):\n banner = len(description)\n if banner > 200:\n banner = 200\n\n # First banner\n print(\"\\n\")\n for _ in range(banner):\n print(\"*\", end=\"\")\n\n # Add description\n print(\"\\n%s\" % description)\n\n # Final banner\n for _ in range(banner):\n print(\"*\", end=\"\")\n print(\"\\n\")",
"def print_header(banner_name):\n print()\n print()\n print(\"----------------------------------------------------\")\n print(\" {0}\".format(banner_name))\n print(\"-----------------------------------------------------\")\n print()",
"def banner(message, border = '-'):\n line = border * len(message)\n print(line)\n print(message)\n print(line)",
"def banner_ascii():\n print(\"\")\n print(f\"\\n{RED} Steganography Tool{RESET}\")\n print(f\"{RED} Made By {RESET}\")\n print(f\"{RED} Ehthe Samul Islam Laskar USN:1DS16CS712 {RESET}\")\n print(f\"{RED} B Padma USN:1DS19CS420{RESET}\")\n print(f\"{RED} Nikhil D Kanyal USN:1DS17CS731{RESET}\")\n print(f\"{YELLOW}Type 'help' to see commands{RESET}\")",
"def splash_screen():\n figlet = Figlet(font=\"slant\")\n banner = figlet.renderText(\"TechX API Gateway\")\n print(banner)\n print(\"[+] 2020 TechX API Gateway www.cisco.com\\n\")",
"def welcome_banner():\n print('\\t*' * 10)\n print('\\t\\tWelcome!')\n print('\\tPut your knowledge to the test with this Ultimate Quiz Questions!')\n print('\\t*' * 10)\n print()",
"def print_banner(dog=True):\n banner = \"\"\n if dog:\n banner += \" ____,'`-,\\n\"\n banner += \" _,--' ,/::.;\\n\"\n banner += \" ,-' ,/::,' `---.___ ___,_\\n\"\n banner += \" | ,:';:/ ;'\\\"';\\\"`--./ ,-^.;--.\\n\"\n banner += \" |: ,:';,' ' `. ;` `-.\\n\"\n banner += \" \\\\:.,:::/;/ -:. ` | ` `-.\\n\"\n banner += \" \\\\:::,'//__.; ,; , , :.`-. :. | ; :.\\n\"\n banner += \" \\\\,',';/O)^. :' ; : '__` ` :::`. .:' )\\n\"\n banner += \" |,' |\\\\__,: ; ; '/O)`. :::`; ' ,'\\n\"\n banner += \" |`--'' \\\\__,' , ::::( ,'\\n\"\n banner += \" ` , `--' ,: :::,'\\\\ ,-'\\n\"\n banner += \" | ,; , ,::' ,::: |,'\\n\"\n banner += \" |,: .( ,:::| `\\n\"\n banner += \" ::'_ _ :: ,::/:|\\n\"\n banner += \" ,',' `-' \\\\ `. ,:::/,:|\\n\"\n banner += \" | : _ _ | ' ,::,' :::\\n\"\n banner += \" | \\\\ O`'O ,', , :,' ;::\\n\"\n banner += \" \\\\ `-'`--',:' ,' , ,,' ::\\n\"\n banner += \" ``:.:.__ ',-',' ::'\\n\"\n banner += \" -hrr- `--.__, ,::. ::'\\n\"\n banner += \" |: ::::. ::'\\n\"\n banner += \" |: :::::: ,::'\\n\"\n banner += \"########################################################\\n\"\n banner += \"# ruffer-overflow v0.2 #\\n\"\n banner += \"# don't \\\"bark\\\" up the wrong tree. #\\n\"\n banner += \"#======================================================#\\n\"\n banner += \"# weak-sauce tool for buffer-overflow #\\n\"\n banner += \"# please don't crime with it. #\\n\"\n banner += \"########################################################\\n\"\n print(banner)",
"def banner(self, banner):\n self._banner = banner",
"def banner():\n\n def random_color():\n valid_colors = (\"red\", \"green\", \"yellow\", \"blue\", \"magenta\", \"cyan\")\n return random.choice(valid_colors)\n\n autoRecon = rf\"\"\"\n _____________ ____ ________________\n /___/___ \\ / / | /___/__ \\ Mr.P-Millz _____\n O.G./ / _ \\______/__/ |______|__|_____ * \\_________________/__/ |___\n __/__/ /_\\ \\ | | \\ __\\/ _ \\| | __/ __ \\_/ ___\\/ _ \\| |\n | | ___ \\| | /| | ( |_| ) | | \\ ___/\\ \\__( |_| ) | |\n |___|____/\\__\\____|____/_|__|\\_\\____/|__|____|_ /\\___ |\\___ \\____/|___| /\n gtihub.com/Knowledge-Wisdom-Understanding \\___\\/ \\__\\/ \\__\\_/ v{V} \\___\\/\n\n\"\"\"\n\n def print_art(msg, color):\n colored_art = colored(msg, color=color)\n print(colored_art)\n\n color = random_color()\n print_art(autoRecon, color)",
"def banner(name):\n print \"#\"\n print \"# {0}\".format(name.encode('utf-8'))\n print \"#\"\n return name",
"def my_banner(bannerString):\n print(len(bannerString) * \"!\")\n print(bannerString)\n print(len(bannerString) * \"!\")",
"def banner(self):\n return self._banner",
"def banner():\n return \" » \".join(\n [\n f\"Robot Framework Kernel [{__version__}]\",\n f\"Robot Framework [{robot.__version__}]\",\n f\"ipykernel [{ipykernel.__version__}]\",\n f\"IPython [{IPython.__version__}]\",\n f\"Python [{sys.version}]\",\n ]\n )",
"def print_header(self):\n print()\n print(\"=\"*25)\n print()\n print(\"Have fun in your blackjack round!\")\n print()\n print(\"=\"*25)",
"def print_banner(filename: str, template: str = DEFAULT_BANNER_TEMPLATE) -> None:\n if not os.path.isfile(filename):\n logger.warning(\"Can't find logo banner at %s\", filename)\n return\n\n with open(filename, \"r\") as f:\n banner = f.read()\n\n formatted_banner = template.format(banner)\n print(formatted_banner)",
"def print_banner(title):\n\n title = \" \" + title + \" \"\n\n nequals = ncolumns - len(title)\n nleft = nequals // 2\n\n print((\"=\" * (nleft + nequals %2)) + title + (\"=\" * nleft))",
"def logo():\n print (\"\"\"\\\n _ _\n| |_ ___ ___ ___ _ _ _| |\n| | . | | -_| | | . |\n|_|_|___|_|_|___|_ |___|\n |___|\n \"\"\")\n print ('Author: Peter Sooky <[email protected]>')\n print ('Honeyd-python {0}'.format(honeyd.__version__))",
"def printlogo():\n print(\"\")\n print(\" ;;;;;;;;;;;;;;;;;;; \")\n print(\" ;;;;;;;;;;;;;;;;;;; \")\n print(\" ; ; \")\n print(\" ; bandaid ; \")\n print(\" ; ; \")\n print(\" ; +-----------+ ; \")\n print(\" ; |by JC 2020 | ; \")\n print(\" ; +-----------+ ; \")\n print(\" ; ; \")\n print(\",;;;;; ,;;;;; \")\n print(\";;;;;; ;;;;;; \")\n print(\"`;;;;' `;;;;' \")\n print(\"\")",
"def print_banner_block(text, width=80, mark=\"-\", end=\"#\"):\n num_spaces_total = width - len(text) - 2\n num_spaces_left = num_spaces_total // 2\n num_spaces_right = num_spaces_total - num_spaces_left\n banner_with_spaces = end + \" \" * num_spaces_left\n banner_with_spaces += text\n banner_with_spaces += \" \" * num_spaces_right + end\n border = end + mark * (width - 2) + end\n print(border)\n print(banner_with_spaces)\n print(border)",
"def bbs_show_banner(tn, short = True):\n lines = cmd.lban(tn, short_banner = short)\n for line in lines:\n print(filter_tags(line))",
"def print_the_header():\n print('-------------------')\n print(' Weather APP')\n print('-------------------')\n print()",
"def print_banner(\n cls,\n agent_label,\n inbound_transports,\n outbound_transports,\n public_did,\n admin_server=None,\n banner_length=40,\n border_character=\":\",\n ):\n print()\n with Banner(border=border_character, length=banner_length) as banner:\n # Title\n banner.title(agent_label or \"ACA\")\n # Inbound transports\n banner.subtitle(\"Inbound Transports\")\n internal_in_transports = [\n f\"{transport.scheme}://{transport.host}:{transport.port}\"\n for transport in inbound_transports.values()\n if not transport.is_external\n ]\n if internal_in_transports:\n banner.list(internal_in_transports)\n external_in_transports = [\n f\"{transport.scheme}://{transport.host}:{transport.port}\"\n for transport in inbound_transports.values()\n if transport.is_external\n ]\n if external_in_transports:\n banner.subtitle(\" External Plugin\")\n banner.list(external_in_transports)\n\n # Outbound transports\n banner.subtitle(\"Outbound Transports\")\n internal_schemes = set().union(\n *(\n transport.schemes\n for transport in outbound_transports.values()\n if not transport.is_external\n )\n )\n if internal_schemes:\n banner.list([f\"{scheme}\" for scheme in sorted(internal_schemes)])\n\n external_schemes = set().union(\n *(\n transport.schemes\n for transport in outbound_transports.values()\n if transport.is_external\n )\n )\n if external_schemes:\n banner.subtitle(\" External Plugin\")\n banner.list([f\"{scheme}\" for scheme in sorted(external_schemes)])\n\n # DID info\n if public_did:\n banner.subtitle(\"Public DID Information\")\n banner.list([f\"DID: {public_did}\"])\n\n # Admin server info\n banner.subtitle(\"Administration API\")\n banner.list(\n [f\"http://{admin_server.host}:{admin_server.port}\"]\n if admin_server\n else [\"not enabled\"]\n )\n\n banner.version(__version__)\n\n print()\n print(\"Listening...\")\n print()",
"def welcomeMessage(self) -> None:\n\n # creating absolute path to the the banner that was passed in the constructor\n runtime_file_path: str = os.path.abspath(__file__)\n runtime_file_folder: str = os.path.dirname(runtime_file_path)\n banner_file_path: str = os.path.join(runtime_file_folder, self.banner)\n\n # printing actual welcome message\n cprint(\"WELCOME TO:\", 'red', attrs=['bold'])\n banner = open(banner_file_path, \"r\")\n for line in banner:\n cprint(line.strip(\"\\n\"), 'yellow', attrs=['bold'])\n names: str = \"By: Casper Haan (1969853haan) & Luc Hundscheid (ik weet je studentnummer niet meer)\"\n print(f\"\\n{names}\\n\")\n cprint(72 * \"~\", 'magenta', attrs=['bold'])\n print()\n self.board.printBoard()",
"def banner(self):\n\t\trundays = 0\n\t\tsqr = self.sqc.cursor()\n\t\tsqr.execute(\"SELECT value FROM sord WHERE name = 'gdays'\")\n\t\tfor value in sqr.fetchall():\n\t\t\trundays = value[0]\n\t\tthismsg = \"\\r\\n\"+self.cntransi(self.ESC+\"32mSaga Of The Red Dragon\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32m\"+self.ESC+\"1m\"+self.config.host)+\"\\r\\n\"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mCompiled June 25, 2009: Version \"+self.ESC+\"1m\"+self.ESC+\"37m\"+self.config.version+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"22m\"+self.ESC+\"32m(c) pre-2009 by Someone Else\\r\\n\\r\\n\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32m\"+self.ESC+\"1m\"+self.ESC+\"37mREGISTERED TO \"+self.ESC+\"0m\"+self.ESC+\"1m\"+self.ESC+\"34m\"+self.config.admin+self.ESC+\"0m\")+\"\\r\\n\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mThe current game has been running for \"+self.ESC+\"1m\"+str(rundays)+self.ESC+\"22m game days.\\r\\n\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mPlayers are deleted after \"+self.ESC+\"1m\"+str(self.config.delinactive)+self.ESC+\"22m real days of inactivity.\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mPlayers are enjoying \"+self.ESC+\"1m\"+str(self.config.ffight)+self.ESC+\"22m forest fights per day.\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mPlayers are enjoying \"+self.ESC+\"1m\"+str(self.config.pfight)+self.ESC+\"22m player fights per day.\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mPlayers are enjoying \"+self.ESC+\"1m\"+str(self.config.bankinterest)+\"%\"+self.ESC+\"22m interest at the bank per day.\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.cntransi(self.ESC+\"32mThe current game day is \"+self.ESC+\"1m\"+str(self.config.daylength)+self.ESC+\"22m real hours long.\\r\\n\"+self.ESC+\"0m\")+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"32m (\"+self.ESC+\"1mE\"+self.ESC+\"22m)nter the realm of the Dragon\"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"32m (\"+self.ESC+\"1mL\"+self.ESC+\"22m)ist Warriors\"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"32m (\"+self.ESC+\"1mI\"+self.ESC+\"22m)nstructions\"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"32m (\"+self.ESC+\"1mQ\"+self.ESC+\"22m)uit the game server\\r\\n\"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"32m Your choice, warrior? [\"+self.ESC+\"1mE\"+self.ESC+\"22m]: \"+self.ESC+\"0m\"+self.ESC+\"0m \"\n\t\tsqr.close()\n\t\treturn thismsg",
"def _print_header():\n print()\n print(\n \" ┌─────────────────────── Measurements in BPM ─────────────────────┐\"\n )\n print(\n \"ID Date Activity Distance Elevation Start Duration 5s 30s 60s 5m 10m 20m 30m 60m 90m 120m\"\n )\n _print_separator()",
"def banner_wrapper(banner_url):\n # so simple\n return '{url}<img src=\"{url}\" alt=\"{alt}\">'.format(\n url=banner_url,\n alt='Banner'\n )",
"def getBanner(outputScan):\n try:\n return str(outputScan.split(\", Banner: \", 1)[1][:12])\n #banner = re.search(r\"[0-9A-F]{12}\",outputScan, re.MULTILINE).group()\n #return str(banner)\n except Exception as e:\n print '\\033[91m'+\"ERROR_BANNER\"\n return \"BANNER_ERROR\"",
"def print_intro(self):\n \n print('Did you know mammals tend to have the shortest migration routes because walking takes more energy than flying or swimming?')",
"def print_cover(outfile: TextIO) -> None:\n outfile.write(\" <div id=\\\"cover_page\\\">\\n\")\n for i in range(1, 29):\n outfile.write(\" <img id=\\\"cover_img\" + str(i) + \"\\\" class=\\\"cover_img\\\" \"\n \"src=\\\"media/cover images/cover\" + str(i) + \".jpg\\\" />\\n\")\n outfile.write(\" <p class=\\\"cover_title\\\">\" + init_data().site_title + \"</p>\\n\")\n outfile.write(\" <p class=\\\"cover_author\\\">\" + init_data().site_author + \"</p>\\n\")\n outfile.write(\" </div>\\n\")\n outfile.write(\"\\n\")",
"def print_banner_line(text, width=80, mark=\"-\", end=\"#\"):\n num_marks_total = width - len(text) - 4\n num_marks_left = num_marks_total // 2\n num_marks_right = num_marks_total - num_marks_left\n banner_with_marks = end + mark * num_marks_left\n banner_with_marks += \" %s \" % text\n banner_with_marks += mark * num_marks_right + end\n print(banner_with_marks)",
"def description() -> str:\n content = \"Demonstrates usage of blackboard namespaces.\\n\"\n content += \"\\n\"\n\n if py_trees.console.has_colours:\n banner_line = console.green + \"*\" * 79 + \"\\n\" + console.reset\n s = banner_line\n s += console.bold_white + \"Blackboard\".center(79) + \"\\n\" + console.reset\n s += banner_line\n s += \"\\n\"\n s += content\n s += \"\\n\"\n s += banner_line\n else:\n s = content\n return s",
"def display(self):\n statement = f\"\"\"\n ------\n By {self.prescribed_by.name.upper()}\n ------\n Patient Detail!\n Name: {self.prescribed_to.name.capitalize()}\n Age: {self.prescribed_to.age}\n Gender: {self.prescribed_to.gender}\n Prescribed Medicines!\"\"\"\n print(statement)\n self.display_cure()",
"def handle(self):\n if len(self.banner) <= 75:\n self.respond(\"220 %s\" %str(self.banner))\n else:\n self.push('220-%s\\r\\n' %str(self.banner))\n self.respond('220 ')",
"def get_banner(conn) -> str:\n banner_data = conn.recv(1024)\n banner = banner_data.decode().strip()\n print('Banner: {}'.format(banner))\n return banner",
"def show_landing(self):\n print(\"Hooray, the Eagle has landed!\")",
"def display(self,message):\r\n \r\n print(message)",
"def drawLogo(self):\n print(\"__________ __________ \")\n print(\"\\______ \\_____ _______ ____\\______ \\ ____ ____ ____ ______\")\n print(\" | | _/\\__ \\\\_ __ \\_/ __ \\| | _// _ \\ / \\_/ __ \\ / ___/\")\n print(\" | | \\ / __ \\| | \\/\\ ___/| | ( <_> ) | \\ ___/ \\___ \\ \")\n print(\" |______ /(____ /__| \\___ >______ /\\____/|___| /\\___ >____ >\")\n print(\" \\/ \\/ \\/ \\/ \\/ \\/ \\/ \")",
"def print_game_logo():\n\n HANGMAN_ASCII_ART = r\"\"\"\n _ _\n | | | |\n | |__| | __ _ _ __ __ _ _ __ ___ __ _ _ __\n | __ |/ _` | '_ \\ / _` | '_ ` _ \\ / _` | '_ \\\n | | | | (_| | | | | (_| | | | | | | (_| | | | |\n |_| |_|\\__,_|_| |_|\\__, |_| |_| |_|\\__,_|_| |_|\n __/ |\n |___/\n\"\"\"\n \n clear_player_screen()\n print_centered(HANGMAN_ASCII_ART)\n\n return None",
"def printHeader(self):\n\t\tkarmaDetails = \"\"\n\t\tif self.hnUserName != \"\":\n\t\t\tkarmaDetails = \" | \" + self.hnUserName + \" (\" + str(self.karma) + \")\"\n\t\n\t\tfor i in range(0,60):\n\t\t\tprint \"\"\n\t\tprint \"Showing \" + self.newestOrTop + \" stories. | Last updated \" + self.getLastRefreshedTime() + karmaDetails\n\t\tprint \"\"",
"def description() -> str:\n content = \"Demonstrates usage of blackbord remappings.\\n\"\n content += \"\\n\"\n content += \"Demonstration is via an exemplar behaviour making use of remappings..\\n\"\n\n if py_trees.console.has_colours:\n banner_line = console.green + \"*\" * 79 + \"\\n\" + console.reset\n s = banner_line\n s += console.bold_white + \"Blackboard\".center(79) + \"\\n\" + console.reset\n s += banner_line\n s += \"\\n\"\n s += content\n s += \"\\n\"\n s += banner_line\n else:\n s = content\n return s",
"def start_print(outfile: TextIO) -> None:\n outfile.write(\"<!DOCTYPE HTML>\\n\")\n outfile.write(\"<html lang=\\\"en\\\">\\n\")\n outfile.write(\" <head>\\n\")\n outfile.write(\" <meta charset=\\\"utf-8\\\" />\\n\")\n outfile.write(\" <title>Fiddler Crabs</title>\\n\")\n outfile.write(\" <link rel=\\\"stylesheet\\\" href=\\\"resources/uca_style.css\\\" />\\n\")\n outfile.write(\" <link rel=\\\"stylesheet\\\" href=\\\"resources/print.css\\\" />\\n\")\n outfile.write(\" <link rel=\\\"stylesheet\\\" href=\\\"resources/font-awesome/css/fontawesome.min.css\\\" />\\n\")\n outfile.write(\" <link rel=\\\"stylesheet\\\" href=\\\"resources/font-awesome/css/solid.min.css\\\" />\\n\")\n outfile.write(\" <link rel=\\\"stylesheet\\\" href=\\\"resources/font-awesome/css/brands.min.css\\\" />\\n\")\n outfile.write(\" <link rel=\\\"stylesheet\\\" href=\\\"resources/font-awesome/css/regular.min.css\\\" />\\n\")\n outfile.write(\" <link rel=\\\"stylesheet\\\" href=\\\"resources/flag-icon-css/css/flag-icons.min.css\\\" />\\n\")\n outfile.write(\" </head>\\n\")\n outfile.write(\"\\n\")\n outfile.write(\" <body>\\n\")",
"def display_message():\n\tprint(\"Learnt to write functions, which are named blocks of code that are designed to do one specific job.\")",
"def banner(comment_mark: str) -> str:\n result = ('{} This is generated code. If it\\'s broken, then you'\n ' should\\n').format(comment_mark)\n result += ('{} fix the generation script, not this file.\\n'\n ).format(comment_mark)\n result += '\\n\\n'\n return result",
"def display_message():",
"def display_starting_message(): # opening message\n starting_message = \"Is your cat plotting to kill you?? \\nLet's find out. \\n(Please note that this is merely a pythonic presentation of an app created by The Oatmeal. \\nI do not claim credit for its brilliance. I'm just trying to learn Python.)\"\n print(starting_message)",
"def welcome():\n print(\"\"\"\n\n-----------------------------------\n Welcome to the Tip Calculator \n-----------------------------------\n\"\"\")",
"def __print_logo(self, left=5, top=2, bottom=2):\n\n print('\\n' * top, end=\"\")\n\n for line in self.__logo:\n print((\" \" * left) + line, end=\"\")\n\n print('\\n' * bottom, end=\"\")",
"def get_banner(self,context,request):\n ba = queryMultiAdapter((context,request), interfaces.IBanner)\n if not ba:\n return ''\n return ba()",
"def display_message():\n\tprint(\"In this chapter we will be learning how to write functions\")",
"def print_header():\n print(\"STEM Center Temperature Project\")\n print(\"Shaotong Wen\")",
"def render_banner(self, width=300, height=85):\n img_path = IMG_PATH + os.sep + CARD_BANNER\n banner_img = Image.open(img_path)\n banner_img = banner_img.resize((width, height))\n return banner_img",
"def about(display=True):\n\n ABOUT_TEXT = \"\"\"\nPre-release version %s (%s) of Topographica; an updated\nversion may be available from topographica.org.\n\nThis program is free, open-source software available under the BSD\nlicense (http://www.opensource.org/licenses/bsd-license.php).\n\"\"\"%(release,version)\n if display:\n print ABOUT_TEXT\n else:\n return ABOUT_TEXT",
"def test_display_banner(self):\n old_trial_start = timezone.now() - datetime.timedelta(days=constants.TRIAL_DAYS)\n account = AccountFactory(\n status=Account.AccountStatus.TRIALING, user__date_joined=old_trial_start\n )\n request = self.rf.get(\"/\")\n request.account = account\n context = {\"request\": request}\n\n context = accounts_tags.trial_banner(context)\n\n assert context[\"display_banner\"]",
"def print_welcome():\n print(\"Welcome to Langton's ant simulator! Choose option: \")\n print(\"1 -> Create white blank picture\")\n print(\"2 -> Load file\")\n print(\"3 -> Generate picture with given probability\")",
"def print_header():\n\n print(\"\"\"\n _____ _ ____ _____ ____ ____ _____ ____ _____\n /__ __\\/ \\/ _\\ /__ __\\/ _ \\/ _\\ /__ __\\/ _ \\/ __/ 1 | 2 | 3\n / \\ | || / _____ / \\ | / \\|| / _____ / \\ | / \\|| \\ 4 | 5 | 6\n | | | || \\_\\____\\| | | |-||| \\_\\____\\| | | \\_/|| /_ 7 | 8 | 9\n \\_/ \\_/\\____/ \\_/ \\_/ \\|\\____/ \\_/ \\____/\\____|\n\n To play Tic-Tac-Toe, you need to get three in a row...\n Your choices are defined, they must be from 1 to 9...\n \"\"\")",
"def p_banner():\n return random.choice([banner, banner_two, banner_three, banner_four, banner_five])",
"def print_overview_slide():\r\n print '<div id=\"overview\" class=\"step\" ' \\\r\n ' data-x=\"3000\" data-y=\"1500\" data-scale=\"10\">'\r\n print '</div>'",
"def splash_screen():\n print(Fore.YELLOW + Style.BRIGHT + \"\\n\" + ProjInfo.LOGO + Style.RESET_ALL)\n print_version_info(False)",
"def Dragon_Blade(self):\t\t\n\t\tprint(self.name.Title() + \" Dragon blade!\")",
"def __display_login_info(self):\n print(f'\\nYour card has been created\\n'\n f'Your card number:\\n'\n # f'{self.__card_display()}\\n' # uncomment this line and comment out line below for pretty display\n f'{self.card_number}\\n'\n f'Your card PIN:\\n'\n f'{self.__account_pin}\\n', )",
"async def gen_banner(self, member):\n base = deepcopy(self.images[randint(0, len(self.images) - 1)])\n\n # Draw the username\n idraw = ImageDraw.Draw(base)\n idraw.text(self.banner_cfg[\"TextPos\"], member.name, fill=tuple(self.banner_cfg[\"Text_Color\"]), font=self.font)\n \n\n # Get user avatar\n avatar_url = member.avatar_url\n if(avatar_url==None):\n avatar_url = member.default_avatar_url\n # Wow, we can really just load it asynchronously from the API now? That's dope\n avatar = await avatar_url.read()\n # We need to save it as a file in memory to get the size so we can load it as an image.\n with io.BytesIO() as fb:\n fb.write(avatar)\n fb.seek(0, 0)\n avatar = Image.open(fb)\n avatar = avatar.resize(self.banner_cfg[\"AvatarSize\"])\n if (self.banner_cfg[\"Rounded\"][\"is_rounded\"]):\n avatar = self.round_corners(avatar, self.banner_cfg[\"Rounded\"][\"px\"])\n # Now that we have our avatar, we can slap it into our banner.\n final = Image.new(\"RGBA\", base.size)\n final.paste(avatar, self.banner_cfg[\"AvatarPos\"])\n if(self.banner_cfg[\"AvatarLayer\"]==\"front\"):\n final = Image.alpha_composite(base, final)\n if(self.banner_cfg[\"AvatarLayer\"]==\"back\"):\n final = Image.alpha_composite(final, base)\n \n # Lastly, let's package it as a file to be uploaded.\n with io.BytesIO() as fb:\n final.save(fb, format=\"png\")\n fb.seek(0, 0)\n \n return discord.File(fb, filename=\"Welcome.png\")",
"def create_banner_list():\n template_vars = {\n 'title' : 'Banners - ' + sitesettings.SITE_NAME,\n 'siteurl' : sitesettings.SITE_URL,\n 'sitename' : sitesettings.SITE_NAME,\n 'meta_desc' : 'List of step-up banners in Final Fantasy Brave Exvius (FFBE)',\n 'last_four_banners' : nav.get_last_four_banners('all'),\n 'all_banner_info' : get_all_banner_info(),\n }\n\n bn_path = os.path.join(sitesettings.LOCAL_FILE_PATH, 'banner')\n\n if not os.path.exists(bn_path):\n os.makedirs(bn_path)\n\n template_file = 'bannerlist.html'\n html_file_loc = os.path.join(bn_path, 'index.html')\n generatehtml.generate_html(\n html_file_loc, template_file, template_vars, os.path.join(os.getcwd(), 'templates'))",
"def header(self):\n\t\tthismsg = \"\\r\\n\"+self.ESC + \"0m \" +self.A220 + self.A220 + self.A220 + self.A220 + self.A220 +\" \" + self.ESC + \"0m\\r\\n\"\n\t\tthismsg += \" \"+self.A220+self.A220+self.A219+self.A219+self.A219+self.A219+self.ESC+\"1;47m\"+self.A176+self.A177+self.A178+self.ESC+\"40m\"+self.A219+self.A219+self.A220+self.A220+self.ESC+\"0m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"1m \"+self.ESC+\"31mS\"+self.ESC+\"0;31mAGA\"+self.ESC+\"37m \"+self.A219+self.A219+self.ESC+\"30;47mo\"+self.ESC+\"37;40m\"+self.A219+self.ESC+\"1;47m\"+self.A176+self.A176+self.A177+self.A177+self.A178+self.ESC+\"40m\"+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.ESC+\"0m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"1m \"+self.ESC+\"31mO\"+self.ESC+\"0;31mF THE\"+self.ESC+\"37m \"+self.A219+self.A219+self.ESC+\"30;47mO\"+self.ESC+\"37;40m\"+self.A219+self.A219+self.ESC+\"1;47m\"+self.A176+self.A177+self.A177+self.A178+self.ESC+\"40m\"+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.ESC+\"0m \"+self.A220+self.A220+self.A220+self.ESC+\"1m\"+self.A220+self.A220+self.ESC+\"0m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"1m \"+self.ESC+\"31mR\"+self.ESC+\"0;31mED\"+self.ESC+\"37m \"+self.A219+self.ESC+\"30;47mo\"+self.ESC+\"37;40m\"+self.A219+self.A219+self.ESC+\"1;47m\"+self.A176+self.A176+self.A177+self.A178+self.ESC+\"40m\"+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.ESC+\"0m \"+self.A223+self.A219+self.ESC+\"1;47m\"+self.A176+self.A219+self.A219+self.A219+self.ESC+\"40m\"+self.A220+self.A220+\" \"+self.ESC+\"0;31m\"+self.A220+self.ESC+\"37m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"31m \"+self.ESC+\"1mD\"+self.ESC+\"0;31mRAGON 0.9.9\"+self.ESC+\"37m \"+self.A223+self.A219+self.A219+self.ESC+\"1;47m\"+self.A176+self.A177+self.A177+self.A178+self.A219+self.ESC+\"40m\"+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A223+self.ESC+\"0m \"+self.A219+self.ESC+\"1;47m\"+self.A219+self.A219+self.ESC+\"40m\"+self.A223+self.A223+self.ESC+\"0;31m\"+self.A220+self.ESC+\"1;41m\"+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+self.ESC+\"37m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"31m concept\"+self.ESC+\"37m \"+self.A223+self.ESC+\"1;47m\"+self.A176+self.A177+self.A177+self.A178+self.A178+self.ESC+\"40m\"+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A219+self.A223+self.ESC+\"0m \"+self.ESC+\"1m\"+self.A220+self.ESC+\"0m \"+self.ESC+\"1m\"+self.A220+self.ESC+\"0m \"+self.ESC+\"31m\"+self.A220+self.A220+self.ESC+\"1;41m\"+self.A176+self.A178+\" \"+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+self.A220+self.ESC+\"37m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"31m Seth Robinson \"+self.ESC+\"37m\"+self.A222+\" \"+self.A223+self.A223+self.ESC+\"1;47m\"+self.A178+self.ESC+\"40m\"+self.A219+self.A219+self.A219+self.A219+self.A223+self.A223+self.ESC+\"0m \"+self.A220+self.ESC+\"1;47m\"+self.A220+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"0m\"+self.A220+self.A220+self.ESC+\"1;47m\"+self.A220+self.ESC+\"40m\"+self.A223+self.A223+\" \"+self.ESC+\"0m\"+self.A223+self.A219+self.A220+self.ESC+\"1m\"+self.A220+\" \"+self.ESC+\"0;31m\"+self.A223+self.ESC+\"1;41m\"+self.A176+self.A177+self.A178+\" \"+self.ESC+\"0;31m\"+self.A223+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+self.ESC+\"37m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"31m by\"+self.ESC+\"0m 
\"+self.A219+\" \"+self.A220+self.ESC+\"1;47m\"+self.A220+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"0m\"+self.A220+self.ESC+\"1;47m\"+self.A220+self.A220+self.A219+self.ESC+\"40m\"+self.A223+self.ESC+\"0m \"+self.A223+self.ESC+\"1;47m\"+self.A176+self.A219+self.ESC+\"40m\"+self.A220+\" \"+self.ESC+\"0;31m\"+self.A223+self.ESC+\"1;41m\"+self.A177+self.A178+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+\" \"+self.A176+\" \"+self.A176+self.ESC+\"0;31m\"+self.A220+self.ESC+\"37m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"1;34m J\"+self.ESC+\"0;34m.\"+self.ESC+\"1mT\"+self.ESC+\"0;34m.\"+self.ESC+\"1mS\"+self.ESC+\"0;34mage\"+self.ESC+\"0m \"+self.A219+self.ESC+\"1;30;47m\"+self.A176+self.ESC+\"0;37;40m\"+self.A221+\" \"+self.A220+self.ESC+\"1;47m\"+self.A177+self.A176+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"0m\"+self.A220+self.ESC+\"1;47m\"+self.A220+self.A220+self.A219+self.ESC+\"40m\"+self.A223+self.ESC+\"0m \"+self.A223+self.ESC+\"1;47m\"+self.A177+self.A219+self.A219+self.ESC+\"40m\"+self.A220+\" \"+self.ESC+\"0;31m\"+self.A223+self.ESC+\"1;41m\"+self.A177+self.A176+self.A178+\" \"+self.ESC+\"0;31m\"+self.A220+self.A223+self.ESC+\"1;41m\"+self.A176+self.A178+self.A176+self.A176+self.A177+self.A177+self.ESC+\"0;37;40m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.A219+self.A219+\" \"+self.ESC+\"1;47m\"+self.A176+self.A177+self.A219+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"0m\"+self.A223+self.ESC+\"1m\"+self.A223+self.ESC+\"41m\"+self.A223+self.ESC+\"0;31m\"+self.A220+self.A220+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A177+self.A176+\" \"+self.A220+self.A220+self.A220+self.A220+self.A223+self.A220+self.ESC+\"1;41m\"+self.A176+self.ESC+\"0;31m\"+self.A220+self.ESC+\"1;41m\"+self.A177+self.ESC+\"0;31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"37;40m \"+self.ESC+\"31m\"+self.A223+self.ESC+\"1;41m\"+self.A177+self.A178+\" \"+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A178+self.A178+self.A176+self.A177+self.A177+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A219+self.ESC+\"1;30;47m\"+self.A176+self.ESC+\"0;37;40m \"+self.A219+self.A223+self.ESC+\"1m\"+self.A223+self.ESC+\"0;31m\"+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.A223+self.A223+\" \"+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.A223+self.A223+\" \"+self.A176+self.A176+\" \"+self.ESC+\"1;41m\"+self.A176+\" \"+self.A178+self.A178+self.A219+self.ESC+\"0;31m\"+self.A220+\" \"+self.A223+self.ESC+\"1;41m\"+self.A177+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.A178+self.A176+self.A177+self.A177+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A219+self.ESC+\"1;30;47m\"+self.A176+self.ESC+\"0;37;40m \"+self.A219+\" \"+self.ESC+\"31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+\" \"+self.A220+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A177+self.A176+self.ESC+\"37m \"+self.ESC+\"31m\"+self.A178+self.A177+self.A177+self.A223+\" \"+self.ESC+\"1;41m\"+self.A176+\" \"+self.A177+self.A178+self.A219+\" \"+self.ESC+\"0;31m\"+self.A220+\" \"+self.A223+self.ESC+\"1;41m\"+self.A177+self.A219+self.ESC+\"0;31m\"+self.A220+self.A223+self.ESC+\"1;41m\"+self.A178+\" \"+self.A177+self.A178+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;30;47m\"+self.A176+self.ESC+\"0;37;40m\"+self.A219+\" \"+self.A219+\" 
\"+self.ESC+\"31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"37;40m \"+self.ESC+\"31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A177+self.A176+\" \"+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.A223+self.ESC+\"37m \"+self.ESC+\"1;31;41m\"+self.A176+\" \"+self.A178+self.A177+self.A219+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.A177+self.A178+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A220+self.A220+self.A223+self.ESC+\"1;41m\"+self.A178+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;30;47m\"+self.A176+self.ESC+\"0;37;40m\"+self.A219+self.ESC+\"1;30;47m\"+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A219+self.A178+self.A178+self.A177+self.A176+self.A223+\" \"+self.A220+self.A220+\" \"+self.A223+self.ESC+\"37m \"+self.ESC+\"1;31;41m\"+self.A176+\" \"+self.A177+self.A178+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A177+self.A178+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.A178+self.A178+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A219+self.A219+self.A219+\" \"+self.ESC+\"31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A221+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A223+self.A223+self.A223+\" \"+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.A223+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+\" \"+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A178+self.A177+self.A223+\" \"+self.ESC+\"1;41m\"+self.A176+\" \"+self.A177+self.A219+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.A177+\" \"+self.ESC+\"0;31m\"+self.A220+self.ESC+\"1;41m\"+self.A176+self.A178+self.A178+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A222+self.ESC+\"1;30;47m\"+self.A176+self.A177+self.ESC+\"0;37;40m\"+self.A221+\" \"+self.ESC+\"31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+\" \"+self.ESC+\"0;31m\"+self.A223+\" \"+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A220+\" \"+self.A223+self.A223+self.A220+\" \"+self.A223+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A178+self.A178+self.ESC+\"37m \"+self.ESC+\"1;31;41m\"+self.A176+\" \"+self.A178+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A223+self.ESC+\"1;41m\"+self.A176+self.A177+self.A178+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.A178+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A222+self.ESC+\"1;30;47m\"+self.A176+self.A177+self.ESC+\"0;37;40m\"+self.A219+\" \"+self.ESC+\"31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.ESC+\"0;31m\"+self.A223+self.A223+\" \"+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A222+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A220+\" \"+self.A223+self.A178+self.ESC+\"37m \"+self.ESC+\"1;31;41m\"+self.A176+self.A177+self.A178+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.A177+\" \"+self.A178+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.A219+self.ESC+\"1;30;47m\"+self.A177+self.ESC+\"0;37;40m\"+self.A219+\" \"+self.ESC+\"31m\"+self.A222+self.A219+self.ESC+\"1;41m\"+self.A176+self.A176+self.ESC+\"0;31m\"+self.A221+\" \"+self.ESC+\"1;5;32m\"+self.A220+self.A220+self.A223+\" \"+self.ESC+\"0;31;41m \"+self.ESC+\"40m\"+self.A178+self.ESC+\"41m 
\"+self.ESC+\"40m\"+self.A223+self.A223+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A222+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+\" \"+self.A220+self.A220+self.A220+self.A220+self.A223+self.ESC+\"1;41m\"+self.A176+self.A177+self.A178+self.ESC+\"0;31m\"+self.A220+\" \"+self.ESC+\"1;41m\"+self.A176+self.A177+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A219+self.ESC+\"1;30;47m\"+self.A176+self.ESC+\"0;37;40m\"+self.A219+\" \"+self.ESC+\"31m\"+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"1;5;32m\"+self.A219+self.A219+self.A223+\" \"+self.ESC+\"0;31m\"+self.A222+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A220+self.A222+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A222+self.A219+self.ESC+\"41m \"+self.ESC+\"37;40m \"+self.ESC+\"31;41m \"+self.ESC+\"40m\"+self.A178+self.A223+\" \"+self.A223+self.ESC+\"1;41m\"+self.A176+self.A177+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"1;31;41m\"+self.A176+self.A177+self.A219+self.A178+self.ESC+\"0;37;40m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.A222+self.ESC+\"1;30;47m\"+self.A176+self.A177+self.ESC+\"0;37;40m\"+self.A219+\" \"+self.ESC+\"31m\"+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A178+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"1;5;32m\"+self.A223+\" \"+self.ESC+\"0;31m\"+self.A220+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A222+self.A219+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+\" \"+self.A223+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A223+\" \"+self.A223+self.ESC+\"1;41m\"+self.A176+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A177+self.A219+self.A178+self.ESC+\"37;40m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.A219+self.ESC+\"1;30;47m\"+self.A176+self.ESC+\"0;37;40m\"+self.A221+\" \"+self.ESC+\"31m\"+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A177+self.A176+self.ESC+\"0;31m\"+self.A221+self.A220+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.ESC+\"0;31m\"+self.A223+self.A223+\" \"+self.A222+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.A176+self.ESC+\"0;31m\"+self.A220+self.A220+self.ESC+\"37m \"+self.ESC+\"1;31;41m\"+self.A176+self.A177+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A176+self.A177+self.A219+self.A178+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A219+self.ESC+\"1;30;47m\"+self.A177+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A220+self.A220+self.A223+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A176+self.A222+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.ESC+\"37m \"+self.ESC+\"31m\"+self.A220+self.ESC+\"1;41m\"+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A177+self.A177+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A220+\" \"+self.ESC+\"1;41m\"+self.A176+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A177+\" \"+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A219+self.A221+\" \"+self.ESC+\"31;41m \"+self.ESC+\"37;40m \"+self.ESC+\"31;41m \"+self.ESC+\"40m\"+self.A177+self.A177+self.A176+self.A178+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A177+self.A176+self.A176+\" \"+self.ESC+\"0;31m\"+self.A222+self.ESC+\"41m 
\"+self.ESC+\"1m\"+self.A178+self.A177+self.A176+self.ESC+\"0;31m\"+self.A220+\" \"+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A178+self.A220+self.A220+\" \"+self.ESC+\"1;41m\"+self.A178+self.ESC+\"0;31m\"+self.A220+\" \"+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A177+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1m\"+self.A220+\" \"+self.ESC+\"0m\"+self.A223+\" \"+self.ESC+\"31;41m \"+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A219+self.A178+self.A178+self.A177+self.A176+self.A176+self.A177+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A178+self.A177+self.A177+self.A176+\" \"+self.ESC+\"40m\"+self.A220+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A178+self.A177+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A223+self.A178+\" \"+self.ESC+\"1;41m\"+self.A177+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A176+self.A176+self.A178+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A223+self.ESC+\"1m\"+self.A219+self.A220+\" \"+self.ESC+\"0;31;41m \"+self.A177+self.A178+self.A176+self.A176+\" \"+self.ESC+\"40m\"+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A219+self.A176+self.A178+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A219+self.A178+self.A177+self.A176+self.ESC+\"37;40m \"+self.ESC+\"31m\"+self.A223+\" \"+self.ESC+\"1;41m\"+self.A176+self.ESC+\"0;31m\"+self.A223+\" \"+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.A177+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.A223+self.ESC+\"1;47m\"+self.A223+self.ESC+\"40m\"+self.A219+self.A223+self.ESC+\"0;31m\"+self.A220+self.ESC+\"1;41m\"+self.A178+self.A176+self.A176+\" \"+self.ESC+\"0;31m\"+self.A223+\" \"+self.ESC+\"1;37;47m\"+self.A222+self.ESC+\"40m\"+self.A221+self.A223+self.A220+\" \"+self.ESC+\"0;31m\"+self.A177+\" \"+self.A223+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A221+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.ESC+\"0;31m\"+self.A223+\" \"+self.A220+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+self.A223+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A223+\" \"+self.ESC+\"1;41m\"+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A176+\" \"+self.A177+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.A223+self.ESC+\"31m\"+self.A222+self.ESC+\"1;41m\"+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A223+\" \"+self.ESC+\"1;37;47m\"+self.A222+self.ESC+\"40m\"+self.A221+\" \"+self.ESC+\"47m\"+self.A222+self.ESC+\"40m\"+self.A221+\" \"+self.ESC+\"0;31m\"+self.A178+self.A177+\" \"+self.A223+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"33m\"+self.A220+\" \"+self.ESC+\"31m\"+self.A223+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+self.A223+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.ESC+\"40m\"+self.A220+self.A223+\" \"+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.A176+\" \"+self.A178+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"31;41m \"+self.A176+self.A176+self.ESC+\"37;40m 
\"+self.A220+self.ESC+\"1m\"+self.A219+self.ESC+\"0m\"+self.A223+self.ESC+\"1m\"+self.A219+self.A221+\" \"+self.A223+\" \"+self.A223+self.A220+\" \"+self.ESC+\"0;31;41m \"+self.ESC+\"40m\"+self.A223+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"33m\"+self.A219+self.A219+\" \"+self.ESC+\"31m\"+self.A223+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A178+self.A177+self.A176+self.ESC+\"40m\"+self.A220+\" \"+self.ESC+\"1;41m\"+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A176+self.A177+\" \"+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.A220+self.ESC+\"1m\"+self.A219+\" \"+self.A219+self.A221+\" \"+self.A223+\" \"+self.A220+\" \"+self.A223+self.A220+\" \"+self.ESC+\"0;31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"1;33;43m\"+self.A177+self.A176+self.ESC+\"0;33m\"+self.A219+self.A219+\" \"+self.ESC+\"31m\"+self.A223+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A219+self.A220+self.A222+self.ESC+\"1;41m\"+self.A219+self.A177+self.A176+self.ESC+\"0;31m\"+self.A220+self.ESC+\"1;41m\"+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+\" \"+self.A178+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.A223+self.ESC+\"1m\"+self.A220+\" \"+self.A223+\" \"+self.A220+self.A223+self.A220+\" \"+self.A223+\" \"+self.A223+self.ESC+\"0;31m\"+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.A223+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+\" \"+self.ESC+\"1;33m\"+self.A223+self.ESC+\"43m\"+self.A219+self.A219+self.ESC+\"40m\"+self.A223+self.ESC+\"0;33m\"+self.A220+self.A219+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A177+self.A177+self.A176+self.ESC+\"0;31m\"+self.A223+\" \"+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.A177+self.A178+\" \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1m\"+self.A220+\" \"+self.ESC+\"0;31m\"+self.A220+self.A220+self.A220+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A223+self.ESC+\"37m \"+self.ESC+\"1;33;43m\"+self.A177+self.A176+self.ESC+\"0;33m\"+self.A219+self.A219+self.A220+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.A176+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.A176+self.A177+self.A178+\" \"+self.A219+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"30mÙ\"+self.ESC+\"31m\"+self.A222+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A177+self.A177+self.A176+self.A176+\" \"+self.ESC+\"0;31m\"+self.A223+self.A223+self.ESC+\"37m \"+self.ESC+\"1;33m\"+self.A223+self.ESC+\"43m\"+self.A219+self.ESC+\"0;33m\"+self.A223+self.A223+self.A223+self.A220+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+self.A222+self.A223+\" \"+self.A220+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A177+self.A176+\" \"+self.A219+\" \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"31m\"+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.ESC+\"37m 
\"+self.ESC+\"1;33m\"+self.A220+self.A220+self.ESC+\"43m\"+self.A176+self.ESC+\"0;33m\"+self.A219+self.A219+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A220+self.ESC+\"1;41m\"+self.A176+self.A177+self.A178+self.A176+self.A176+\" \"+self.A219+self.ESC+\"0;37;40m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.ESC+\"1;33;43m\"+self.A219+self.A178+self.A177+self.A176+self.ESC+\"0;33m\"+self.A219+\" \"+self.ESC+\"31m\"+self.A223+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+\" \"+self.A223+self.ESC+\"1;41m\"+self.A176+self.A177+self.A177+\" \"+self.ESC+\"0;31m\"+self.A223+self.ESC+\"37m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += \" \"+self.ESC+\"1;33;43m\"+self.A219+self.A178+self.ESC+\"40m\"+self.A223+self.ESC+\"0;33m\"+self.A223+self.A223+self.A220+self.A220+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A177+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+\" \"+self.A223+self.ESC+\"1;41m\"+self.A177+self.A177+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;33m\"+self.A220+self.A220+self.ESC+\"43m\"+self.A219+self.A178+self.A177+self.A176+self.ESC+\"0;33m\"+self.A219+self.A220+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A219+self.A178+self.A177+self.A177+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"31m\"+self.A223+self.ESC+\"41m \"+self.ESC+\"37;40m \"+self.ESC+\"31;41m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;33m\"+self.A220+self.ESC+\"43m\"+self.A219+self.ESC+\"40m\"+self.A223+self.A223+self.A223+self.ESC+\"0;33m\"+self.A223+self.A220+self.A220+self.A220+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A178+self.A177+self.A176+self.ESC+\"0;31m\"+self.A223+self.A223+self.A223+self.A223+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+\" \"+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+\" \"+self.A223+self.ESC+\"41m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;33m\"+self.A220+self.A220+self.ESC+\"43m\"+self.A219+self.A219+self.A178+self.A177+self.A176+self.ESC+\"0;33m\"+self.A219+self.A219+\" \"+self.ESC+\"31m\"+self.A223+self.A223+\" \"+self.A220+self.A220+self.A220+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+\" \"+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+\" \"+self.ESC+\"41m \"+self.ESC+\"40m\"+self.A220+self.A220+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;33;43m\"+self.A219+self.A219+self.ESC+\"40m\"+self.A223+self.A223+self.A223+self.ESC+\"0;33m\"+self.A223+self.A223+self.ESC+\"31m\"+self.A220+self.A220+self.ESC+\"41m \"+self.ESC+\"1m\"+self.A176+self.A178+self.A177+\" \"+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+\" \"+self.ESC+\"41m \"+self.ESC+\"37;40m \"+self.ESC+\"31m\"+self.A223+self.ESC+\"41m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;33m\"+self.A220+self.ESC+\"43m\"+self.A178+self.ESC+\"0;33m\"+self.A220+self.A219+self.A223+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A176+self.A178+self.A177+self.A177+self.A176+self.A176+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+\" \"+self.A223+self.ESC+\"41m \"+self.ESC+\"37;40m \"+self.ESC+\"31;41m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;33;43m\"+self.A178+self.A177+self.A176+self.ESC+\"0;33m\"+self.A223+self.A223+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A176+self.A178+self.A178+\" \"+self.A177+\" 
\"+self.A176+self.A176+\" \"+self.ESC+\"0;37;40m \"+self.ESC+\"31;41m \"+self.ESC+\"37;40m \"+self.ESC+\"31m\"+self.A223+self.ESC+\"41m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"1;33m\"+self.A220+self.A220+self.ESC+\"0;33m\"+self.A220+self.A220+self.A223+\" \"+self.ESC+\"31;41m \"+self.ESC+\"1m\"+self.A176+self.A219+self.A178+self.A178+self.A177+self.A177+self.A176+\" \"+self.A176+\" \"+self.A176+self.A176+\" \"+self.ESC+\"0;31m\"+self.A220+\" \"+self.ESC+\"41m \"+self.ESC+\"37;40m \"+self.ESC+\"31;41m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"0;37m \"+self.ESC+\"1;33m\"+self.A223+self.A223+self.A223+self.ESC+\"0;33m\"+self.A223+\" \"+self.ESC+\"30;41m \"+self.ESC+\"1;31mShatterstar [W/X] \"+self.ESC+\"0;37;40m \"+self.ESC+\"30;41m \"+self.ESC+\"37;40m \"+self.ESC+\"30;41m \"+self.ESC+\"0m\\r\\n\"\n\t\tthismsg += self.ESC+\"37m \"+self.ESC+\"0m\\r\\n\"\n\t\treturn thismsg",
"def Print(self):\n\n\t\tif self.verbose:\n\n\t\t print (\"\\033[1m[HEADER]\\033[0m\")\n\t\t print (\"code:\\t\\t%s\" % self.kod)\n\t \tprint (\"version:\\t%s\" % self.ver)\n\t\t print (\"date and time:\\t%s\" % self.probid)\n\t\t print (\"dump number:\\t%s\" % self.knod)\n\t \tprint (\"number of histories:\\t%s\" % self.nps)\n\t\t print (\"number of pseudorandom numbers used:\\t%s\" % self.rnr)\n\t\t print (\"title: %s\" % self.title)\n\n\t\t if self.ntal>1:\n\t\t\t\tprint self.ntal, 'tallies:', self.ntals\n\t \telse:\n\t\t\t\tprint self.ntal, 'tally:', self.ntals\n\n\n\t\t if self.npert != 0:\n\t\t\t\tprint(\"number of perturbations: %s\" % self.npert)",
"def display_banner_animation(banner_text):\n session, term = getsession(), getterminal()\n highlight = getattr(term, color_primary)\n lowlight = getattr(term, color_secondary)\n animation_length = len(banner_text)\n animation_speed = 0.1\n\n def get_garbage():\n # get some random \"cracking\" strings\n txt = list(string.printable)[:len(banner_text)]\n random.shuffle(txt)\n return txt\n\n get_x = lambda: (\n # get x-position of banner (center)\n (term.width // 2) - (len(banner_text) // 2))\n\n def display_header(xpos, banner_text):\n x_top_left = xpos - 1\n x_bot_right = xpos + (len(banner_text) - 5)\n return u'\\r\\n'.join((\n u'{xpos}{txt}'.format(xpos=term.move_x(x_top_left),\n txt=lowlight(u'┬─────')),\n u'',\n u'{xpos}{txt}'.format(xpos=term.move_x(x_bot_right),\n txt=lowlight(u'─────┴')),\n ))\n\n def decorate_guess(guess, actual):\n # return string where matching letters are highlighted\n attr = None\n rstr = u''\n for idx, ch_guess in enumerate(guess):\n # optimized attribute draws\n if ch_guess == actual[idx]:\n if attr != highlight:\n attr = highlight\n rstr += term.normal + attr\n else:\n if attr != lowlight:\n attr = lowlight\n rstr += term.normal + attr\n rstr += ch_guess\n return rstr\n\n def merge_garbage(prior_guess, garbage, actual):\n # return string with new garbage mixed in,\n # except where already matching\n next_guess = garbage[:]\n for idx, garbage_item in enumerate(garbage):\n if prior_guess[idx] == actual[idx]:\n next_guess[idx] = actual[idx]\n else:\n next_guess[idx] = garbage_item\n return next_guess\n\n def make_match(guess, actual):\n next_guess = guess[:]\n indicies = range(len(actual))\n random.shuffle(indicies)\n for idx in indicies:\n if next_guess[idx] != actual[idx]:\n next_guess[idx] = actual[idx]\n break\n return next_guess\n\n xpos = get_x()\n\n # display header,\n echo(display_header(xpos, banner_text))\n\n # move-to banner animation row\n echo(term.move_up())\n\n guess = get_garbage()\n for idx in range(0, animation_length):\n # check for screen resize\n if session.poll_event('refresh'):\n # remove artifacts, get new center\n echo(term.move_x(0) + term.clear_eol)\n xpos = get_x()\n\n echo(term.move_x(xpos))\n echo(decorate_guess(guess=guess, actual=banner_text))\n echo(term.clear_eol)\n\n if guess == banner_text:\n # \"cracked\"\n break\n\n if term.inkey(timeout=animation_speed):\n # user canceled animation\n break\n\n # mix in new garbage\n guess = merge_garbage(prior_guess=guess,\n garbage=get_garbage(),\n actual=banner_text)\n\n # ensure at least one new index is guessed\n guess = make_match(guess=guess,\n actual=banner_text)\n\n # end of animation\n echo(term.move_x(xpos))\n echo(highlight(banner_text))\n echo(term.move_x(0) + (term.move_down * 2))",
"def get_description(self):\n print(\"This Iron door.\")",
"def print_outgoing_msg():\n\n print(\"\"\"\n A LOVELY little potential energy surface has been successfully generated by the\n Lim, Launder, and Moore auto-plotter (LLAMA) vers. 0.3!\n\n ############################################################################### \n LLAMA 0.3 written By:\n [a] Andrew Launder and Kevin Moore\n Center for Computational Quantum Chemistry, \n Dept. of Chemistry, Univ. of Georgia, Athens, GA, United States\n [b] Victoria Lim\n Dept. of Chemistry, Belmont University, Nashville, TN, United States\n ###############################################################################\n\n Thank you for very much for plotting with us today! Please do so again soon!\n \"\"\")\n\n return None",
"def _display_message(message: str) -> None:\n print(message)",
"def show_lose_screen():\n print(\"\"\"\n \n _ _ __ _ _ __ __ ____ ____ _ _ \n( \\/ )/ \\ / )( \\ ( ) / \\ / ___)( __) (_)/ ) \n ) /( O )) \\/ ( / (_/\\( O )\\___ \\ ) _) _( ( \n(__/ \\__/ \\____/ \\____/ \\__/ (____/(____) (_)\\_) \n\"\"\")",
"def show_greeting(self):\n self.output(' ------------------------ ')\n self.output('You are now playing ' + self.name)\n self.output(self.greeting)\n self.output(' ------------------------ ')",
"def download_banner(self, banner_path):\n serie = self._root.find('Series')\n banner = unicode(serie.find('banner').text)\n if banner != '' and not os.path.isfile(banner_path):\n urllib.urlretrieve(self.URL_BANNER + banner, banner_path)",
"def show_game_mission():\n print_bold(\"Misija:\")\n print(\"\\tOdaberi kućicu u kojoj se Talion može odmoriti ...\")\n print_bold(\"SAVJET:\")\n print(\"PAZI kako biraš jer neprijatelji su blizu!\")\n print_dotted_line()",
"def boston_info():\n # rendering text\n return 'Welcome to the Boston Info Slack Bot'",
"def show_footprint(self, fpname):\n logging.debug(\"show_footprint entered\")\n # container_name = \"%s-metadata\" % footprint_name\n # container = self.cf.get_container(container_name)\n # index = container.get_object(\"index.json\")\n # config = json.loads(index.fetch())\n # \n # \n # \n # logging.info(\"loaded footprint configuration\")\n # return config\n fp = self.get_footprint(fpname, start=False)\n pt = fp.status()\n print pt",
"def drawDescription(self):\n print(\"\\nPress the following keys to run the features of the GoPiGo3.\")\n print(\"To move the motors, make sure you have a fresh set of batteries powering the GoPiGo3.\\n\")",
"def print_header():\n print()\n print(\"*\" * 45)\n print(\"Please, select algorithm:\")\n print(\"*\" * 45)",
"def display_menu(self):\n print(\"\"\"\nLogistic System Menu\n1. Add Vehicles\n2. Add Item To The Cart\n3. Complete The Order\n4. Track The Order\n5. Quit \"\"\")",
"def show_theme_message(width):\n print_dotted_line()\n print_bold(\"Attack of The Orcs v0.0.5:\")\n msg = (\n \"\"\"\n Hrabri vitez Talion vraća se iz boja i želi predahnuti u jednom seocetu. \n U seocu se nalazi 5 kućica i želi stupiti u jednu od njih. \n Hrabri vitez ne zna da u ovom području ima neprijatelja.\n Odluči se za jedna od vrata ...\n \"\"\")\n\n print(textwrap.fill(msg, width=width))",
"def print_header(self):\n print(\"Running {} simulations.\".format(self.num_simulations))\n print(\"{0:2}% bias for men\".format(self.promotion_bias))\n print(\"{0:2} promotion cycles\".format(self.iterations_per_simulation))\n print(\"{0:2}% attrition rate\".format(self.attrition))\n print",
"def full_logo(self):\n self.def_logo(0x21)\n self.clear()\n self.send('!\"Stratum 0 Hacker-#$space Braunschweig')\n self.reset_codepage()",
"def FlashBang(self):\t\t\n\t\tprint(self.name.Title() + \"FlashBang!\")",
"def display_hangman(self):\n print(Fore.CYAN + HANGMAN_PICS[self.stage])\n print('\\n')\n print(self.progress + Style.RESET_ALL)\n print('\\n')",
"def show_madlib():\n\n mad_name = request.args.get(\"person\")\n mad_color = request.args.get(\"color\")\n mad_noun = request.args.get(\"noun\")\n mad_planet = request.args.get(\"planet\")\n mad_adverb = request.args.get(\"adverb\")\n mad_adjectives = request.args.getlist(\"adjectives\")\n\n return render_template(\"madlib.html\",\n person=mad_name,\n color=mad_color,\n noun=mad_noun,\n planet=mad_planet,\n adverb=mad_adverb,\n adjectives=mad_adjectives,\n )",
"def footprint(dynamic_footprint_modifier=0.):",
"def _intro():\n # TODO: should we print to stderr ?\n print(logo)\n print(__version__)",
"def print_intro(self):\n \n print('Did you know that birds hold the record for longest animal migrations?')",
"def main():\n create_home()\n create_about()\n create_banner_list()\n\n for banner_id in appdata.banner_info:\n banner.create_banner_page(banner_id)",
"def print_post():\n print('| | |'),",
"def stink(self):\r\n print(\"Dear lord!\\n\")",
"def printPluginHeader(self):\n print \"%s, version %s\" % (desc, version)",
"def print_header_information():\n\t\tprint \"Elijah Molloy\"\n\t\tprint \"70-510 - Spring 1 - 2018\"\n\t\tprint \"PROGRAMMING ASSIGNMENT #4\\n\"",
"def display_message():\n\tmessage = \"I'm learning how to use function.\"\n\tprint(message)"
] | [
"0.7396634",
"0.73954254",
"0.727118",
"0.72663784",
"0.7056715",
"0.7042273",
"0.70386666",
"0.700003",
"0.675228",
"0.6678629",
"0.651769",
"0.6499614",
"0.6490508",
"0.6374026",
"0.63351125",
"0.62801325",
"0.62523764",
"0.623986",
"0.61925286",
"0.61004984",
"0.59268594",
"0.5877095",
"0.58561337",
"0.58067316",
"0.5794721",
"0.57774323",
"0.57552075",
"0.57110965",
"0.5668594",
"0.56129366",
"0.5588274",
"0.55556446",
"0.545307",
"0.54205143",
"0.5397672",
"0.5396407",
"0.538219",
"0.5372994",
"0.5349511",
"0.5345657",
"0.5320224",
"0.53011334",
"0.52938586",
"0.526285",
"0.5249145",
"0.51900375",
"0.51863134",
"0.5179144",
"0.5166375",
"0.5150166",
"0.51445043",
"0.5142954",
"0.51250404",
"0.51114523",
"0.50786126",
"0.50689137",
"0.50600183",
"0.50598925",
"0.50364614",
"0.50186265",
"0.50055486",
"0.50033927",
"0.5002383",
"0.50013953",
"0.49982452",
"0.49965644",
"0.4981885",
"0.49816144",
"0.4981218",
"0.49788034",
"0.49767724",
"0.4970195",
"0.49578583",
"0.4956279",
"0.4949513",
"0.4944004",
"0.49416432",
"0.49383613",
"0.49358833",
"0.49352694",
"0.4925886",
"0.49209294",
"0.4910285",
"0.49034193",
"0.48951787",
"0.48910692",
"0.4890827",
"0.488863",
"0.48865774",
"0.48848492",
"0.48798287",
"0.48787722",
"0.4876138",
"0.48720604",
"0.4866319",
"0.48598447",
"0.48577523",
"0.48570257",
"0.48516864",
"0.48492494"
] | 0.76449466 | 0 |
Downloads a topology in mfpx file format | def get_net(self,netname, mol = False):
lines = self.mfp.get_net(netname)
return lines | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def export_topology(topology, filename='topology.gml'):\n\n nx.write_gml(topology, filename)",
"def x_download():\n\t#_loadconfig()\n\tconf = _get_config()\n\t#print conf['xplane']\n\tdownload_url = conf['xplane']['download']\n\tlocal(\"wget -P %s %s\" % (navimport.conf.work_dir(\"/xplane_zips\"), download_url))",
"def download_data():\n urllib.request.urlretrieve('http://cs.iit.edu/~culotta/cs579/a1/edges.txt.gz', 'edges.txt.gz')",
"def getMpcorb(url='https://minorplanetcenter.net/iau/MPCORB/MPCORB.DAT.gz', fname='MPCORB.DAT.gz', verbose=True):\n\n #filename = wget.download(url)\n try:\n r = requests.get(url, allow_redirects=True)\n open(fname, 'wb').write(r.content)\n if (verbose):\n print('Download complete:', url)\n except:\n print(\"Error in getMpcorb: could not download \", fname, \" at \", url)\n raise\n return",
"def download(self, outputfile:str, **format_options) -> str:\n return self.session.download(self.graph, outputfile, format_options)",
"def download(self, outputfile:str, **format_options) -> str:\n return self.connection.download(self.graph, outputfile, format_options)",
"def download_model():\n logging.info(\"[genreml] Downloading model...\")\n with urllib.request.urlopen(config.FMAModelConfig.FMA_MODEL_URL) as f:\n data = f.read()\n open(config.FMAModelConfig.FMA_MODEL_PATH, 'wb').write(data)\n logging.info(\"[genreml] Model download complete\")",
"def download_model(\\\n download_base='http://download.tensorflow.org/models/object_detection/', \\\n model_name='ssd_mobilenet_v1_coco_11_06_2017'\\\n ):\n\n # add tar gz to the end of file name\n model_file = model_name + '.tar.gz'\n\n try:\n opener = urllib.request.URLopener()\n opener.retrieve(download_base + model_file, \\\n model_file)\n tar_file = tarfile.open(model_file)\n for f in tar_file.getmembers():\n file_name = os.path.basename(f.name)\n if 'frozen_inference_graph.pb' in file_name:\n tar_file.extract(f, os.getcwd())\n except Exception as e:\n raise",
"def to_omnetpp(topology, path=None):\n try:\n from mako.template import Template\n except ImportError:\n raise ImportError('Cannot import mako.template module. '\n 'Make sure mako is installed on this machine.')\n set_delays = True\n set_capacities = True\n # Check correctness of capacity and delay attributes\n if not 'capacity_unit' in topology.graph or not topology.graph['capacity_unit'] in capacity_units:\n warn('Missing or invalid capacity unit attribute in the topology. The '\n 'output file will be generated without link capacity attributes.')\n set_capacities = False\n if not 'delay_unit' in topology.graph or not topology.graph['delay_unit'] in time_units:\n warn('Missing or invalid delay unit attribute in the topology. The '\n 'output file will be generated without link delay attributes.')\n set_delays = False\n template = Template(__TEMPLATE)\n variables = {\n 'topology': topology,\n 'set_capacities': set_capacities,\n 'set_delays': set_delays,\n }\n ned = template.render(**variables)\n if path:\n with open(path, \"w\") as out:\n out.write(ned)\n else:\n print(ned)",
"def download_map_area():\n filename = \"data.osm\"\n if CONFIG[\"SELECTION\"] == \"CACHE\":\n if not os.path.exists(filename):\n raise ValueError(\"Cannot use SELECTION=CACHE if no {} file exists.\".format(filename))\n else:\n return None, filename, os.path.getsize(filename)\n elif CONFIG[\"SELECTION\"] == \"PRESELECTED\":\n data = CONFIG[\"TEMPLATE\"].format(*CONFIG[\"PRESELECTIONS\"][CONFIG[\"PRESELECTION\"]])\n elif CONFIG[\"SELECTION\"] == \"USER\":\n data = CONFIG[\"TEMPLATE\"].format(*CONFIG[\"USER_SELECTION\"])\n else:\n raise ValueError(\"SELECTION={}\".format(CONFIG[\"SELECTION\"]))\n \n #Get XML data\n r = requests.get('http://overpass-api.de/api/interpreter', params={\"data\": data}, stream=True)\n with open(filename, 'wb') as fobj:\n for chunk in r.iter_content(chunk_size=1024): \n if chunk:\n fobj.write(chunk)\n return r.status_code, filename, os.path.getsize(filename)",
"def download_tile(self, xtile, ytile):\n location = 'http://maps.six.nsw.gov.au/arcgis/rest/services/public/NSW_Imagery/MapServer/tile/'\n destination = 'downloaded_tiles/'\n save_name = str(self.zoom_level) + '_' + str(xtile) + '_' + str(ytile)\n tile_url = location + save_name.replace('_', '/')\n tile = requests.get(tile_url, stream=True)\n with open(destination + save_name + '.png', 'wb') as out_file:\n tile.raw.decode_content = True\n shutil.copyfileobj(tile.raw, out_file)\n tilepng = png.Reader(file=tile.raw)\n # shutil.copyfileobj(tilepng, out_file)\n del tile",
"def topology(self, topo_file: str, *args: str):\n pass",
"def download_genotype_data():\n print(\"downloading genotype data\")\n download_from_url(PSAM_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.psam\", desc=\"downloading psam\")\n download_from_url(PVAR_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.pvar.zst\",\n desc=\"downloading pvar\")\n download_from_url(PGEN_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.pgen.zst\",\n desc=\"downloading pgen\")\n decompress_genotype_file(f\"{MERGED_GENOTYPE_FILE}.pvar\")\n decompress_genotype_file(f\"{MERGED_GENOTYPE_FILE}.pgen\")",
"def download(self):\n if not os.path.exists(self.pkg_dir):\n os.makedirs(self.pkg_dir)\n\n url = self.metadata_pkg[\"url\"]\n\n # Download modelpkg only if not already downloaded.\n if os.path.exists(self.file_path):\n self.is_downloaded = True\n else:\n print(f\"Fetching {os.path.basename(self.file_path)} model package from {url} to {self.file_path}\", flush=True)\n r = requests.get(url, stream=True)\n with open(self.file_path, \"wb\") as file_out:\n for chunk in r.iter_content(chunk_size=2048):\n file_out.write(chunk)\n r.close()\n self.is_downloaded = True",
"def _download_metafile(dataset, path=None):\n if not path:\n path = sunpy.config.get('downloads', 'sample_dir')\n base_url = 'https://spdf.gsfc.nasa.gov/pub/software/cdawlib/0MASTERS/'\n fname = dataset.lower() + '_00000000_v01.cdf'\n url = base_url + fname\n try:\n downloaded_file = pooch.retrieve(url=url, known_hash=None, fname=fname, path=path, progressbar=True)\n except ModuleNotFoundError:\n downloaded_file = pooch.retrieve(url=url, known_hash=None, fname=fname, path=path, progressbar=False)\n return downloaded_file",
"def download(self, outputfile: str, outputformat: str):\n pass",
"def openTMX(self, fn):\n\n #parse the TMX XML markup\n tree = ET.parse(fn)\n root = tree.getroot()\n self.size = int(root.attrib[\"width\"]), int(root.attrib[\"height\"])\n\n #find the offset at which the collision and behaviour layers tile data is stored\n collisionTilesetOffset = None\n behaviourTilesetOffset = None\n for ts in root.findall(\"tileset\"):\n if ts.attrib[\"name\"] == \"collision\":\n collisionTilesetOffset = int(ts.attrib[\"firstgid\"])-1\n elif ts.attrib[\"name\"] == \"behaviour\":\n behaviourTilesetOffset = int(ts.attrib[\"firstgid\"])-1\n if collisionTilesetOffset is None:\n raise error.DittoInvalidResourceException(fn, \"Collision tileset\")\n if behaviourTilesetOffset is None:\n raise error.DittoInvalidResourceException(fn, \"Behaviour tileset\")\n\n #create each layer, separating the collision and behaviour data\n self.layers = []\n self.collisionLayer = None\n self.behaviourLayer = None\n for layer in root.findall(\"layer\"):\n l = Layer()\n l.openTMXNode(layer)\n if l.level == -1: #collision layer indicated by level == -1\n self.collisionLayer = l\n elif l.level == -2:\n self.behaviourLayer = l\n else:\n self.layers.append(l)\n if self.collisionLayer is None:\n raise error.DittoInvalidResourceException(fn, \"Collision data layer\")\n if self.behaviourLayer is None:\n raise error.DittoInvalidResourceException(fn, \"Behaviour data layer\")\n\n #compensate for tilesets not starting at 1\n self.collisionLayer.offsetElements(collisionTilesetOffset)\n self.behaviourLayer.offsetElements(behaviourTilesetOffset)",
"def gexf_graph():\n # you must replace these lines and supply your own graph\n \n \n \n my_gexf = Gexf(\"JiajiaXie\", \"My awesome graph\")\n graph=my_gexf.addGraph(\"undirected\", \"static\", \"My awesome networks\")\n \n atr1=graph.addNodeAttribute('Type',type='string')\n\n\n for set in data_specific:\n if graph.nodeExists(set['set_num']) ==0:\n tm1=graph.addNode(set['set_num'], set['name'], r='0', g='0', b='0')\n tm1.addAttribute(atr1,\"set\")\n\n\n\n counter_test=1\n for set, part in data_parts.items():\n for key, part_list in part.items():\n interme =part_list['color']\n red=interme[0]+interme[1]\n green=interme[2]+interme[3]\n blue=interme[4]+interme[5]\n\n red_de=str(int(red,16))\n green_de=str(int(green,16))\n blue_de=str(int(blue,16))\n if graph.nodeExists(part_list['id'])==0:\n tm2=graph.addNode(part_list['id'], part_list['part_name'],r=red_de, g=green_de, b = blue_de)\n tm2.addAttribute(atr1,\"part\")\n\n\n counter_test+=1\n graph.addEdge(\"_\"+str(counter_test), set, part_list['id'], part_list['quantity'])\n\n\n\n f=open('bricks_graph.gexf','wb')\n my_gexf.write(f)\n\n\n return my_gexf.graphs[0]",
"def download_mojo(self, path=\".\", get_genmodel_jar=False, genmodel_name=\"\"):\n return ModelBase.download_mojo(self.leader, path, get_genmodel_jar, genmodel_name)",
"def gexf_graph():\n # you must replace these lines and supply your own graph\n gexf = Gexf(\"author\", \"title\")\n mygraph = gexf.addGraph(\"undirected\", \"static\", \"A web network\")\n atr_type = mygraph.addNodeAttribute('Type', type='string')\n atr_id = mygraph.addNodeAttribute('id', type='string')\n atr_label = mygraph.addNodeAttribute('label', type='string')\n atr_color_r = mygraph.addNodeAttribute('color_r', type='string', defaultValue='0')\n atr_color_g = mygraph.addNodeAttribute('color_g', type='string', defaultValue='0')\n atr_color_b = mygraph.addNodeAttribute('color_b', type='string', defaultValue='0')\n k = 0\n for i in range(min_parts()):\n tmp = mygraph.addNode(set_num[i], name[i], r=\"0\", g=\"0\", b=\"0\")\n tmp.addAttribute(atr_type, \"set\")\n tmp.addAttribute(atr_id, set_num[i])\n tmp.addAttribute(atr_label, name[i])\n for j in range(len(Parts[i][\"Parts\"])):\n if mygraph.nodeExists(Parts[i][\"Parts\"][j][\"number\"]+\"_\"+Parts[i][\"Parts\"][j][\"color\"][\"rgb\"])==0:\n temp = mygraph.addNode((Parts[i][\"Parts\"][j][\"number\"]+\"_\"+Parts[i][\"Parts\"][j][\"color\"][\"rgb\"]), Parts[i][\"Parts\"][j][\"name\"], r=str(int(Parts[i][\"Parts\"][j][\"color\"][\"rgb\"][0:2], 16)), g=str(int(Parts[i][\"Parts\"][j][\"color\"][\"rgb\"][2:4], 16)), b=str(int(Parts[i][\"Parts\"][j][\"color\"][\"rgb\"][4:6], 16)))\n temp.addAttribute(atr_type, \"part\")\n temp.addAttribute(atr_id, (Parts[i][\"Parts\"][j][\"number\"]+\"_\"+Parts[i][\"Parts\"][j][\"color\"][\"rgb\"]))\n temp.addAttribute(atr_label, Parts[i][\"Parts\"][j][\"name\"])\n temp.addAttribute(atr_color_r, Parts[i][\"Parts\"][j][\"color\"][\"rgb\"][0:2])\n temp.addAttribute(atr_color_g, Parts[i][\"Parts\"][j][\"color\"][\"rgb\"][2:4])\n temp.addAttribute(atr_color_b, Parts[i][\"Parts\"][j][\"color\"][\"rgb\"][4:6])\n mygraph.addEdge(str(k), set_num[i], (Parts[i][\"Parts\"][j][\"number\"]+\"_\"+Parts[i][\"Parts\"][j][\"color\"][\"rgb\"]), weight=Parts[i][\"Parts\"][j][\"quantity\"])\n k = k+1\n output_file = open(\"bricks_graph.gexf\", \"wb\")\n gexf.write(output_file)\n return -1",
"def download():\n basedir = os.path.dirname(os.path.dirname(__file__))\n print(basedir)\n datadir = os.path.join(basedir,\"data/NeonTreeEvaluation/\")\n print(\"Downloading data files to {}\".format(datadir)) \n eval_url = zenodo_url(concept_rec_id=\"3723356\", datadir=datadir)",
"def test_download_to_file(req, tmpdir):\n req.get(ENTREZ_URL, text='This works.')\n outdir = tmpdir.mkdir('outdir')\n filename = outdir.join('foo')\n expected = outdir.join('foo.gbk')\n config = core.Config(molecule='nucleotide', verbose=False)\n\n core.download_to_file('FOO', config, filename=filename)\n\n assert expected.check()",
"def download_mission(self):\n cmds = self.vehicle.commands\n cmds.download()\n # Wait until download is complete.\n cmds.wait_valid()",
"def generate_metadata(self):\n if self.options.mbtiles:\n return\n if not os.path.exists(self.output):\n os.makedirs(self.output)\n\n if self.options.profile == 'mercator':\n\n south, west = self.mercator.MetersToLatLon( self.ominx, self.ominy)\n north, east = self.mercator.MetersToLatLon( self.omaxx, self.omaxy)\n south, west = max(-85.05112878, south), max(-180.0, west)\n north, east = min(85.05112878, north), min(180.0, east)\n self.swne = (south, west, north, east)\n\n # Generate googlemaps.html\n if self.options.webviewer in ('all','google') and self.options.profile == 'mercator':\n if not self.options.resume or not os.path.exists(os.path.join(self.output, 'googlemaps.html')):\n f = open(os.path.join(self.output, 'googlemaps.html'), 'w')\n f.write( self.generate_googlemaps() )\n f.close()\n\n # Generate openlayers.html\n if self.options.webviewer in ('all','openlayers'):\n if not self.options.resume or not os.path.exists(os.path.join(self.output, 'openlayers.html')):\n f = open(os.path.join(self.output, 'openlayers.html'), 'w')\n f.write( self.generate_openlayers() )\n f.close()\n\n elif self.options.profile == 'geodetic':\n\n west, south = self.ominx, self.ominy\n east, north = self.omaxx, self.omaxy\n south, west = max(-90.0, south), max(-180.0, west)\n north, east = min(90.0, north), min(180.0, east)\n self.swne = (south, west, north, east)\n\n # Generate openlayers.html\n if self.options.webviewer in ('all','openlayers'):\n if not self.options.resume or not os.path.exists(os.path.join(self.output, 'openlayers.html')):\n f = open(os.path.join(self.output, 'openlayers.html'), 'w')\n f.write( self.generate_openlayers() )\n f.close()\n\n elif self.options.profile in ['raster','gearth','garmin']:\n\n west, south = self.ominx, self.ominy\n east, north = self.omaxx, self.omaxy\n\n self.swne = (south, west, north, east)\n\n # Generate openlayers.html\n if self.options.webviewer in ('all','openlayers'):\n if not self.options.resume or not os.path.exists(os.path.join(self.output, 'openlayers.html')):\n f = open(os.path.join(self.output, 'openlayers.html'), 'w')\n f.write( self.generate_openlayers() )\n f.close()\n\n\n # Generate tilemapresource.xml.\n if (self.options.tile_format != 'hybrid' and self.options.profile != 'garmin'\n and (not self.options.resume or not os.path.exists(os.path.join(self.output, 'tilemapresource.xml')))):\n f = open(os.path.join(self.output, 'tilemapresource.xml'), 'w')\n f.write( self.generate_tilemapresource())\n f.close()",
"def _download_obm_data_from_file_system(self) -> 'DataFrame':\n\n # note: fetch all OBM file part names\n url = f\"{self._wml_client.wml_credentials['url']}/v2/asset_files/auto_ml/{self.location.path.split('/auto_ml/')[-1]}/{self._run_id}/data/obm/features\"\n params = self._wml_client._params()\n params['flat'] = \"true\"\n\n response = requests.get(url,\n params=params,\n headers=self._wml_client._get_headers(),\n verify=False)\n\n if response.status_code != 200:\n raise ApiRequestFailure(u'Failure during {}.'.format(\"getting files information\"), response)\n\n file_names = [e['path'].split('/')[-1] for e in response.json()['resources'] if\n e['type'] == 'file' and e['path'].split('/')[-1].startswith('part')]\n\n # TODO: this can be done simultaneously (multithreading / multiprocessing)\n # note: download all data parts and concatenate them into one output\n parts = []\n for file_name in file_names:\n csv_response = requests.get(url + '/' + file_name,\n params=self._wml_client._params(),\n headers=self._wml_client._get_headers(),\n stream=True,\n verify=False)\n\n if csv_response.status_code != 200:\n raise ApiRequestFailure(u'Failure during {}.'.format(\"downloading model\"), csv_response)\n\n downloaded_asset = csv_response.content\n # note: read the csv/xlsx file from the memory directly into the pandas DataFrame\n buffer = io.BytesIO(downloaded_asset)\n parts.append(try_load_dataset(buffer=buffer))\n\n data = concat(parts)\n # --- end note\n return data",
"def download(self, session):\n target_path = self.get_target_full_dir()\n os.chdir(target_path)\n schema_get = session.get(self.get_full_url(), verify=False)\n target_name = self.get_target_name()\n logger.debug('Starting download of file {} to {}.'.format(target_name.upper(), target_path))\n with open(os.path.join(target_path, target_name), \"wb\") as code:\n code.write(schema_get.content)\n logger.info('{} file has been downloaded successfully.'.format(target_name.upper()))",
"def write_file(req, file_type, download, dataset, stream, period, root_name):\n# ~~~~ Loading up the GRIB file~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n#\n head, _ = path.splitext(root_name)\n\n if file_type == 'grib':\n\n if download:\n raise TelemacException(\\\n '... I am not programmed to '\n 'download grib files directly.\\n\\n')\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nLoading essentials from the GRIB\\n')\n grb2slf = Grib(dataset, req, stream)\n\n grb2slf.set_geometry()\n\n if stream == 'spec':\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nSpecial case for spectral file\\n')\n grb2slf.put_geometry('geo_'+head+'.slf')\n grb2slf.set_spectral()\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nConverting grib file(s) into SELAFIN\\n')\n grb2slf.put_content(root_name)\n\n# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n# ~~~~ Downloading the NetCDF file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n#\n# Unfortunately, I did not manage to access the NetCDF file remotely\n elif file_type == 'netcdf':\n\n ecmwf2slf = Ecmwf(period, req)\n if download:\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nMaking an ECMWF request\\n')\n ecmwf2slf.connect_to_ecmwf(\"datasets/%s\" % (req['dataset']))\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nHaving to download the ECMWF file first\\n')\n ecmwf2slf.download_ecmwf()\n print(\" ~> download completed.\")\n\n ecmwf2slf.open_ecmwf()\n ecmwf2slf.set_geometry()\n\n if stream == 'spec':\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nSpecial case for spectral file\\n')\n ecmwf2slf.put_geometry('geo_'+head+'.slf')\n ecmwf2slf.set_spectral()\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nConverting netcdf file into SELAFIN\\n')\n ecmwf2slf.put_content(root_name, stream)",
"def maybe_download():\n\n print(\"Downloading Inception 5h Model ...\")\n download.maybe_download_and_extract(url=data_url, download_dir=data_dir)",
"def saveUploadedTopology(self, file):\r\n filename = str(file)\r\n with open(os.path.join(main.settings.TOPOLOGY_DIR, filename), 'wb+') as destination:\r\n for chunk in file.chunks():\r\n destination.write(chunk)",
"def topology(self, topo_file: str, *args: str):\n self.scion_sh('topology', '-c', topo_file, '-d', *args)",
"def test_get_topology_template(self):\n pass",
"def write_ptm_gridfile(self,fn):\n vertex_hdr = \" Vertex Data: vertex_number, x, y\"\n poly_hdr = \" Polygon Data: polygon_number, number_of_sides,center_x, center_y, center_depth, side_indices(number_of_sides), marker(0=internal,1=open boundary)\"\n side_hdr = \" Side Data: side_number, side_depth, node_indices(2), cell_indices(2), marker(0=internal,1=external,2=flow boundary,3=open boundary)\"\n\n with open(fn,'wt') as fp:\n # write header counts\n fp.write(\" Number of Vertices\\n\")\n fp.write(\" %20d\\n\"%self.Nnodes())\n fp.write(\" Number of Polygons\\n\")\n fp.write(\" %20d\\n\"%self.Ncells())\n fp.write(\" Number of Sides\\n\")\n fp.write(\" %20d\\n\"%self.Nedges())\n fp.write(\" NODATA (land) value\\n\")\n fp.write(\" -9999.000000000\\n\")\n\n # write vertex info\n fp.write(vertex_hdr+\"\\n\")\n for v in range(self.Nnodes()):\n fp.write(\" %10d %16.7f %16.7f\\n\"%(v+1,\n self.nodes['x'][v,0],\n self.nodes['x'][v,1]))\n\n # write polygon info\n fp.write(poly_hdr+\"\\n\")\n cell_write_str1 = \" %10d %10d %16.7f %16.7f %16.7f \"\n cell_depths = self.cell_depths()\n for e in range(self.Ncells()):\n edges = self.cells['edges'][e,:]\n edges[edges<0] = -1\n edge_str = \" \".join( [\"%10d\"%(s+1) for s in edges] )\n edge_str = edge_str+\" %10d\\n\"%(self.cells['mark'][e])\n nsides = sum(edges>=0)\n fp.write(cell_write_str1%(e+1,\n nsides,\n self.cells['_center'][e,0],\n self.cells['_center'][e,1],\n cell_depths[e]))\n fp.write(edge_str)\n \n # write side info\n fp.write(side_hdr+\"\\n\")\n edge_depths = self.edge_depths()\n edge_write_str = \" %10d %16.7f %10d %10d %10d %10d %10d\\n\"\n for s in range(self.Nedges()):\n edges = self.edges['cells'][s,:]\n edges[edges<0] = -1 \n nodes = self.edges['nodes'][s,:]\n nodes[nodes<0] = -1\n fp.write(edge_write_str%(s+1,\n edge_depths[s],\n nodes[0]+1,\n nodes[1]+1,\n edges[0]+1,\n edges[1]+1,\n self.edges['mark'][s]))",
"def _download(self):\n self._system.download(\"http://geant4.web.cern.ch/geant4/support/source/\" + self._tar_name)",
"def download():\r\n reader = GSODDataReader()\r\n year_list = range(2001, 2012)\r\n austin = reader.collect_data(year_list, exact_station=True,\r\n station_name='AUSTIN CAMP MABRY', state='TX', country='US')\r\n houston = reader.collect_data(year_list, exact_station=True,\r\n station_name='HOUSTON/D.W. HOOKS', state='TX', country='US')\r\n new_york = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEW YORK/LA GUARDIA', state='NY', country='US')\r\n newark = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEWARK INTL AIRPORT', state='NJ', country='US')\r\n punta_arenas = reader.collect_data(year_list, exact_station=True,\r\n station_name='PUNTA ARENAS', country='CH')\r\n wellington = reader.collect_data(year_list, exact_station=True,\r\n station_name='WELLINGTON AIRPORT', country='NZ')\r\n store = HDFStore('weather.h5')\r\n store['austin'] = austin\r\n store['houston'] = houston\r\n store['nyc'] = new_york\r\n store['newark'] = newark\r\n store['punta_arenas'] = punta_arenas\r\n store['wellington'] = wellington\r\n store.close()",
"def download(self):\n pass",
"def download(self):\n pass",
"def read_szf_fmv_12(eps_file):\n data = {}\n metadata = {}\n\n n_lines = eps_file.mdr_counter\n n_node_per_line = eps_file.mdr[\"LONGITUDE_FULL\"].shape[1]\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n # extract metadata\n metadata[\"spacecraft_id\"] = np.int8(eps_file.mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(eps_file.mphr[\"ORBIT_START\"])\n metadata[\"state_vector_time\"] = datetime.strptime(\n eps_file.mphr[\"STATE_VECTOR_TIME\"][:-4], \"%Y%m%d%H%M%S\")\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n for f in fields:\n metadata[f] = np.int16(eps_file.mphr[f.upper()])\n\n # extract time\n dt = np.datetime64(\n \"2000-01-01\") + eps_file.mdr[\"UTC_LOCALISATION\"][\"day\"].astype(\n \"timedelta64[D]\"\n ) + eps_file.mdr[\"UTC_LOCALISATION\"][\"time\"].astype(\"timedelta64[ms]\")\n data[\"time\"] = dt[idx_nodes]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\", \"flagfield_rf1\", \"flagfield_rf2\", \"flagfield_pl\",\n \"flagfield_gen1\"\n ]\n\n # 101 min = 6082 seconds\n # state_vector_time = ascending node crossing time - 1520.5,\n # time crossing at -90 lat\n orbit_start_time = metadata[\"state_vector_time\"] - timedelta(\n seconds=1520.5)\n orbit_end_time = orbit_start_time + timedelta(seconds=6082)\n\n data[\"orbit_nr\"] = np.ma.zeros(\n data[\"time\"].size, dtype=np.int32,\n fill_value=int32_nan) + metadata[\"orbit_start\"]\n data[\"orbit_nr\"][data[\"time\"] > orbit_end_time] += 1\n\n metadata[\"orbits\"] = {}\n for orbit_nr in np.unique(data[\"orbit_nr\"]):\n if orbit_nr == metadata[\"orbit_start\"]:\n metadata[\"orbits\"][orbit_nr] = (orbit_start_time, orbit_end_time)\n else:\n metadata[\"orbits\"][orbit_nr] = (orbit_end_time, orbit_end_time +\n timedelta(seconds=6082))\n\n # extract data\n for f in fields:\n if eps_file.mdr_sfactor[f.upper()] == 1:\n data[f] = eps_file.mdr[f.upper()].flatten()[idx_nodes]\n else:\n data[f] = (eps_file.mdr[f.upper()].flatten() * 1. /\n eps_file.mdr_sfactor[f.upper()])[idx_nodes]\n\n data[\"swath_indicator\"] = (data[\"beam_number\"].flatten() > 3).astype(\n np.uint8)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n fields = [(\"longitude_full\", long_nan), (\"latitude_full\", long_nan),\n (\"sigma0_full\", long_nan), (\"inc_angle_full\", uint_nan),\n (\"azi_angle_full\", int_nan), (\"land_frac\", uint_nan),\n (\"flagfield_gen2\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = eps_file.mdr[f.upper()].flatten()\n invalid = eps_file.mdr[f.upper()].flatten() == nan_val\n\n if eps_file.mdr_sfactor[f.upper()] != 1:\n data[f] = data[f] * 1. / eps_file.mdr_sfactor[f.upper()]\n\n data[f][invalid] = nan_val\n\n # modify longitudes from (0, 360) to (-180, 180)\n mask = np.logical_and(data[\"longitude_full\"] != long_nan,\n data[\"longitude_full\"] > 180)\n data[\"longitude_full\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n idx = (data[\"azi_angle_full\"] != int_nan) & (data[\"azi_angle_full\"] < 0)\n data[\"azi_angle_full\"][idx] += 360\n\n # set flags\n data[\"f_usable\"] = set_flags(data)\n\n return data, metadata",
"def download_caffe_model(model_name, meta_info, dst_dir='./model'):\n if not os.path.isdir(dst_dir):\n os.mkdir(dst_dir)\n model_name = os.path.join(dst_dir, model_name)\n assert 'prototxt' in meta_info, \"missing prototxt url\"\n prototxt = mx.test_utils.download(meta_info['prototxt'], model_name+'_deploy.prototxt')\n assert 'caffemodel' in meta_info, \"mssing caffemodel url\"\n caffemodel = mx.test_utils.download(meta_info['caffemodel'], model_name+'.caffemodel')\n assert 'mean' in meta_info, 'no mean info'\n mean = meta_info['mean']\n if isinstance(mean, str):\n mean = mx.test_utils.download(mean, model_name+'_mean.binaryproto')\n return (prototxt, caffemodel, mean)",
"def download(data_type, gs_aoi, main_dir):\n # Get URLs for tiles covered by a polygon:\n # ----------------------------------------\n tiles = get_tile_names(gs_aoi)\n print('Found {} products'.format(len(tiles['tile_names'])))\n\n # Make sure temporary folder for download exists:\n # -----------------------------------------------\n dwn_dir = join(main_dir, data_type)\n if not exists(dwn_dir):\n makedirs(dwn_dir)\n\n # Proceed to download:\n # --------------------\n if data_type == 'DTM':\n # DOWNLOAD DTM FILES & UNZIP:\n # ---------------------------\n print('\\nDownloading DTM files:')\n for num, name in enumerate(tiles['dtm_url']):\n print('{} of {}'.format(num+1, len(tiles['dtm_url'])))\n dwn_stat, file_name = download_file(name, dwn_dir)\n print('File {}.'.format(dwn_stat))\n extract_zip(join(dwn_dir, file_name))\n # Delete ZIP file after extraction\n remove(join(dwn_dir, file_name))\n \n # Finished downloading:\n # ---------------------\n out_msg = 'Finished downloading DTM files!'\n \n elif data_type == 'LAZ':\n # DOWNLOAD LAZ FILES:\n # -------------------\n print('\\nDownloading LAZ files:')\n for num, name in enumerate(tiles['laz_url']):\n print('{} of {}'.format(num+1, len(tiles['laz_url'])))\n dwn_stat, _ = download_file(name, dwn_dir)\n print('File {}.'.format(dwn_stat))\n \n # Finished downloading:\n # ---------------------\n out_msg = 'Finished downloading LAZ files!'\n \n else:\n dwn_dir = None\n out_msg = 'Unexpected data_type'\n \n # Output dictionary:\n # ------------------\n out = {'out_msg': out_msg,\n 'out_dir': dwn_dir}\n \n return out",
"def download_tf_params():\n\n if not os.path.exists(MODEL_DIR):\n os.makedirs(MODEL_DIR)\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(MODEL_DIR, filename)\n\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (\n filename, float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n statinfo = os.stat(filepath)\n print()\n print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.')\n\n tarfile.open(filepath, 'r:gz').extractall(MODEL_DIR)",
"def start(self):\n #url = \"http://xapi.openstreetmap.org\" \\\n #url = \"http://osm.bearstech.com\" \\\n url = \"http://osmxapi.hypercube.telascience.org\" \\\n \"/api/0.6/node[amenity=%s][bbox=%s]\" % \\\n (self._amenity, self._location.getBox())\n\n self._has_list = False\n self._places = None\n self._osm_hand.clear_places()\n \n try:\n self._net_if.download(url)\n except Exception as inst:\n self.send_error(inst)",
"def download():\n raise NotImplementedError",
"def __init__(self):\n\t\tself.label = \"Endpoint Downloader\"\n\t\tself.description = \"This tool downloads geometry from queryable ArcGis Server endpoint.\"\n\t\tself.canRunInBackground = False",
"def get_imc_topo(topo_file):\n topo_graph = nx.Graph()\n with open(topo_file, 'r') as f:\n for line in f.readlines():\n if (len(line) > 10) and (line[0] != '#'):\n split_data = line.split()\n source = split_data[0]\n dest = split_data[2]\n #capacity = 1000 # We are fixing this to one.\n capacity = get_imc_capacity(split_data[1], split_data[3])\n if not topo_graph.has_edge(source, dest):\n topo_graph.add_edge(source, dest, capacity = capacity)\n # Checks graph for any componnets and returns the largest one.\n topo_graph = validate_graph(topo_graph)\n f.close()\n return topo_graph",
"def export_mlp(self):\n return self.net",
"def download_special(pxdataset, data_dir):\n # PXD004074 (Tsr1) --------------------------------------------------------\n if pxdataset.pxid == \"PXD004074\":\n tsr1_filename = \"Rappsilber_Cook_CLMS_Tsr1_fasta.zip\"\n tsr1_zip = os.path.join(data_dir, tsr1_filename)\n pxdataset.pxget(tsr1_filename, data_dir)\n\n with zipfile.ZipFile(tsr1_zip, \"r\") as fname:\n fname.extractall(data_dir)\n\n # PXD010222 (PPARg_LBD) ---------------------------------------------------\n if pxdataset.pxid == \"PXD010222\":\n ppar_seq = [\n \">wef|PV4545|PPARg-LBD_human GST-tagged PPARgamma LBD\",\n \"MAPILGYWKIKGLVQPTRLLLEYLEEKYEEHLYERDEGDKWRNKKFELGLEFPNLPYYIDGD\",\n \"VKLTQSMAIIRYIADKHNMLGGCPKERAEISMLEGAVDIRYGVSRIAYSKDFETLKVDFLSK\",\n \"LPEMLKMFEDRLCHKTYLNGDHVTHPDFMLYDALDVVLYMDPMCLDAFPKLVCFKKRIEAIP\",\n \"QIDKYLKSSKYIALWPLQGWQATFGGGDHPPKSDLVPRHNQTSLYKKAGTMQLNPESADLRA\",\n \"LAKHLYDSYIKSFPLTKAKARAILTGKTTDKSPFVIYDMNSLMMGEDKIKFKHITPLQEQSK\",\n \"EVAIRIFQGCQFRSVEAVQEITEYAKSIPGFVNLDLNDQVTLLKYGVHEIIYTMLASLMNKD\",\n \"GVLISEGQGFMTREFLKSLRKPFGDFMEPKFEFAVKFNALELDDSDLAIFIAVIILSGDRPG\",\n \"LLNVKPIEDIQDNLLQALELQLKLNHPESSQLFAKLLQKMTDLRQIVTEHVQLLQVIKKTET\",\n \"DMSLHPLLQEIYKDL\"\n ]\n\n ppar_path = os.path.join(data_dir, \"pparg.fasta\")\n with open(ppar_path, \"w\") as fasta:\n fasta.writelines([l + \"\\n\" for l in ppar_seq])",
"def _download_to_flc(self):\n self.communicator.download_to_flc()",
"def download(data_type, gs_aoi, main_dir, local_rep=True):\n # Get URLs for tiles covered by a polygon:\n tiles = get_tile_names(gs_aoi, data_type)\n print(f'Found {len(tiles)} products')\n\n # Make sure temporary folder for download exists:\n dwn_dir = join(main_dir, data_type)\n if not exists(dwn_dir):\n makedirs(dwn_dir)\n\n if local_rep:\n # Copy DTM files from local repository:\n print('\\nCopying DTM files:')\n for num, name in enumerate(tiles):\n print('{} of {}'.format(num+1, len(tiles)))\n dwn_stat, _ = copy_local(name, dwn_dir)\n print('File {}.'.format(dwn_stat))\n out_msg = 'Finished copying DTM files!'\n else:\n # Download DTM files:\n print(f\"\\nDownloading {data_type} files:\")\n for num, name in enumerate(tiles):\n print('{} of {}'.format(num+1, len(tiles)))\n dwn_stat, _ = download_file(name, dwn_dir)\n print('File {}.'.format(dwn_stat))\n if data_type == \"DTM\":\n # Convert to Geotiff\n print(\"Converting to GeoTIFF...\")\n result = asc_to_gtif(dwn_dir)\n print(result)\n out_msg = \"Finished downloading DTM files!\"\n\n # Output dictionary:\n out = {'out_msg': out_msg,\n 'out_dir': dwn_dir}\n\n return out",
"def testGraphExtract(self):\n graph = Graph2()\n graph.parseFile(TESTFILE)",
"def test_download_terrascope():\n\n s1_belgium.download(\"sigma0_cube_terrascope.nc\",format=\"NetCDF\")",
"def save_to_geojson(self, topology_map, filename):",
"def fetch(self) -> None:\n workflow_spec_path = os.path.join(self._output_dir, self._spec)\n self._download_file(self._parsed_url.original_url, workflow_spec_path)",
"def main(url, localfile):\n ph.download_file(url, localfile)",
"def download(self):\n\n with open(self.dataset_path) as dataset_file:\n dataset = json.load(dataset_file)\n\n path = \"\".join([POST_HIT_PATH, dataset[\"dataset\"][\"data_path\"]])\n if not os.path.exists(path):\n os.makedirs(path)\n\n protocole = dataset[\"dataset\"][\"protocole\"]\n\n download_links = []\n\n for resource in dataset[\"dataset\"][\"resources\"]:\n file_path = \"\".join([path, resource[\"filename\"]])\n\n #Check if the the download link has not been used before (One download link for all)\n if resource[\"download_link\"] not in download_links:\n \n print(\"DOWNLOADING : {}\".format(resource[\"filename\"]))\n f = urllib.request.urlopen(resource[\"download_link\"])\n data = f.read()\n with open(file_path, \"wb\") as donwload_file:\n donwload_file.write(data)\n\n download_links.append(resource[\"download_link\"])\n\n \n #Extract all files from the tar archives if necessary\n if tarfile.is_tarfile(file_path):\n tf = tarfile.open(file_path)\n tf.exractall()",
"def build_topo(topo_file, display_graph = False):\n topo_graph = read_topo( topo_file )\n # mininet topo\n topo = NetworkXTopo( )\n topo.build_network( topo_graph, HOSTS_PER_SWITCH )\n hosts = topo.hosts( )\n # Debug \n print \"Total number of Vertices:\", len(topo.switches())\n print \"Total number of Edges(including edges to hosts):\", len(topo.links())\n #for host in hosts:\n # print host\n #for link in net.links():\n # print link\n if display_graph:\n draw_graph(topo_graph)\n return topo",
"def download(fnames):\n download_path = Path('./models')\n if not download_path.exists() or not download_path.is_dir():\n print('The directory \\'models\\' does not exist!')\n print('Please ensure you are in the top level of the visual-attention-networks repository')\n print(' and that the \\'models\\' directory exists')\n sys.exit()\n\n server_url = 'https://github.com/davidmascharka/tbd-nets/releases/download/v1.0/'\n if isinstance(fnames, str): # a single file\n fnames = [fnames]\n for fname in fnames:\n if (download_path / fname).exists():\n print('Skipping {}: the file already exists'.format(fname))\n continue\n\n print('Downloading {}'.format(fname))\n urlretrieve(server_url + fname, str((download_path/fname).absolute()), _download_info)\n print('Finished')",
"def download_pojo(self, path=\"\", get_genmodel_jar=False, genmodel_name=\"\"):\n return h2o.download_pojo(self.leader, path, get_jar=get_genmodel_jar, jar_name=genmodel_name)",
"def download_structure(self):\n pdbl = PDBList()\n pdbl.retrieve_pdb_file(self.struct_name, pdir=self.struct_dir)",
"def route_information(th_object, topology_info, file_name, node1, node2, path):\n save_path = path + node1 + \"_\" + node2 + \"_vs_t2.csv\"\n route_data = th_object.get_node_len_etx(topology_info, node1, node2)\n with open(save_path, \"w+\") as f_name:\n f_name.write(\"Time,No_hopes,Cost\\n\")\n cc = 0\n for k in file_name:\n f_name.write(str(k)[11:-7] + \",\" + str(route_data[cc]['hopes_count']) + \",\" + str(route_data[cc]['cost']) +\n \"\\n\")\n cc += 1\n print(node1 + \" \" + node2 + \" route information exported\")",
"def download_file(path, filename, destination):\n import os\n command = \"wget -q -O \"+destination+\"/\"+filename+\" ftp://nomads.ncdc.noaa.gov/\"+path+\"/\"+filename\n os.system(command)",
"def download_files(self):",
"def download_model(name: str) -> str:\n model_name, model_type, model_url = ModelInfo.get_model_info(name)\n model_path = _create_dirs(model_name)\n if model_type == \"single\":\n model_path = _download_file(model_url, model_path)\n elif model_type == \"zip\":\n model_path = _download_zip_model(model_url, model_path)\n else:\n print(f\"model type {model_type} not yet implemented\")\n model_path = \"\"\n return model_path",
"def download():\r\n\tglobal server\r\n\tfolder = str(get_folder())\r\n\tfilename = str(get_filename())\r\n\tprint(folder)\r\n\r\n\t# on crée le chemin absolue\r\n\tpath = \"partage/\"+folder +\"/\"+filename\r\n\tprint(path)\r\n\tdata = [\"download\", path]\r\n\t#on dump la liste\r\n\tdata = pickle.dumps(data)\r\n\tserver.send(data)\r\n\r\n\tcontent = server.recv(2**30) # Reception du conenue du fichier\r\n\r\n\tif content == b\"unknown\":\r\n\t\tprint(\"Fichier inéxistant\")\r\n\telse:\r\n\t\twith open(filename, \"wb\") as f: # Creation d'un fichier similaire\r\n\t\t\tprint(content)\r\n\t\t\tf.write(content)\r\n\t\t\tf.close()",
"def download_phys(out_dataset_folder, N_first=None):\n return download_dataset(\n out_dataset_folder=out_dataset_folder,\n server_url=_SERVER_URL,\n checksums_path=_CHECKSUM_FILE,\n paths_func=phys_paths_func,\n N_first=N_first*3 if N_first else None # Three items per subject\n )",
"def read(self, url: str):\n\n log.info(f\"Downloading KMZ file {basename(url)}\")\n kml = self.fetch(url)\n\n log.info(\"Parsing KML data\")\n self.iter_elems = iterparse(BytesIO(kml), events=(\"start\", \"end\"), resolve_entities=False)\n\n prod_items = {\n \"issuer\": \"Issuer\",\n \"product_id\": \"ProductID\",\n \"generating_process\": \"GeneratingProcess\",\n \"issue_time\": \"IssueTime\",\n }\n\n nsmap = None\n\n # Get Basic Metadata\n prod_definition = None\n prod_definition_tag = None\n for event, element in self.iter_elems:\n if event == \"start\":\n # get namespaces from root element\n if nsmap is None:\n nsmap = element.nsmap\n prod_definition_tag = f\"{{{nsmap['dwd']}}}ProductDefinition\"\n elif event == \"end\":\n if element.tag == prod_definition_tag:\n prod_definition = element\n # stop processing after head\n # leave forecast data for iteration\n break\n\n self.metadata = {k: prod_definition.find(f\"{{{nsmap['dwd']}}}{v}\").text for k, v in prod_items.items()}\n self.metadata[\"issue_time\"] = dt.datetime.fromisoformat(self.metadata[\"issue_time\"])\n\n # Get time steps.\n timesteps = prod_definition.findall(\n \"dwd:ForecastTimeSteps\",\n nsmap,\n )[0]\n self.timesteps = [dt.datetime.fromisoformat(i.text) for i in timesteps.getchildren()]\n\n # save namespace map for later iteration\n self.nsmap = nsmap",
"def download_tile(map_layer, zoom, x, y):\n try:\n tile_url = map_layer.get_tile_url(zoom, x, y)\n tmp_file, headers = urllib.request.urlretrieve(tile_url)\n return (x, y), tmp_file\n except URLError as e:\n app.logger.info(\"Error downloading tile x={}, y={}, z={} for layer {}: {}\".format(\n x, y, zoom, map_layer, e.reason))\n return (x, y), pkg_resources.resource_filename(\"geos\", \"static/empty_tile.png\")",
"def download_latex(self):\n try:\n # $ Set the Arxiv Object to ensure Proper extraction\n identity,paper = self.extract_meta_from_remote(self.paper_id)\n self.identity = identity\n\n if not dir_exists(self.paper_root_path):\n os.makedirs(self.paper_root_path)\n # $ Download the paper. \n downloaded_data = arxiv.download(paper,dirpath=self.paper_root_path,slugify=lambda paper: paper.get('id').split('/')[-1],prefer_source_tarfile=True)\n return downloaded_data\n except Exception as e:\n raise ArxivAPIException(self.paper_id,str(e))",
"def get_topology(odl_url, odl_usr, odl_pass):\n if odl_url.endswith('/'):\n odl_url = odl_url[:-1]\n topology_url = odl_url + '/network-topology:network-topology/'\n topology_json = call_odl_api(odl_usr, odl_pass, topology_url)\n return topology_json",
"def download(dbname, dts, bbox=None):\n log = logging.getLogger(__name__)\n res = 0.36\n url = \"n5eil01u.ecs.nsidc.org\"\n ftp = FTP(url)\n ftp.login()\n for dt in [dts[0] + timedelta(tt) for tt in range((dts[-1] - dts[0]).days + 1)]:\n try:\n r = ftp.cwd(\"/pub/SAN/SMAP/SPL3SMP.003/{0}\".format(dt.strftime(\"%Y.%m.%d\")))\n if r.find(\"successful\") > 0:\n outpath = tempfile.mkdtemp()\n fname = [f for f in ftp.nlst() if f.find(\"h5\") > 0][0]\n with open(\"{0}/{1}\".format(outpath, fname), 'wb') as f:\n ftp.retrbinary(\"RETR {0}\".format(fname), f.write)\n f = h5py.File(\"{0}/{1}\".format(outpath, fname))\n lat = f['Soil_Moisture_Retrieval_Data']['latitude'][:, 0]\n lon = f['Soil_Moisture_Retrieval_Data']['longitude'][0, :]\n lon[lon > 180] -= 360.0\n # FIXME: Need to add reprojection from EASE grid\n i1, i2, j1, j2 = datasets.spatialSubset(np.sort(lat)[::-1], np.sort(lon), res, bbox)\n lati = np.argsort(lat)[::-1][i1:i2]\n loni = np.argsort(lon)[j1:j2]\n sm = np.zeros((len(lati), len(loni)))\n for i in range(len(lati)):\n for j in range(len(loni)):\n sm[i, j] = f['Soil_Moisture_Retrieval_Data']['soil_moisture'][i, j]\n # FIXME: Use spatially variable observation error\n # sme = f['Soil_Moisture_Retrieval_Data']['soil_moisture_error'][i1:i2, j1:j2]\n lat = np.sort(lat)[::-1][i1:i2]\n lon = np.sort(lon)[j1:j2]\n filename = dbio.writeGeotif(lat, lon, res, sm)\n dbio.ingest(dbname, filename, dt, table, False)\n else:\n log.warning(\"No SMAP data available for {0}.\".format(dt.strftime(\"%Y-%m-%d\")))\n except:\n log.warning(\"No SMAP data available for {0}.\".format(dt.strftime(\"%Y-%m-%d\")))",
"def main(pst_file):\n opst = pypff.open(pst_file)\n root = opst.get_root_folder()\n\n message_data = folder_traverse(root, [], **{'pst_name': pst_file, 'folder_name': 'root'})\n\n header = ['pst_name', 'folder_name', 'creation_time', 'submit_time', 'delivery_time',\n 'sender', 'subject', 'attachment_count']\n\n return message_data, header",
"def read_szf_fmv_13(eps_file):\n data = {}\n metadata = {}\n\n n_lines = eps_file.mdr_counter\n n_node_per_line = eps_file.mdr[\"LONGITUDE_FULL\"].shape[1]\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n # extract metadata\n metadata[\"spacecraft_id\"] = np.int8(eps_file.mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(eps_file.mphr[\"ORBIT_START\"])\n metadata[\"state_vector_time\"] = datetime.strptime(\n eps_file.mphr[\"STATE_VECTOR_TIME\"][:-4], \"%Y%m%d%H%M%S\")\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n for f in fields:\n metadata[f] = np.int16(eps_file.mphr[f.upper()])\n\n # extract time\n dt = np.datetime64(\n \"2000-01-01\") + eps_file.mdr[\"UTC_LOCALISATION\"][\"day\"].astype(\n \"timedelta64[D]\"\n ) + eps_file.mdr[\"UTC_LOCALISATION\"][\"time\"].astype(\"timedelta64[ms]\")\n data[\"time\"] = dt[idx_nodes]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\", \"flagfield_rf1\", \"flagfield_rf2\", \"flagfield_pl\",\n \"flagfield_gen1\"\n ]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\"\n ]\n\n # 101 min = 6082 seconds\n # state_vector_time = ascending node crossing time - 1520.5,\n # time crossing at -90 lat\n orbit_start_time = metadata[\"state_vector_time\"] - timedelta(\n seconds=1520.5)\n orbit_end_time = orbit_start_time + timedelta(seconds=6082)\n\n data[\"orbit_nr\"] = np.ma.zeros(\n data[\"time\"].size, dtype=np.int32,\n fill_value=int32_nan) + metadata[\"orbit_start\"]\n data[\"orbit_nr\"][data[\"time\"] > orbit_end_time] += 1\n\n metadata[\"orbits\"] = {}\n for orbit_nr in np.unique(data[\"orbit_nr\"]):\n if orbit_nr == metadata[\"orbit_start\"]:\n metadata[\"orbits\"][orbit_nr] = (orbit_start_time, orbit_end_time)\n else:\n metadata[\"orbits\"][orbit_nr] = (orbit_end_time, orbit_end_time +\n timedelta(seconds=6082))\n\n # extract data\n for f in fields:\n if eps_file.mdr_sfactor[f.upper()] == 1:\n data[f] = eps_file.mdr[f.upper()].flatten()[idx_nodes]\n else:\n data[f] = (eps_file.mdr[f.upper()].flatten() * 1. /\n eps_file.mdr_sfactor[f.upper()])[idx_nodes]\n\n data[\"swath_indicator\"] = (data[\"beam_number\"].flatten() > 3).astype(\n np.uint8)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n fields = [(\"longitude_full\", long_nan), (\"latitude_full\", long_nan),\n (\"sigma0_full\", long_nan), (\"inc_angle_full\", uint_nan),\n (\"azi_angle_full\", int_nan), (\"flagfield\", int_nan)]\n\n for f, nan_val in fields:\n data[f] = eps_file.mdr[f.upper()].flatten()\n invalid = eps_file.mdr[f.upper()].flatten() == nan_val\n\n if eps_file.mdr_sfactor[f.upper()] != 1:\n data[f] = data[f] * 1. / eps_file.mdr_sfactor[f.upper()]\n\n data[f][invalid] = nan_val\n\n # modify longitudes from (0, 360) to (-180, 180)\n mask = np.logical_and(data[\"longitude_full\"] != long_nan,\n data[\"longitude_full\"] > 180)\n data[\"longitude_full\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n idx = (data[\"azi_angle_full\"] != int_nan) & (data[\"azi_angle_full\"] < 0)\n data[\"azi_angle_full\"][idx] += 360\n\n # set flags\n data[\"f_usable\"] = set_flags_fmv13(data[\"flagfield\"])\n\n return data, metadata",
"def generate_graphml_output(self, path):\n self.restructure_edge_info()\n self.restructure_node_info()\n return nx.write_graphml(self.G, path)",
"def download_glove ():\n # Get the URL ...\n print(\"Downloading https://nlp.stanford.edu/data/glove.6B.zip ...\")\n res = requests.get(\"https://nlp.stanford.edu/data/glove.6B.zip\", stream=True)\n if res.status_code != 200:\n print(\"Could not download the 6B GloVe Dataset! The server responded with code \" + res.status_code + \".\")\n sys.exit(1)\n\n # ... and write it to file\n fp = open(\"data/glove.6B.zip\", \"wb\")\n total_length = int(res.headers.get('content-length'))\n # Thanks again to the internet for this beautiful piece of code <3\n for chunk in tqdm.tqdm(res.iter_content(chunk_size=1024), unit=\"KB\", total=ceil(total_length/1024) + 1):\n if chunk:\n fp.write(chunk)\n fp.flush()\n fp.close()\n print(\"ZIP-file downloaded! Extracting ...\")\n with ZipFile(\"data/glove.6B.zip\", \"r\") as zf:\n files = zf.namelist()\n print(\"Members in archive:\")\n print(\"\\n\".join(files))\n\n for file in files:\n if file.endswith(\"glove.6B.300d.txt\"):\n print(\"Extracting member \" + file + \" from archive ...\")\n zf.extract(file)\n break\n \n # Remove the zip file again\n os.remove(\"data/glove.6B.zip\")\n print(\"Successfully extracted GloVe embeddings (300 dimensions) to data directory.\")\n print(\"You can now train the classifier using the GloVe embeddings.\")",
"def tile(self, z, x, y_tms):\n logger.debug(_(\"Download tile %s\") % ((z, x, y_tms),))\n # Render each keyword in URL ({s}, {x}, {y}, {z}, {size} ... )\n size = self.tilesize\n s = self.tiles_subdomains[(x + y_tms) % len(self.tiles_subdomains)];\n y_osm = (2**int(z) - 1) - int(y_tms)\n try:\n url = self.tiles_url.format(**locals())\n except KeyError, e:\n raise DownloadError(_(\"Unknown keyword %s in URL\") % e)\n logger.debug(_(\"Retrieve tile at %s\") % url)\n r = DOWNLOAD_RETRIES\n sleeptime = 1\n while r > 0:\n try:\n request = urllib2.Request(url)\n for header, value in self.headers.items():\n request.add_header(header, value)\n stream = urllib2.urlopen(request)\n assert stream.getcode() == 200\n return stream.read()\n except (AssertionError, IOError), e:\n logger.debug(_(\"Download error, retry (%s left). (%s)\") % (r, e))\n r -= 1\n time.sleep(sleeptime)\n # progressivly sleep longer to wait for this tile\n if (sleeptime <= 10) and (r % 2 == 0):\n sleeptime += 1 # increase wait\n raise DownloadError(_(\"Cannot download URL %s\") % url)",
"def download_model(source, target, filename):\n if not os.path.exists(target):\n os.mkdir(target) \n target_file = str(Path(target).joinpath(filename))\n if os.path.exists(target_file):\n print('model already exists, skipping download')\n return\n print(\"Downloading from {} to {}\".format(source, target))\n wget.download(source, target_file) \n print(\"\\nDone!\")",
"def download_models_and_data():\n\n for file in DATA_FILES:\n download_file(file[\"url\"], file[\"path\"])",
"def _download_data(self):\n logger.info('Downloading ChemIDplus data...')\n outfile_path = self._src_data_dir / self._src_fname\n\n self._ftp_download(self._src_server,\n self._src_dir_path,\n self._src_data_dir,\n self._src_fname)\n\n parser = ET.iterparse(outfile_path, ('start', 'end'))\n date = next(parser)[1].attrib['date']\n version = date.replace('-', '')\n outfile_path.rename(self._src_data_dir / f'chemidplus_{version}.xml')\n logger.info('Finished downloading ChemIDplus data')",
"def save_tile(x,y,z,fpath):\n UA = \"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/77.0\"\n tile_url = f\"https://{random.choice('abc')}.tile.openstreetmap.org/{z}/{x}/{y}.png\"\n # cmd = f\"wget --user-agent='please download' -O {fpath} {url}\"\n if os.path.exists(fpath):\n print(f\"Already have tile {fpath}!\")\n return 0\n if os.path.isdir(fpath):\n raise ValueError(f\"requested path {fpath} exists and is a directory!\")\n try:\n res = rq.get(\n url=tile_url,\n headers={'User-Agent': UA}\n )\n status = res.status_code\n if status == 200:\n with open(fpath,'wb') as of:\n of.write(res.content)\n return 0\n else:\n print(f\"Error: response {status} from server:\\n{res.reason}\")\n return status\n except Exception as e:\n print(f\"Error getting tile: {e}\")\n return 1",
"def _download(self, path):\n self.logger.info('Getting Million Song Dataset...')\n self.logger.info('Downloading Echo Nest Taste Subprofile train data...')\n base_url = 'http://millionsongdataset.com/sites/default/files/challenge/'\n\n download_dataset(\n base_url + 'train_triplets.txt.zip',\n join(self.data_folder, 'train.zip')\n )\n rename(join(self.data_folder, 'train'), path)\n\n self.logger.info('Downloading evaluation data for MSD Challenge...')\n download_dataset(\n base_url + 'EvalDataYear1MSDWebsite.zip',\n join(path, 'eval.zip')\n )\n rename(\n join(path, 'EvalDataYear1MSDWebsite'),\n join(path, 'evaluation')\n )\n\n self.logger.info('Downloading list of matching errors...')\n url = 'http://millionsongdataset.com/sites/default/files/tasteprofile/sid_mismatches.txt'\n download_url(url, join(path, 'sid_mismatches.txt'))",
"def retrieveGEOFiles(geonum, directory):\n samplelist = []\n\n ##download data https://geoparse.readthedocs.io/en/latest/GEOparse.html\n print(\"###############STARTING DOWNLOAD################ \\n\\n\\n\")\n print(geonum + '\\n\\n')\n gse = GEOparse.get_GEO(geo=geonum, destdir=directory)\n\n for gsm_name, gsm in gse.gsms.items():\n\n samplelist.append(gsm_name)\n\n filename = directory + \"/\" + gsm_name + \".txt\"\n o = open(filename, \"w\")\n o.write(\"Name: \" + gsm_name)\n o.write(\"\\nMetadata:\")\n for key, value in gsm.metadata.items():\n o.write(\"\\n - %s : %s\" % (key, \", \".join(value)))\n if key == 'supplementary_file':\n for item in value:\n wget.download(item, directory)\n o.close()\n\n for gpl_name, gpl in gse.gpls.items():\n filename = directory + \"/\" + gpl_name + \".platform\"\n o = open(filename, \"w\")\n o.write(\"Name: \" + gpl_name)\n o.write(\"\\nMetadata:\")\n for key, value in gpl.metadata.items():\n o.write(\"\\n - %s : %s\" % (key, \", \".join(value)))\n o.close()\n\n print(\" ################### FINISHED DOWNLOAD ###################### \\n\\n\")\n\n return samplelist",
"def submit_download_mock(_self, _fetch_and_save, filename, dest_folder):\n # If filename == foo/bar/x_y_z_attr.dat, content == \"x_y_z_attr\"\n content = os.path.splitext(os.path.basename(filename))[0]\n if content.split(\"_\")[-1] == \"full\":\n content = {\"molecule\": content}\n qml.data.Dataset._write_file(content, os.path.join(dest_folder, filename))",
"def download(self,fn):\n\t\treturn False #TODO: implement meme download",
"def download_mets(\n api_url, package_uuid, relative_path_to_mets, timestamp, package_list_no\n):\n\n # Request the METS file.\n mets_response = requests.get(\n get_mets_url(api_url, package_uuid, relative_path_to_mets)\n )\n\n # Create a directory to download the METS to.\n numbered_subdir = create_numbered_subdirs(timestamp, package_list_no)\n\n # Output METS to a convenient location to later be parsed.\n download_file = write_mets(mets_response, package_uuid, numbered_subdir)\n\n return download_file",
"def download() -> Path:\n rts_downloader.download()\n rts_gmlc_dir = Path(rts_downloader.rts_download_path) / \"RTS-GMLC\"\n return rts_gmlc_dir",
"def download_wind_poly():\n pyroute_path = \"/home/thomas/Documents/pyroute/\"\n path = pyroute_path + \"analysis/poly_data\"\n download_wind(path, -10, -150.0, -18.0, -135.0)",
"def topo2kml(topo_file_name, topo_type, color='00FF00'):\n\n import os\n from clawpack.geoclaw import topotools\n topo = topotools.Topography(topo_file_name, topo_type=topo_type)\n topo.read_header()\n xy = topo.extent\n name = os.path.splitext(os.path.split(topo_file_name)[-1])[0]\n file_name = '%s.kml' % name\n box2kml(xy, file_name, name, color)",
"def read_graph():\n return nx.read_edgelist('edges.txt.gz', delimiter='\\t')",
"def downloadMinio(url_list,list_d):\n print(\"You are downloading {} images\".format(parser_arguments().limit),end=\" \");print(\"of {} class.\".format(parser_arguments().classes))\n print(\"Please, be patient :)\")\n name = \"-\".join(parser_arguments().classes)\n name = name.lower()\n for i in range(len(url_list)):\n filename= url_list[i].split(\"/\")[-1] # name of the picture file\n r = requests.get(url_list[i], stream =True)\n\n\n if r.status_code == 200:\n r.raw.decode_content = True\n\n with open(filename,'wb') as f : # create the file locally in binary-write mode\n metadata = list_d[i]\n r = requests.get(url_list[i], stream =True)\n shutil.copyfileobj(r.raw, f) #write our image to the file\n path = os.getcwd()+'/'+filename # image path\n minioClient.fput_object(name,filename,path,'image/jpg',metadata)\n os.remove(filename)\n print(filename,'have been successfuly uploaded')\n print('Done!')",
"def download(self):\n file_url = posixpath.join(self.mirrors, self.resources)\n _urlretrieve(file_url, os.path.join(self.root, self.resources))",
"def download(self):\n file_url = posixpath.join(self.mirrors, self.resources)\n _urlretrieve(file_url, os.path.join(self.root, self.resources))",
"def _download_metadata():\n if not os.path.isfile(L1000FWD_METADATA):\n if not os.path.exists('L1000FWD'):\n os.mkdir('L1000FWD')\n response = requests.get('https://amp.pharm.mssm.edu/l1000fwd/download/Drugs_metadata.csv', stream=True)\n if response.status_code != 200:\n raise Exception('This should not happen')\n with open(L1000FWD_METADATA, 'wb') as outfile:\n for chunk in response.iter_content(chunk_size=1024):\n outfile.write(chunk)",
"def __extractChemDrawRemote(self, urlOrData, outputDir, name):\n baseUrl = self.__remoteServiceUrl\n postData = [('pathext', '.cml'), ('mode', 'extract')]\n if self.__isUrl(urlOrData):\n postData.append(('url', urlOrData))\n else:\n postData.append(('file', (name, urlOrData)))\n zipData = self.iceContext.Http().post(baseUrl, postData)\n if outputDir is not None:\n tmpFs = self.iceContext.fs.createTempDirectory()\n tmpFs.writeFile(\"media.zip\", zipData)\n tmpFs.unzipToDirectory(\"media.zip\", outputDir)\n tmpFs.delete()\n return zipData",
"def download_chicago_graph():\n\n\tG = ox.graph_from_place(\"Chicago,IL, United States\", network_type='drive')\n\treturn G",
"def export_representations(self):\n\n dbpath, config = self._start()\n self.logger.msg1(\"Loading ontology\")\n obo_path = check_file(config.obo, dbpath, \"obo\")\n self.obo = MinimalObo(obo_path, True)\n self._export_reference_representations()\n self._export_model_representations(config)\n self._end()",
"def plot_mpr_topology(options, tags=None, cursor=None):\n options['cur_src'] = 'topo'\n options['prefix'] = \"mpr\"\n ################################################################################\n locs = options['locs']\n colors = options['color2'](pylab.linspace(0, 1, 101))\n ################################################################################\n circ_max = 5\n line_max = 10\n floor_factor = 2\n floor_skew = -0.25\n line_min = 1\n\n hosts = get_hosts(options)\n mprs = cursor.execute('''\n SELECT DISTINCT(host)\n FROM nhdp_mpr_selectors\n ''').fetchall()\n\n for q, (tag_key, tag_id, nhdp_hi, nhdp_ht, mpr_minpdr) in enumerate(tags):\n logging.info('tag_id=\\\"%s\\\" (%d/%d)', tag_id, q+1, len(tags))\n min_max_time = cursor.execute('''\n SELECT min(time), max(time)\n FROM nhdp_he\n WHERE tag=?\n ''',(tag_key,)).fetchone()\n fig2d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate')\n fig3d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate', ThreeD=True)\n if not q:\n fig3d_onlynodes = MyFig(options, xlabel='x Coordinate [m]', ylabel='y Coordinate [$m$]', ThreeD=True)\n\n min_x = min_y = min_z = numpy.infty\n max_x = max_y = max_z = 0\n\n # first draw the edges...\n for nr, (host) in enumerate(hosts):\n logging.info(' [%d/%d] drawing edges for host=%s', nr+1, len(hosts), host)\n try:\n host_xpos, host_ypos, host_zpos = locs[host]\n except KeyError:\n logging.warning('no position found for node %s', host)\n continue\n ################################################################################\n # host is the receiving router, i.e. in our case the MPR\n # src is the sending router, i.e. the MPR selector\n # We only want to draw an edge if it connects an MPR with its SELECTOR.\n #\n ################################################################################\n cursor.execute('''\n SELECT DISTINCT(src), pdr\n FROM eval_helloPDR AS pdr JOIN nhdp_mpr_selectors AS mpr\n ON pdr.host = mpr.host AND pdr.tx_if = mpr.mprselector\n WHERE pdr.tag_key=? AND pdr.host=? AND mpr.time BETWEEN ? 
AND ?\n ''', (tag_key, host, min_max_time[0], min_max_time[1]))\n for src, pdr in cursor.fetchall():\n try:\n src_xpos, src_ypos, src_zpos = locs[src]\n except KeyError:\n logging.warning('no position found for node %s', src)\n continue\n\n fig2d.ax.plot(\n [host_xpos+host_zpos*floor_skew*floor_factor, src_xpos+src_zpos*floor_skew*floor_factor],\n [host_ypos+host_zpos*floor_factor, src_ypos+src_zpos*floor_factor],\n linestyle='-', color=colors[pdr*100], linewidth=max(line_max*pdr, line_min), alpha=0.3)\n\n fig3d.ax.plot(\n [host_xpos, src_xpos],\n [host_ypos, src_ypos],\n [host_zpos, src_zpos],\n linestyle='-', color=colors[pdr*100], linewidth=max(line_max*pdr, line_min), alpha=0.3)\n\n # draw the nodes\n for host in hosts:\n logging.info(' [%d/%d] drawing node %s', nr+1, len(hosts), host)\n try:\n xpos, ypos, zpos = locs[host]\n except KeyError:\n logging.warning('no position found for node %s', host)\n continue\n\n max_x = max(xpos, max_x)\n max_y = max(ypos, max_y)\n min_x = min(xpos, min_x)\n min_y = min(ypos, min_y)\n max_z = max(zpos, max_z)\n min_z = max(zpos, min_z)\n if (host,) in mprs:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='blue', ms=circ_max)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color='blue', ms=circ_max)\n else:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='black', ms=circ_max)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color='black', ms=circ_max)\n if not q:\n color = 'black'\n if host.startswith('a6'):\n color = 'red'\n elif host.startswith('a3'):\n color = 'blue'\n elif host.startswith('a7'):\n color = 'orange'\n fig3d_onlynodes.ax.plot([xpos], [ypos], [zpos], 'o', color=color, ms=circ_max)\n fig2d.colorbar = fig2d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n fig3d.colorbar = fig3d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n drawBuildingContours(fig3d.ax, options)\n drawBuildingContours(fig3d_onlynodes.ax, options)\n\n alinspace = numpy.linspace(0, 1, 100)\n alinspace = numpy.vstack((alinspace, alinspace))\n for tax in [fig2d.colorbar, fig3d.colorbar]:\n tax.imshow(alinspace, aspect='auto', cmap=options['color2'])\n tax.set_xticks(pylab.linspace(0, 100, 5))\n tax.set_xticklabels(['$%.2f$' % l for l in pylab.linspace(0, 1, 5)], fontsize=0.8*options['fontsize'])\n tax.set_yticks([])\n tax.set_title('$PDR$', size=options['fontsize'])\n fig2d.ax.axis((min_x-10, max_x+10, min_y-10, max_y+10+max_z*floor_factor+10))\n fig2d.save('2d_topology_hi_%d_ht_%d_minpdr_%.2f' % (nhdp_hi, nhdp_ht, mpr_minpdr))\n fig3d.save('3d_topology_hi_%d_ht_%d_minpdr_%.2f' % (nhdp_hi, nhdp_ht, mpr_minpdr))\n if not q:\n fig3d_onlynodes.save('3d_topology_only_nodes_hi_%d_ht_%d_minpdr_%.2f' % (nhdp_hi, nhdp_ht, mpr_minpdr))",
"def _parse_pbf(self):\n # todo pyrosm bounding_box is inefficient, first extract bbox from pbf using pyosmium\n osm = pyrosm.OSM(filepath=self._pbf, bounding_box=self._bbox)\n osm.keep_node_info = True\n logging.info(\"Parsing OSM ways and nodes from pbf file\")\n ways = osm.get_network(self._profile)\n\n all_nodes = osm._nodes_gdf # todo this has all the attributes but goes out of memory easily\n way_nodes_ids = ways.nodes.explode().unique().tolist()\n nodes = all_nodes.loc[all_nodes.id.isin(way_nodes_ids)]\n\n def clip_way(way):\n clipped_way = way.copy()\n clipped_way.nodes = clipped_way.nodes[:len(way.geometry.coords.xy[0])-1]\n return clipped_way\n\n broken_ways = ways.nodes.apply(lambda x: not all(n in nodes.id.to_list() for n in x))\n for ix, way in ways.loc[broken_ways].iterrows():\n ways.loc[ix] = clip_way(way)\n\n # all_nodes = osm._node_coordinates\n # way_nodes = {id: all_nodes[id] for id in way_nodes_ids if id in all_nodes.keys()}\n # nodes = pd.DataFrame.from_dict(way_nodes, orient='index', columns=['longitude', 'latitude'])\n # nodes['id'] = nodes.index\n\n # ways_nodes_found = ways.nodes.apply(lambda x: all(n in nodes.id.to_list() for n in x))\n # nodes = pd.DataFrame(columns=['id', 'latitude', 'longitude'])\n # for ix, way in ways.iterrows():\n # way_nodes = pd.DataFrame(columns=['id', 'latitude', 'longitude'])\n # try:\n # way_nodes.id = way.nodes\n # way_nodes.latitude = way.geometry.coords.xy[1]\n # way_nodes.longitude = way.geometry.coords.xy[0]\n # except:\n # pass\n # nodes = nodes.append(way_nodes, ignore_index=True)\n\n logging.info(\"Done parsing pbf file\")\n return nodes, ways",
"def main():\n GRAPH = lambda_graph()\n GRAPH.save_graph(\"pylon\")\n meshName = \"pylon.mesh\"\n cmd = \"./population/linuxShow \"+meshName\n process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)\n process.communicate()\n print \"nodes:\", GRAPH.number_of_nodes()\n print \"edges\", GRAPH.number_of_edges()",
"def node_route_data(th_object, topology_info, file_name):\n route_information(th_object, topology_info, file_name, \"20\", \"91\", \"extracted_data/Route_data/\")\n route_information(th_object, topology_info, file_name, \"22\", \"91\", \"extracted_data/Route_data/\")\n route_information(th_object, topology_info, file_name, \"22\", \"71\", \"extracted_data/Route_data/\")",
"def download_crossword() -> str:\n name = datetime.strftime(datetime.now(), \"%Y%m%d\")\n file_path = os.path.join(os.path.abspath(os.path.curdir), name+\".puz\")\n try:\n nyk = xword_dl.NewYorkerDownloader()\n obj = nyk.download(nyk.find_latest())\n xword_dl.save_puzzle(obj, file_path)\n return file_path\n except xword_dl.requests.ConnectionError:\n return None",
"def build_from_file(self, topology_file, topology_format):\n with open(topology_file) as infile:\n for line in infile:\n if line.startswith(\"#\"):\n continue\n else:\n if topology_format == 0:\n x = line.split(\"\\n\")[0].split(\"|\")\n as1 = int(x[0])\n as2 = int(x[1])\n relationship = int(x[2])\n else:\n x = line.split(\"\\n\")[0].split(\"\\t\")\n if x[2] == \"p2c\":\n as1 = int(x[0])\n as2 = int(x[1])\n relationship = -1\n elif x[2] == \"c2p\":\n as1 = int(x[1])\n as2 = int(x[0])\n relationship = -1\n elif x[2] == \"p2p\":\n as1 = int(x[1])\n as2 = int(x[0])\n relationship = 0\n else:\n continue\n\n if not self.has_edge(as1, as2):\n self.add_edge(as1, as2, relationship=relationship, as1=as1, as2=as2)",
"def GEEtopoPts(ptsFile,metric,buf,poly,username,folderOut, scalePix = 30):\n \n # load required libraries\n import ee\n import math\n \n # Initialize the Earth Engine object, using the authentication credentials.\n ee.Initialize()\n\n ID_field = \"geeID\"\n\n #load pts or poly file\n pts1 = ee.FeatureCollection('users/' + username + '/' + str(ptsFile))\n\n #define topo images\n srtm = ee.Image('USGS/SRTMGL1_003')\n slopeI = ee.Terrain.slope(srtm).multiply(math.pi/180)\n aspectI = ee.Terrain.aspect(srtm).multiply(math.pi/180)\n\n aspectS = aspectI.sin();\n aspectC = aspectI.cos();\n \n if buf > 0:\n bufL = [buf]\n def bufferPoly(feature):\n return feature.buffer(bufL[0])\n\n ptsB = pts1.map(bufferPoly)\n \n #reduce regions, filter out null values, remove geometry and export table\n\n if 'elev' in metric:\n table_tc_pts = srtm.reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix)\n task_tc = ee.batch.Export.table.toDrive(collection = table_tc_pts\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 's_elev_topo_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n\n if 'slope' in metric:\n table_tc_pts = slopeI.reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix)\n task_tc = ee.batch.Export.table.toDrive(collection = table_tc_pts\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 's_slope_topo_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n\n if 'aspect' in metric:\n table_AS_pts = aspectS.reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.sum(),\n scale = scalePix)\n table_AC_pts = aspectC.reduceRegions(collection = ptsB.select([ID_field]),\n reducer = ee.Reducer.sum(),\n scale = scalePix)\n task_AS = ee.batch.Export.table.toDrive(collection = table_AS_pts\n .filter(ee.Filter.neq('sum', None))\n .select(['.*'],None,False),\n description = 's_aspect_sin_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_AC = ee.batch.Export.table.toDrive(collection = table_AC_pts\n .filter(ee.Filter.neq('sum', None))\n .select(['.*'],None,False),\n description = 's_aspect_cos_ptsB',\n folder = folderOut,\n fileFormat = 'CSV')\n task_AS.start()\n task_AC.start()\n\n #print (\"buffered pts by:\" + str(buf))\n\n elif poly > 0:\n \n #reduce regions, filter out null values, remove geometry and export table\n\n if 'elev' in metric:\n table_tc_pts = srtm.reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix)\n task_tc = ee.batch.Export.table.toDrive(collection = table_tc_pts\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 's_elev_topo_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n\n if 'slope' in metric:\n table_tc_pts = slopeI.reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix)\n task_tc = ee.batch.Export.table.toDrive(collection = table_tc_pts\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 's_slope_topo_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n\n if 'aspect' in metric:\n table_AS_pts = aspectS.reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.sum(),\n scale = scalePix)\n table_AC_pts = aspectC.reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.sum(),\n scale = scalePix)\n task_AS = 
ee.batch.Export.table.toDrive(collection = table_AS_pts\n .filter(ee.Filter.neq('sum', None))\n .select(['.*'],None,False),\n description = 's_aspect_sin_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_AC = ee.batch.Export.table.toDrive(collection = table_AC_pts\n .filter(ee.Filter.neq('sum', None))\n .select(['.*'],None,False),\n description = 's_aspect_cos_poly1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_AS.start()\n task_AC.start()\n\n #print (\"spatial mean in poly: no buffer\")\n\n else:\n\n if 'elev' in metric:\n table_tc_pts = srtm.reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix)\n task_tc = ee.batch.Export.table.toDrive(collection = table_tc_pts\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 's_elev_topo_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n if 'slope' in metric:\n table_tc_pts = slopeI.reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix)\n task_tc = ee.batch.Export.table.toDrive(collection = table_tc_pts\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 's_slope_topo_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_tc.start()\n\n if 'aspect' in metric:\n table_A_pts = aspectI.reduceRegions(collection = pts1.select([ID_field]),\n reducer = ee.Reducer.mean(),\n scale = scalePix)\n task_A = ee.batch.Export.table.toDrive(collection = table_A_pts\n .filter(ee.Filter.neq('mean', None))\n .select(['.*'],None,False),\n description = 's_aspect_topo_pts1',\n folder = folderOut,\n fileFormat = 'CSV')\n task_A.start()\n \n #print(\"value at point: no buffer\")"
] | [
"0.5974667",
"0.59448946",
"0.5932634",
"0.57080597",
"0.54658467",
"0.5453924",
"0.5384024",
"0.5363021",
"0.5340173",
"0.52313894",
"0.5222386",
"0.51715934",
"0.51461154",
"0.51413363",
"0.5095243",
"0.50925016",
"0.5082509",
"0.5045129",
"0.5041606",
"0.50309974",
"0.5018534",
"0.5013735",
"0.4990025",
"0.49810368",
"0.4974742",
"0.4972284",
"0.4970041",
"0.49683398",
"0.49467424",
"0.49404216",
"0.4935643",
"0.49163175",
"0.49099964",
"0.49093553",
"0.49036032",
"0.49036032",
"0.49005643",
"0.4899401",
"0.489698",
"0.48947453",
"0.48942062",
"0.4893518",
"0.48846778",
"0.48812273",
"0.4878934",
"0.4876881",
"0.4868809",
"0.4862717",
"0.48579633",
"0.48497528",
"0.48493215",
"0.48345774",
"0.48153308",
"0.48132142",
"0.4808218",
"0.48039794",
"0.47981074",
"0.4794011",
"0.47864276",
"0.47776756",
"0.4774937",
"0.4773286",
"0.47682616",
"0.47662988",
"0.47655436",
"0.47655252",
"0.47479334",
"0.47468325",
"0.4741486",
"0.4734429",
"0.47344077",
"0.47296393",
"0.472946",
"0.47264993",
"0.47196463",
"0.47159895",
"0.47088683",
"0.47083598",
"0.47073075",
"0.4706626",
"0.47047058",
"0.4703633",
"0.47000104",
"0.46967316",
"0.46802232",
"0.4679604",
"0.46795395",
"0.46795228",
"0.46686888",
"0.46686888",
"0.466822",
"0.46655387",
"0.46647802",
"0.4664646",
"0.4658911",
"0.46585742",
"0.464743",
"0.46441835",
"0.46427068",
"0.464109",
"0.4634974"
] | 0.0 | -1 |
Returns a list of all topologies in the db | def get_list_of_nets(self):
return self.mfp.get_list_of_nets() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def topologies(self):\n return self._topologies",
"def select_all_topologies(conn):\n cur = conn.cursor()\n cur.execute(\"SELECT * FROM topologies_topology\")\n \n rows = cur.fetchall()\n \n for row in rows:\n print(row)",
"def __get_topologies(self):\n return etree.tostring(self.__topologies, pretty_print=True)",
"def showtopologies():\n middleware.protocolObj.showTopologies()",
"def topology(self) -> List[Topology]:\n return self._topology",
"def get_list_of_ontologies(self):\n try:\n con = self.getOntologyDatabaseConnection()\n column_values = con.cursor()\n con.cursor().callproc('get_list_of_ontologies', [column_values])\n query_results=[]\n for row in column_values:\n if row[0] is None:\n continue\n query_results.append(row)\n return query_results\n except Exception, e:\n print 'Exception caught: %s.\\nThe error is: %s' % (type(e), e)\n return False",
"def dbnodes(self) -> t.List[DBNode]:\n dbnodes = [node for node in self.entities if isinstance(node, DBNode)]\n return dbnodes",
"def nodes(topology):\n return topology.nodes()",
"def get_allowed_topologies(user):\n try:\n up = user.get_profile()\n except AttributeError:\n return db.Topology.objects.none()\n\n if user.has_perm(\"vnswww.topology_use_any\"):\n # We can view and use any templates\n topos = db.Topology.objects.filter()\n else:\n q_own = Q(owner=user)\n q_permitted = Q(allowed_users=user)\n q_org = Q(org=user.get_profile().org)\n q_public = Q(public=True)\n if user.has_perm(\"vnswww.topology_use_org\"):\n print \"Allowed all topos in own org\"\n # We can view and use any from the user's organization\n topos = db.Topology.objects.filter(q_permitted | q_org | q_own)\n else:\n print \"NOT allowed all topos in own org\"\n # We can view any from our own organization which are protected\n topos = db.Topology.objects.filter(q_permitted | q_own)\n\n return topos",
"def get_topology(self):\n topology = []\n # Retrieving waypoints to construct a detailed topology\n for segment in self._wmap.get_topology():\n x1 = segment[0].transform.location.x\n y1 = segment[0].transform.location.y\n x2 = segment[1].transform.location.x\n y2 = segment[1].transform.location.y\n seg_dict = dict()\n seg_dict['entry'] = (x1, y1)\n seg_dict['exit'] = (x2, y2)\n seg_dict['path'] = []\n wp1 = segment[0]\n wp2 = segment[1]\n seg_dict['intersection'] = True if wp1.is_intersection else False\n endloc = wp2.transform.location\n w = wp1.next(1)[0]\n while w.transform.location.distance(endloc) > 1:\n x = w.transform.location.x\n y = w.transform.location.y\n seg_dict['path'].append((x, y))\n w = w.next(1)[0]\n\n topology.append(seg_dict)\n return topology",
"def load_towns():\n if not hasattr(g, 'towns'):\n #g.towns = run_query('select id, name from municipios')\n g.towns = get_towns()\n return g.towns",
"def all_pdbs(self, species=None):\n\n return list(set(\n self.gtop_pdbs(species=species) +\n self.uniprot_pdbs(species=species)\n ))",
"def get_triples(self):\n cursor = self.db.cursor()\n cursor.execute(\"SELECT page_url, link_type, link_url FROM triples ORDER BY page_url, link_type\")\n return cursor.fetchall()",
"def list(self):\n return self.rpc.call(MsfRpcMethod.DbWorkspaces)['workspaces']",
"def get_allhosts():\n connection, tablename = HomeNetwork.get_connection_info()\n query = 'SELECT hostname from {}'.format(tablename)\n output = pandas.read_sql_query(query, connection).to_json(orient='records')\n\n for host in json.loads(output):\n yield host[\"hostname\"]",
"def all_nodes(self):\n nodes = []\n for layer in self.layers:\n nodes += layer.nodes\n return nodes",
"def get_nodes():\n with session_for_read() as session:\n res = session.query(\n model.Node\n ).order_by(\n model.Node.started_at.desc()\n )\n return [model.Node(uuid=entry.uuid, version_id=entry.version_id,\n state=entry.state, started_at=entry.started_at,\n finished_at=entry.finished_at, error=entry.error,\n manage_boot=entry.manage_boot)\n for entry in res.all()]",
"def getNodes(self):\n return self.__allNodes",
"def list_all(self):\n if self.graph is None:\n self.logger.error(\"Graph value none cannot be iterated\")\n return list()\n\n nodes = list()\n for node in self.graph.nodes():\n if node == self.NONE_PACKAGE:\n continue\n nodes.append(node)\n return nodes",
"def get_cities() -> list[str]:\n conn = sqlite3.connect('../Utils/map_storage.db')\n cursor = conn.cursor()\n ret_set = set()\n\n with conn:\n cursor.execute(\"SELECT city FROM nodes\")\n\n for element in cursor.fetchall():\n ret_set.add(element[0])\n\n return list(ret_set)",
"def createTopologicalList(self):\n sortedList = list(self.node.items())\n sortedList.sort(key=lambda item : item[1].order)\n self.topologicalList = [i[0] for i in sortedList]\n \n # Add dummy element, since topological order starts at 1.\n self.topologicalList = [utils.NO_PATH_EXISTS] + self.topologicalList",
"def get_all_metadata(self):\n return self.db.get_all_nodes()",
"def get_all_network_graphs(list_mtfs):\n list_graphs = []\n \n for mtf in list_mtfs:\n list_graphs.append(get_network_graph(mtf))\n \n return list_graphs",
"def topology(self):\n return self._topology",
"def nodes(self):\n return self._get_tree_queryset()",
"def get_all_nodes(self):\n return self._get_all_nodes()",
"def allCountriesAndTowns():\n countryWoeids = [c.woeid for c in db.Country.select()]\n townWoeids = [t.woeid for t in db.Town.select()]\n woeidList = countryWoeids + townWoeids\n\n return woeidList",
"def nodes_in_topological_order(self):\n if not self.sorted:\n self._topological_sort()\n return self._topological_order",
"def select_all_nodes(conn):\n cur = conn.cursor()\n cur.execute(\"SELECT * FROM Nodes\")\n \n rows = cur.fetchall()\n return rows",
"def external_terminologies(self):\n terms = set()\n for node_record in self.graph.run(\"MATCH (n) RETURN (n)\"):\n node = node_record[\"n\"]\n if \"links_to\" in node:\n terms.add(node[\"links_to\"])\n return terms",
"def select_topology(conn, topology_name):\n cur = conn.cursor()\n cur.execute(\"SELECT * FROM topologies_topology WHERE name=?\", (topology_name,))\n \n rows = cur.fetchall()\n \n #for row in rows:\n # print(row)\n\n return rows",
"def findAll(tx):\n query = (\n \"MATCH (n1)-[r]->(n2) \"\n \"RETURN n1 AS node1 , r AS relationship , n2 AS node2 \"\n )\n\n result = tx.run(query)\n return [(record[\"node1\"], record[\"relationship\"], record[\"node2\"]) for record in result]",
"def topologies(self, topologies):\n\n self._topologies = topologies",
"def connected_components(self) -> List[list]:\n self.__set_all_nodes_unvisited()\n res = self.__tarjan()\n # res.reverse()\n return res",
"def _topological_sort_dfs(self) -> Iterator[BaseTransaction]:\n raise NotImplementedError",
"def get_all_places(self):\n self.cursor.execute(\"select * from places\")\n self.connection.commit()\n return self.cursor.fetchall()",
"def all_routing_tree(G, tors, table_file_name):\n \n table = OrderedDict({})\n for s in G.nodes():\n table[s] = OrderedDict({})\n for s in tors:\n for d in tors:\n if s != d:\n routing(G, s, d, table)\n\n with open(table_file_name, 'w') as file:\n file.write(json.dumps(table))\n return table",
"def list_nodes(self):\n\n return list(\n dict(\n self._from_json(self.manage.run(override=\"list-nodes\"))\n ).keys()\n )",
"def get_all_db_proximity(self, context):\n zone_objs = self.dns_manager.get_all_db_proximity(context)\n return zone_objs",
"def nodes(self):\n return list(self.__graph.keys())",
"def topology(self):\n return self._h5[TOPOLOGY][()]",
"def nodes(self):\n return list(self._g.keys())",
"def nodes(self) -> List[Node]:\n return list(self.__graph_dict.keys())",
"def get_list(self):\n return self.__repository.get_all()",
"def list_nodes(self):\n return self.datanodes.keys()",
"def list_nodes(\n self,\n excluded_schemas=[\n \"_definitions\",\n \"_settings\",\n \"_terms\",\n \"program\",\n \"project\",\n \"root\",\n \"data_release\",\n \"metaschema\",\n ],\n ):\n dd = self.sub.get_dictionary_all()\n schemas = list(dd)\n nodes = [k for k in schemas if k not in excluded_schemas]\n return nodes",
"def list_orgs(self):\n orgs = list(self.orgs.keys())\n orgs.sort()\n return orgs",
"def get_triples(self):\n return [\n triple\n for uid, cuds_object in self._registry.items()\n for triple in cuds_object.get_triples()\n ]",
"def getTables(self):\n\treturn self.dbNames",
"def getConnectionList(self):\n return []",
"def stoptopology(topologyName='all'):\n if topologyName == 'all':\n middleware.protocolObj.stopAllProtocols()\n return\n\n queryData = {'from': '/',\n 'nodes': [{'node': 'topology', 'properties': ['name'], 'where': [{'property': 'name', 'regex': topologyName}]}]\n }\n queryResponse = middleware.ixn.query(data=queryData)\n try:\n topologyObj = queryResponse.json()['result'][0]['topology'][0]['href']\n except:\n print('\\nError: Verify the topologyName', topologyName)\n middleware.protocolObj.stopTopology([topologyObj])",
"async def getAll():\n return [cluster.export() for cluster in clusters.get_all()]",
"def get_schemas(self):\n result = self.sql(\"SHOW DATABASES\").execute()\n return [row[0] for row in result.fetch_all()]",
"def all():\n session = session_maker(\n app.config['MYSQL_USER'], app.config['MYSQL_PASS'], app.config['MYSQL_SERVER_PORT_3306_TCP_ADDR'],\n app.config['MYSQL_SERVER_PORT_3306_TCP_PORT'], app.config['DB'])\n\n print(\n tabulate(\n selection_list_all(session),\n headers=['number', 'sqlid', 'name', 'city', 'state']))",
"def retrieveTrees(c):\n\n all_nodes = dict()\n root_nodes = list()\n c.execute('''SELECT id, parent_id, title FROM node''')\n data_db = c.fetchall()\n \n # Initialize nodes list\n for data_line in data_db:\n db_child_id = data_line[0]\n db_parent_id = data_line[1]\n child_title = data_line[2]\n \n node = Node(db_child_id, child_title)\n all_nodes[db_child_id] = node\n if not db_parent_id:\n root_nodes.append(node)\n \n # Create relations\n for data_line in data_db:\n db_child_id = data_line[0]\n db_parent_id = data_line[1]\n if db_parent_id:\n all_nodes[db_parent_id].append(all_nodes[db_child_id])\n \n return (all_nodes, root_nodes,)",
"def get_nodes(self):\n return requests.get(self.__url + 'nodes').json()",
"def get_nodes(self) -> List[Node]:\n\t\treturn sorted(self.nodes, key=lambda x: x.name.lower())",
"def getNodes(self):\n return [ node for node in sorted(self._nodes.values()) ]",
"def osd_list(self):\n def unique_list_of_dicts(l):\n return reduce(lambda x, y: x if y in x else x + [y], l, [])\n\n tree = self.osd_tree()\n nodes = tree['nodes']\n if 'stray' in tree:\n nodes += tree['stray']\n for node in nodes:\n if u'depth' in node:\n del node[u'depth']\n nodes = unique_list_of_dicts(nodes)\n osdlists = list(unique_list_of_dicts([node for node in nodes if node['type'] == 'osd']))\n hostlists = list(unique_list_of_dicts([node for node in nodes if node['type'] == 'host']))\n # add host info in osdlist\n for osdlist in osdlists:\n for hostlist in hostlists:\n if osdlist[\"id\"] in hostlist[\"children\"]:\n osdlist[\"host\"] = hostlist[\"name\"]\n break\n return osdlists",
"def get_all() -> list:\n categorias = []\n conn = GenericDao.connect()\n cursor = conn.execute(\"SELECT * FROM categorias\")\n for row in cursor:\n categoria = Categoria(row[1], row[0])\n categorias.append(categoria)\n if debug:\n print(str(categoria))\n\n conn.close()\n return categorias",
"def get_all(self):\n return self.db",
"def list_nodes(self):\n return self.ironic_client.node.list()",
"def getNodes(self):\n return self.graph.keys()",
"def generate_graph(self):\n\t\tif self.joins == None:\n\t\t\tself.get_joins()\n\t\tprint('generating Networkx DiGraph object of {database} from query results'.format(**self.__dict__))\n\t\t# save distinct Child column values\n\t\tchilds = set([j.Child for j in self.joins])\n\t\t# save distinct Parent column values\n\t\tparents = set([j.Parent for j in self.joins])\n\t\t# save names of Leaf tables\n\t\tleafs = list(childs - parents)\n\t\tself._traverse_joins(leafs)",
"def findAllLiveRelationships(tx):\n query = (\n \"MATCH (n1:Person)-[r:LIVE]->(n2:House) \"\n \"RETURN ID(n1) , r , ID(n2);\"\n )\n results = tx.run(query).data()\n return results",
"def getHierarchies():",
"def getHierarchies():",
"def get_topology(file):\n with open(file, 'r') as stream:\n try:\n topo_dict = yaml.load(stream)\n return topo_dict\n except (yaml.YAMLError, KeyError):\n return [] # TODO: give user feedback",
"def nodes(self): \n return [n for n in self.iternodes()]",
"def vertices(self):\n top_exp = TopologyUtils.TopologyExplorer(self.topods_shape(), ignore_orientation=True)\n return map(Vertex, top_exp.vertices())",
"def list_hosts():\n db = sqlite3.connect('/home/tropius/TROPIUS/TROPIUS.db')\n res = hosts.get_all(db)\n res = {'list': res}\n return jsonify(res)",
"def build_topo(self):\n super(EBGPTopo, self).build()",
"def listAll(self):\n red = self.dbConnect()\n return red.keys()",
"def build_topo(topo_file, display_graph = False):\n topo_graph = read_topo( topo_file )\n # mininet topo\n topo = NetworkXTopo( )\n topo.build_network( topo_graph, HOSTS_PER_SWITCH )\n hosts = topo.hosts( )\n # Debug \n print \"Total number of Vertices:\", len(topo.switches())\n print \"Total number of Edges(including edges to hosts):\", len(topo.links())\n #for host in hosts:\n # print host\n #for link in net.links():\n # print link\n if display_graph:\n draw_graph(topo_graph)\n return topo",
"def get_existing_taxonomy() -> List[List[Any]]:\n output = []\n with DBWith() as dbService:\n stmt = \"SELECT id, name, vocabulary, parent_id FROM taxonomy\"\n with closing(dbService.cursor(dictionary=True)) as c:\n c.execute(stmt)\n for item in c:\n sleep(0.000001) # To avoid Mysql.Connector error\n output.append([item[\"id\"], item[\"name\"], item[\"vocabulary\"], item[\"parent_id\"]])\n return output",
"def find_topo_sort(node_list):\r\n visited = set()\r\n topo_order = []\r\n #print(node_list)\r\n for node in node_list:\r\n topo_sort_dfs(node, visited, topo_order)\r\n return topo_order",
"def get_all_typespaces(schema_obj):\n\n typespaces = []\n for vendor in schema_obj.vendor_list:\n for typespace in vendor.typespace_list:\n typespaces.append(typespace)\n return typespaces",
"def gtop_pdbs(self, species=None):\n if species is None:\n return [pdb[\"pdbCode\"] for pdb in self._get_pdb_json() if pdb[\"pdbCode\"]]\n else:\n return [pdb[\"pdbCode\"] for pdb in self._get_pdb_json()\n if pdb[\"pdbCode\"] and pdb[\"species\"].lower() == species.lower()]",
"def get_all(self):\n\n servers = self._scoped_servers()\n servers = [{u'id': x.id, u'name': x.name} for x in servers]\n return self.format_collection(servers)",
"def get_data(self):\n return self.topo_data_flattened",
"def nodes(self):\n return list(self.keys())",
"def nodes(self):\n return list(self.keys())",
"def get_nonterminals(self):\n for node in self.hosttree.traverse():\n if not node.is_leaf():\n yield node",
"def print_tables(self):\n\n conn = self.engine.connect()\n self.print_table(self.nodes, conn)\n self.print_table(self.paths, conn)\n self.view_tree(connection=conn)",
"def all_hosts(self):\n ...",
"def listClusters():\n return [c['name'] for c in pymongo.Connection().clovr.clusters.find()]",
"def get_all_stations(session: Session) -> List[Row]:\n return session.query(PlanningWeatherStation.station_code).all()",
"def get_graphs(self):\n ids = self._graphs.keys()\n ids.sort()\n return [self._graphs[id] for id in ids]",
"def list_all_tables(db):\n # Get the tables which exist in the database\n db_tables = ex_sql_and_fetch(db, \"SELECT * FROM pg_catalog.pg_tables\")\n tables = [t[1] for t in db_tables]\n # Get the master tables from the Config\n config_tables = load_config()[db]['schemas'].keys()\n\n # Check to eliminate tables which don't exist from the Config\n relevant = [t for t in tables for c in config_tables if c in t]\n return relevant",
"def list_all():\n\n return (_conn.listDefinedDomains() +\n [_conn.lookupByID(id).name() for id in _conn.listDomainsID()])",
"def graphs(self):\n return self.__graphs",
"def list_tables(database):\n config = load_config()\n tables = [x for x in config[database]['schemas']]\n\n return tables",
"def tables(self) -> list:\n return self.list_tables()",
"def nodes(self):\n return self.sort_dict(self.trajectory_data)",
"def get_all(self):\n url = self._dbname + '/_all'\n return self._connection.get(url).json()",
"def get_all_locations():\n rs = run_query('''select * from zlrz_office_location''')\n return [] if rs is None else list(map(lambda t: Location(t[1], t[2], t[3], t[4], t[5], t[0]), rs))",
"def all_dbs(self):\n return self.cloudant_client.all_dbs()",
"def get_all_stations(engine): \n # Query db\n sql = (\"SELECT DISTINCT a.station_id, \"\n \" a.station_code, \"\n \" a.station_name, \"\n \" c.station_type, \"\n \" d.latitude, \"\n \" d.longitude \"\n \"FROM nivadatabase.projects_stations a, \"\n \" nivadatabase.stations b, \"\n \" nivadatabase.station_types c, \"\n \" niva_geometry.sample_points d \"\n \"WHERE a.station_id = b.station_id \"\n \"AND b.station_type_id = c.station_type_id \"\n \"AND b.geom_ref_id = d.sample_point_id \"\n \"ORDER BY a.station_id\")\n df = pd.read_sql(sql, engine)\n\n return df",
"def connections(self, recurse = True):\n \n return NeuroObject.connections(self, recurse) + [self.root] + self.arborizations(False) + self.gapJunctions(False) + self.innervations(False) + self.synapses(False)",
"def getNodes(self):\n nodes = [{\"address\": \"http://0.0.0.0:100\"}\n ,{\"address\": \"http://0.0.0.0:200\"}\n ,{\"address\": \"http://0.0.0.0:300\"}\n ,{\"address\": \"http://0.0.0.0:400\"}\n ,{\"address\": \"http://0.0.0.0:500\"}]\n return nodes",
"def all_nodes(self, namespace=None):\n source = self._source(namespace)\n return self._list(source, 'all')"
] | [
"0.78852785",
"0.7385446",
"0.7182751",
"0.6945609",
"0.6391738",
"0.63558525",
"0.61613566",
"0.59129083",
"0.5876373",
"0.58721805",
"0.58037055",
"0.5768942",
"0.57195634",
"0.5661269",
"0.56598175",
"0.5634624",
"0.56306636",
"0.56049365",
"0.558728",
"0.5556961",
"0.5554081",
"0.5521487",
"0.5506136",
"0.5500061",
"0.5496706",
"0.5492574",
"0.54880464",
"0.5430345",
"0.5428218",
"0.5421379",
"0.5404293",
"0.53767234",
"0.53736144",
"0.53500295",
"0.53359437",
"0.53287643",
"0.5322304",
"0.531923",
"0.53189474",
"0.5317328",
"0.5313884",
"0.5311926",
"0.5305569",
"0.5303644",
"0.5294466",
"0.5293944",
"0.52915734",
"0.5289501",
"0.5278738",
"0.52704597",
"0.5270135",
"0.52689224",
"0.52657664",
"0.5265551",
"0.52622265",
"0.52556956",
"0.52507836",
"0.5248851",
"0.5245248",
"0.523589",
"0.5233588",
"0.5233566",
"0.52327865",
"0.5231272",
"0.52287114",
"0.52252096",
"0.52252096",
"0.5222351",
"0.522011",
"0.52189845",
"0.52173144",
"0.5216805",
"0.52082765",
"0.52075946",
"0.51987785",
"0.5193946",
"0.51895714",
"0.5188349",
"0.51782197",
"0.5170456",
"0.5164427",
"0.5164427",
"0.5162729",
"0.5155852",
"0.5151304",
"0.5149591",
"0.5143029",
"0.51412284",
"0.51402164",
"0.5139096",
"0.5138264",
"0.5133349",
"0.5128788",
"0.5128762",
"0.51226795",
"0.51146835",
"0.5114657",
"0.51136065",
"0.5110423",
"0.50981426",
"0.5089884"
] | 0.0 | -1 |
Returns a list of all BBS in the db | def get_list_of_bbs(self):
return self.mfp.get_list_of_bbs() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_all_bt(self):\n return list(self.collection.find({\"sensor_type\": \"bt\"}, {\"_id\": False})) # Return a list",
"def get_blists(self):\n return self.blists[:]",
"def get_bus_list():\n\n\tbuses = db.session.query(Bus.bus_name).all()\n\n \n\treturn buses",
"def list(cls, context, filters=None, limit=3000, marker=1,\n sort_key='id', sort_dir='asc'):\n db_boars = cls.dbapi.get_boar_list(\n context, limit=limit, marker=marker, sort_key=sort_key,\n sort_dir=sort_dir, filters=filters)\n\n #import pdb; pdb.set_trace()\n return [Boar._from_db_object(cls(context), obj) for obj in db_boars]",
"def __sync_bulbs__() -> list:\n\n bulbs = list()\n\n try:\n discovered_bulbs = discover_bulbs(timeout=2)\n except Exception as e:\n raise Exception(str(e))\n\n for bulb in discovered_bulbs:\n ip = bulb['ip']\n port = bulb['port']\n model = bulb['capabilities']['model']\n name = bulb['capabilities']['name']\n name = name if name != '' else ip\n identifier = bulb['capabilities']['id']\n\n found_bulb = Bulb(\n ip=ip,\n port=port,\n model=model\n )\n\n found_bulb.set_name(name)\n properties = found_bulb.get_properties()\n\n bulbs.append({\n 'bulb': found_bulb,\n 'name': name,\n 'model': model,\n 'ip': ip,\n 'metadata':\n {\n 'id': identifier,\n 'ip': ip,\n 'name': name,\n 'model': model,\n 'properties': properties\n }\n })\n\n return bulbs",
"def get_all(self):\n cursor = self._dbcon.cursor()\n cursor.execute(u\"select rowid,* from books\")\n result = cursor.fetchall()\n cursor.close()\n return [self._book_from_query_result(x) for x in result]",
"def list_dbs(self):\n return self.get('_all_dbs').json()",
"def get_all_bank_names() -> List[str]:\n\n # Open a new connection\n db, cursor = db_connector.cursor()\n\n query = \"select name from bank\"\n cursor.execute(query)\n data = cursor.fetchall()\n r_list = [x[0] for x in data]\n db.disconnect()\n return r_list",
"def get_all_borrowed_books():\n return BorrowBook.query.all()",
"def get_blocks(self):\n cmd = \"\"\" SELECT * FROM %s; \"\"\" %(TABLE_BLOCKCHAIN)\n\n self.__dbcursor.execute(cmd)\n return self.__dbcursor.fetchall()",
"def getinstancelist():\n dbcursor_dict.execute(dbq.get_all_instance_list, )\n db_instance_list = dbcursor_dict.fetchall()\n return db_instance_list",
"def demo_get_all_books(self):\n results = []\n self.cursor.execute(\"\"\"SELECT ISBN FROM book\"\"\")\n for book in self.cursor.fetchall():\n results.append(book[0])\n return results",
"def do_bay_list(cs, args):\n bays = cs.bays.list(marker=args.marker, limit=args.limit,\n sort_key=args.sort_key,\n sort_dir=args.sort_dir)\n columns = ['uuid', 'name', 'node_count', 'master_count', 'status']\n columns += utils._get_list_table_columns_and_formatters(\n args.fields, bays,\n exclude_fields=(c.lower() for c in columns))[0]\n utils.print_list(bays, columns,\n {'versions': magnum_utils.print_list_field('versions')},\n sortby_index=None)",
"def get_biases(self):\n return []",
"def vbd_list(name=None, call=None):\n if call == \"function\":\n raise SaltCloudSystemExit(\n \"This function must be called with -a, --action argument.\"\n )\n if name is None:\n return \"A name kwarg is rquired\"\n ret = {}\n data = {}\n session = _get_session()\n vms = session.xenapi.VM.get_by_name_label(name)\n if len(vms) == 1:\n vm = vms[0]\n vbds = session.xenapi.VM.get_VBDs(vm)\n if vbds is not None:\n x = 0\n for vbd in vbds:\n vbd_record = session.xenapi.VBD.get_record(vbd)\n data[\"vbd-{}\".format(x)] = vbd_record\n x += 1\n ret = data\n return ret",
"def get_all(self):\n return self.db",
"def get_list() -> List[BankDetails]:\n from paynlsdk.client.transaction import Transaction\n return Transaction.get_banks().banks",
"def get_all(user_id):\n return Bucketlist.query.filter_by(created_by=user_id)",
"def show_all_brain_dumps(user_id):\n\n # grab user in the session\n user_id = session.get(\"user_id\")\n\n # grabs all the brain dumps from the user and order them by date created\n brain_dumps = (\n User_Brain_Dump.query.filter_by(user_id=user_id)\n .order_by(desc(\"date_created\"))\n .all()\n )\n\n page, per_page, offset = get_page_args(\n page_parameter=\"page\", per_page_parameter=\"per_page\"\n )\n\n per_page = 5\n\n offset = (page - 1) * per_page\n total = len(brain_dumps)\n\n pagination_brain_dumps = brain_dumps[offset : offset + per_page]\n pagination = Pagination(\n page=page, per_page=per_page, total=total, css_framework=\"bootstrap4\"\n )\n\n return render_template(\n \"all-brain-dumps.html\",\n brain_dumps=pagination_brain_dumps,\n user_id=user_id,\n per_page=per_page,\n pagination=pagination,\n )",
"def list_databases(self) -> List[Dict]:\n self._check_connection(check_db=False)\n all_data = self.get_databases()\n all_dbs = []\n for data in all_data:\n all_dbs.append(data[\"system:resource_name\"][\"@value\"])\n return all_dbs",
"def get_vendor_bills(self, count: int = 10) -> list:\n return list(\n itertools.islice(self.client.vendor_bills.get_all_generator(), count)\n )",
"def get_bh_obj(self, dbName):\n bh_xml = self.get_batchHistorical_XML(dbName)\n return self.get_batchHistorical_obj(bh_xml)",
"def produce_query_batches(self):\n self.__generate_queries()\n return self.__bobs",
"def get_bulbs(ip=None, name=None, model=None, metadata=False) -> list:\n bulbs = list()\n\n param = 'ip'\n value = ip\n return_all = False\n\n if name:\n param = 'name'\n value = name\n elif model:\n param = 'model'\n value = model\n elif not ip:\n return_all = True\n elif ip:\n ipaddress.ip_address(str(ip))\n\n for bulb in __sync_bulbs__():\n if bulb[param] == value or return_all:\n bulbs.append(bulb['metadata'] if metadata else bulb['bulb'])\n return bulbs",
"def list_buckets():\n for bucket in BUCKET_MANAGER.all_buckets():\n print(bucket)",
"def get_biases(self):\n if self.b is None:\n return []\n else:\n return [self.b]",
"def get_biases(self):\n if self.b is None:\n return []\n else:\n return [self.b]",
"def get_biases(self):\n return list(self.b.values())",
"def get_biases(self):\n return list(self.b.values())",
"def get_biases(self):\n return list(self.b.values())",
"def get_biases(self):\n return list(self.b.values())",
"def getBolts(self):\r\n return self._bolts",
"def get_plant_batches(db_path: str) -> List[PlantBatch]:\n plant_batches: List[PlantBatch] = []\n\n conn: Connection = sqlite3.connect(path.join(db_path, 'batches.db'))\n cur: Cursor = conn.cursor()\n\n for row in cur.execute('SELECT Plant, Location, Tray, n_trays, planting_time FROM batches'):\n # print('\\n\\n')\n # for i in row:\n # print(f\"{type(i)}: {i}\")\n\n batch: PlantBatch = parse_batch_db_entry(row)\n\n plant_batches.append(batch)\n\n cur.close()\n conn.close()\n return plant_batches",
"def getBoogies(self):\n return self.boogies",
"def read(self):\n self.connect()\n get_books = f\"select * from {self.book_table}\"\n try:\n self.cur.execute(get_books)\n self.con.commit()\n for i in self.cur:\n yield i\n except MySQLError as err:\n messagebox.showinfo(\"Failed to fetch files from database\")\n print(err)",
"def get_all_casks(self):",
"def retrieve_bookkeeping_all():\n rdb = StoragePool.get_connected_storage('BayesianPostgres')\n db = rdb.session\n try:\n data = []\n for e in db.query(Ecosystem).all():\n package_count = _count(db, db.query(Package).filter(Package.ecosystem == e))\n ecosystem_name = db.query(Ecosystem).get(e.id).name\n pv_count = _count(db, db.query(Version).join(Package).filter(Package.ecosystem == e))\n entry = {\n \"name\": ecosystem_name,\n \"package_count\": package_count,\n \"package_version_count\": pv_count\n }\n data.append(entry)\n\n result = {\"summary\": data}\n\n except SQLAlchemyError as e:\n result = {\"error\": \"Error encountered while fetching data. Please check logs.\"}\n\n return result",
"def BS ( self ) :\n return self.__bs",
"def get_databases ():\n return _dbobjects[:]",
"def get_binaries(name_only=False):\n\n bins = list()\n\n dtf_db = sqlite3.connect(DTF_DB)\n cur = dtf_db.cursor()\n\n # This just returns the name\n if name_only:\n\n sql = ('SELECT name '\n 'FROM binaries ')\n\n for binary in cur.execute(sql):\n bins.append(binary[0])\n\n # This returns a list of items\n else:\n\n sql = ('SELECT name, version, '\n 'about, author '\n 'FROM binaries '\n 'ORDER BY name')\n\n cur.execute(sql)\n\n while True:\n\n item = dtf.core.item.Item()\n line = cur.fetchone()\n if line is None:\n break\n\n item.type = dtf.core.item.TYPE_BINARY\n item.name = line[0]\n item.version = line[1]\n item.about = line[2]\n item.author = line[3]\n\n bins.append(item)\n\n return bins",
"def get_available_databases() -> List[str]:\r\n\tcur = psycopg2.connect(dbname='postgres').cursor()\r\n\tcur.execute(\"SELECT datname FROM pg_database WHERE datistemplate=FALSE;\")\r\n\treturn [row[0][:-6] for row in cur if row[0].endswith('wikidb')]",
"def list_brands(self, **kwargs):\n url = self.api_url('brands')\n\n return requests.get(\n url,\n headers=self.auth_header,\n params=kwargs,\n ).json()",
"def getBooks(self, showAll=False):\n if showAll:\n sql = '''select ID, NAME from books;'''\n else:\n sql = '''\nselect books.id, books.name, books.author\nfrom books where exists (\nselect * from clippings where books.id = clippings.book);'''\n\n cur = self.__execute__(sql)\n return BookIter(cur)",
"def bills():\n os_bills = Bill()\n os_vote_events = VoteEvent()\n os_bill_sponsors = BillSponsor()\n os_legislator_votes = LegislatorVote()\n\n os_bills.query()\n os_bills.parse()\n\n wiki_functions.write_to_csv_file_for_DataTransfer(os_bills,\n os_bills.bill_table)\n wiki_functions.write_to_csv_file_for_DataTransfer(os_vote_events,\n os_bills.vote_event_table)\n wiki_functions.write_to_csv_file_for_DataTransfer(os_legislator_votes,\n os_bills.legislator_vote_table)\n wiki_functions.write_to_csv_file_for_DataTransfer(os_bill_sponsors,\n os_bills.bill_sponsor_table)",
"def get_databases(self):\n query = mssqlqueries.get_databases()\n logger.info(u'Databases query: %s', query)\n for tabular_result in self.execute_query(query):\n return [x[0] for x in tabular_result[0]]",
"def get_banks() -> List[BankDetails]:\n from paynlsdk.api.transaction.getbanks import Request\n client = APIClient()\n request = Request()\n client.perform_request(request)\n return request.response.banks",
"def getItems(self):\n for object in self.database:\n print(object)",
"def get(self):\n books = db.session.query(models.Book)\n return [book.serialize() for book in books], 200",
"def database_volume_get_list():\n db = database_get()\n\n session = db.session()\n query = session.query(model.Volume)\n\n volume_objs = list()\n for volume in query.all():\n nfvi_volume_data = json.loads(volume.nfvi_volume_data)\n nfvi_volume = nfvi.objects.v1.Volume(nfvi_volume_data['uuid'],\n nfvi_volume_data['name'],\n nfvi_volume_data['description'],\n nfvi_volume_data['avail_status'],\n nfvi_volume_data['action'],\n nfvi_volume_data['size_gb'],\n nfvi_volume_data['bootable'],\n nfvi_volume_data['encrypted'],\n nfvi_volume_data['image_uuid'])\n volume_obj = objects.Volume(nfvi_volume)\n volume_objs.append(volume_obj)\n return volume_objs",
"def get_dashboard_bags():\n conn = fetch_replica_db()\n now = datetime.datetime.now()\n bags = conn.bags.aggregate([\n {'$match': {'$or':[{'pid':{'$exists': False}}, {'pid': None}],\n 'cs.sd': {'$gte': now - datetime.timedelta(days=7)},\n 'cs.ss': {'$in': BAG_DASHBOARD_STATUSES}}},\n {'$group': {'_id': {'cn':'$cn','cs.sl':'$cs.sl'},\n 'count': {'$sum': 1}, 'bss': {'$addToSet': '$bs'}}},\n {'$sort': {'count': -1}}\n ])\n res = dict()\n for bag in bags.get('result', {}):\n if isinstance(bag, dict):\n origin = bag.get('_id').get('cs.sl')\n dest = bag.get('_id').get('cn')\n dstate = get_center_from_cache(dest).get('state')\n count = bag.get('count')\n bss = bag.get('bss')\n if not res.get(origin):\n res[origin] = list()\n res[origin].append({'dest': dest, 'dstate': dstate, 'count': count, 'bss': bss,})\n cache.set(BAG_DASHBOARD_CACHE_KEY, res, 60*20)",
"def get_all(self):\n url = self._dbname + '/_all'\n return self._connection.get(url).json()",
"def view_all_batters(self):\n conn = rs.create_connection(\"dailyfantasyscraper.db\")\n cur = conn.cursor()\n position = \"P\"\n cur.execute(\"SELECT * FROM rotowiredk where position != ?\", position)\n result = cur.fetchall()\n conn.commit()\n conn.close()\n\n for item in result:\n print(item)\n tree.insert('', 'end', values=item)",
"def get():\n LOG.debug('GET list of SBIs.')\n\n # Construct response object.\n _url = get_root_url()\n response = dict(scheduling_blocks=[],\n links=dict(home='{}'.format(_url)))\n\n # Get ordered list of SBI ID's.\n block_ids = DB.get_sched_block_instance_ids()\n\n # Loop over SBIs and add summary of each to the list of SBIs in the\n # response.\n for block in DB.get_block_details(block_ids):\n block_id = block['id']\n LOG.debug('Adding SBI %s to list', block_id)\n LOG.debug(block)\n\n block['num_processing_blocks'] = len(block['processing_block_ids'])\n\n temp = ['OK'] * 10 + ['WAITING'] * 4 + ['FAILED'] * 2\n block['status'] = choice(temp)\n try:\n del block['processing_block_ids']\n except KeyError:\n pass\n block['links'] = {\n 'detail': '{}/scheduling-block/{}' .format(_url, block_id)\n }\n response['scheduling_blocks'].append(block)\n return response, HTTPStatus.OK",
"def get(self):\n return {'bills': [bill.json() for bill in BillModel.find_all()]}",
"def list(self, instance, limit=None, marker=None):\n return self._list(\"/instances/%s/databases\" % base.getid(instance),\n \"databases\", limit, marker)",
"def _get_all_records(self) -> List[DBModelInstance]:\n return self.model.query.all()",
"def view(self):\n self.cursor.execute(\"SELECT * FROM Book\")\n rows = self.cursor.fetchall()\n return rows",
"def get(self):\n query = Boat.query()\n results = query.fetch(limit = MAX_BOATS)\n boat_dicts = []\n for match in results:\n boat_dicts.append({'id': match.id, 'name': match.name, 'type': match.type,\n 'length': match.length, 'at_sea': match.at_sea })\n self.response.headers['Content-Type'] = 'application/json'\n self.response.write(json.dumps(boat_dicts))",
"def list_brokers(MaxResults=None, NextToken=None):\n pass",
"def list_databases():\n response = houston.get(\"/history/databases\")\n houston.raise_for_status_with_json(response)\n return response.json()",
"def select_all_bloggers(conn):\n cur = conn.cursor()\n cur.execute(\"SELECT * FROM blogger\")\n\n rows = cur.fetchall()\n\n return rows # return the rows",
"def list(self, instance=None, limit=20, marker=0):\n if instance is None:\n return super(CloudDatabaseBackupManager, self).list()\n return self.api._manager._list_backups_for_instance(instance, limit=limit,\n marker=marker)",
"def get_bolts(self):\n return self._bolts",
"def fetchall(self, databaseName):\n pass",
"def get_all(cls):\n\t\treturn [el._to_dict() for el in Book.query.all()]",
"def get_bundle_data(buns):\n b_data = []\n for bundle in buns:\n tds = bundle.find_elements_by_xpath(\"./td\")\n td_text = []\n for index, td in enumerate(tds):\n if index == 3:\n break\n td_text.append(td.text)\n b_data.append(td_text)\n return b_data",
"def get_budgets(self) -> list:\n return self.budget_manager.get_budgets()",
"def listAll(self):\n red = self.dbConnect()\n return red.keys()",
"def list_databases(self):\n r = self.__get_response(settings.LST_DBS)\n if r[\"status\"] == 200:\n return r[\"result\"]\n raise Exception(r[\"result\"][\"message\"])",
"def get_all_boards():\n return [board for board in GRAPH_DB.find(\"board\")]",
"def list(self):\n if not self.model:\n raise NameError('database model has not been set.')\n\n with self.session() as session:\n query = self.get_query(session)\n data = query.all()\n return data",
"def _fetch_bills(self, options):\n bill_count = options['max'] or fetch.DEFAULT_BILL_COUNT\n return fetch.bills(per_page=bill_count)",
"def list_databases(self, instance, limit=None, marker=None):\n return instance.list_databases(limit=limit, marker=marker)",
"def all(self) -> list:\n return list(self.__holder.db_tags)",
"def get_bundeslaender_daten(session: Session):\n\n return session.query(models.Bundesland).all()",
"def get_distribution_all_brands(tablename):\n cur, con = database.connect_to_database()\n query = \"select * from \" + tablename\n result = pandas.read_sql_query(query, con)\n\n # get the model column and the number of models\n models = result.Model\n n_models = len(models)\n\n models = extract_brands(models)\n columns = ['Brand', 'Number', 'Percentage']\n return create_distribution(models, columns, n_models)",
"def list(refresh):\n # This works too, but is much slower:\n # ogrinfo WFS:http://openmaps.gov.bc.ca/geo/ows?VERSION=1.1.0\n for table in bcdata.list_tables(refresh):\n click.echo(table)",
"def get_all(self, name: str | None = None) -> list[BoundLoadBalancerType]:\n return self._iter_pages(self.get_list, name=name)",
"def database_volume_snapshot_get_list():\n db = database_get()\n\n session = db.session()\n query = session.query(model.VolumeSnapshot)\n\n volume_snapshot_objs = list()\n for volume_snapshot in query.all():\n nfvi_volume_snapshot_data = \\\n json.loads(volume_snapshot.nfvi_volume_snapshot_data)\n nfvi_volume_snapshot = nfvi.objects.v1.VolumeSnapshot(\n nfvi_volume_snapshot_data['uuid'],\n nfvi_volume_snapshot_data['name'],\n nfvi_volume_snapshot_data['description'],\n nfvi_volume_snapshot_data['size_gb'],\n nfvi_volume_snapshot_data['volume_uuid'])\n volume_snapshot_obj = objects.VolumeSnapshot(nfvi_volume_snapshot)\n volume_snapshot_objs.append(volume_snapshot_obj)\n return volume_snapshot_objs",
"def get_all_blogs(cls):\n blogs = Database.find(collection='blogs',\n query={})\n #blogs will be a dictionary of blogs at this point\n return [cls.__dict_to_class(blog) for blog in blogs] #return a list of blog objects",
"def list_bibles(self):\n ksize = len(max(self.bibles, key=lambda key: len(key)))\n fmt = u'%%%ds - %%s' % ksize\n return u'\\n'.join(fmt % item for item in self.bibles.iteritems())",
"def hbnb():\n states = storage.all(State).values()\n states = sorted(states, key=lambda k: k.name)\n st_ct = []\n\n # Sort cities inside each states\n for state in states:\n st_ct.append([state, sorted(state.cities, key=lambda k: k.name)])\n\n amenities = storage.all(Amenity).values()\n amenities = sorted(amenities, key=lambda k: k.name)\n\n places = storage.all(Place).values()\n places = sorted(places, key=lambda k: k.name)\n\n values = {\"states\": states, \"amenities\": amenities,\n \"places\": places, \"cache_id\": uuid4()}\n\n return render_template('0-hbnb.html', **values)",
"def get_all_bookings():\n # get all the bookings for userid\n bookings = Booking.query.all()\n # dump in the Schema\n results = bookingsSchema.dump(bookings)\n\n return jsonify(results)",
"def fetch_all(self):\n with self.__connection.cursor() as cursor:\n query = \"SELECT * FROM %s\" % self.__schema\n cursor.execute(query)\n return cursor.fetchall()",
"def get_list():\r\n qry = ImportQueue.query\r\n qry = qry.order_by(ImportQueue.id)\r\n return qry.all()",
"def bdev_get_bdevs(client, name=None, timeout=None):\n params = {}\n if name:\n params['name'] = name\n if timeout:\n params['timeout'] = timeout\n return client.call('bdev_get_bdevs', params)",
"def get_db_items(self, key):\n return self._extension_data['db_items'][key]",
"def Print_Items(db):\r\n \r\n for item in db.Transaction.find():\r\n print(item)",
"def get_banks():\n\n\tbank_list = []\n\tbank_obj = lib.operations.bank.get_bank_list()\n\n\tfor obj in bank_obj:\n\t\tcheckbox = \"<div class='checkbox'>\" \\\n\t\t \"<label><input name='checkbox' type='checkbox' id='bankBox' value='{id}'></label>\" \\\n\t\t \"</div>\".format(id=obj.id)\n\t\tbtn_group = \"<div class='btn-group'>\" \\\n\t\t \"<button type='button' id='bankAdd' class='btn btn-success btn-flat' onclick='loadModal(this,{idCol})'>\" \\\n\t\t \"<i class='fa fa-plus'></i>\" \\\n\t\t \"</button>\" \\\n\t\t \"<button type='button' id='bankDel' class='btn btn-danger btn-flat'><i class='fa fa-trash'></i>\" \\\n\t\t \"</button>\" \\\n\t\t \"<button type='button' id='bankRef' class='btn btn-warning btn-flat'><i class='fa fa-refresh'></i>\" \\\n\t\t \"</button>\" \\\n\t\t \"<button type='button' id='bankUpd' class='btn btn-info btn-flat' onclick='loadModal(this,{idCol})'>\" \\\n\t\t \"<i class='fa fa-reply'></i>\" \\\n\t\t \"</button></div>\".format(idCol=obj.id)\n\t\trow = dict(checkbox=checkbox, id=obj.id, name=obj.name, city=obj.city, address=obj.address, options=btn_group)\n\t\tbank_list.append(row)\n\n\treturn send_result(bank_list, status=\"True\", total=len(bank_obj))",
"def fetchall(self) -> list:\n return self.cursor.fetchall()",
"def _from_db_object_list(db_objects, cls, context):\n return [Boar._from_db_object(cls(context), obj)\n for obj in db_objects]",
"def find_all_bygobidlist(cls, gobidlist):\n return cls.find_all_advanced({'gobid':gobidlist})",
"def get_all_books() -> List[Dict]:\n pass",
"def _get_dapall_from_db(self):\n\n dapall_data = {}\n\n daptype = self.bintype.name + '-' + self.template.name\n\n mdb = marvin.marvindb\n\n if not mdb.isdbconnected:\n raise MarvinError('No DB connected')\n\n datadb = mdb.datadb\n dapdb = mdb.dapdb\n\n dapall_row = mdb.session.query(dapdb.DapAll).join(\n dapdb.File, datadb.PipelineInfo, datadb.PipelineVersion).filter(\n mdb.datadb.PipelineVersion.version == self._dapver,\n dapdb.DapAll.plateifu == self.plateifu,\n dapdb.DapAll.daptype == daptype).use_cache().first()\n\n if dapall_row is None:\n raise MarvinError('cannot find a DAPall match for this target in the DB.')\n\n for col in dapall_row.__table__.columns.keys():\n if col != 'pk' and '_pk' not in col:\n dapall_data[col] = getattr(dapall_row, col)\n\n return dapall_data",
"def test_grainbins_get_all(flaskclient, auth_headers):\n\n for _ in range(5):\n grainbin = GrainbinFactory()\n grainbin.save()\n\n url = url_for(\"grainbin.Grainbins\")\n rep = flaskclient.get(url, headers=auth_headers)\n fetched_grainbins = rep.get_json()\n\n assert rep.status_code == 200\n assert len(fetched_grainbins) == 5",
"def api_asset_list():\n return jsonify(app.bank.to_list()), 200",
"def all(cls):\n api = BuslineAPI()\n try:\n objects = api.all()\n except ApiException:\n objects = cls.objects.all()\n return objects",
"def iter_bonds(self):\n for bond in self.bond_list:\n yield bond",
"def get_books(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.get_bins_template\n # NOTE: This implementation currently ignores plenary view\n if self._catalog_session is not None:\n return self._catalog_session.get_catalogs()\n collection = JSONClientValidated('commenting',\n collection='Book',\n runtime=self._runtime)\n result = collection.find().sort('_id', DESCENDING)\n\n return objects.BookList(result, runtime=self._runtime, proxy=self._proxy)",
"def get_budgets(self) -> list:\n return list(self.budgets.values())"
] | [
"0.69727236",
"0.67828137",
"0.6740348",
"0.65534073",
"0.6521765",
"0.63936526",
"0.6312505",
"0.6268653",
"0.62648267",
"0.62623644",
"0.6186435",
"0.61633617",
"0.61626595",
"0.6158413",
"0.6122159",
"0.611637",
"0.610624",
"0.5955863",
"0.5916003",
"0.58861715",
"0.5885258",
"0.5875262",
"0.5860586",
"0.584888",
"0.5844806",
"0.5834048",
"0.5834048",
"0.58271986",
"0.58271986",
"0.58271986",
"0.58271986",
"0.582637",
"0.5814788",
"0.58128095",
"0.5806419",
"0.5805298",
"0.5791809",
"0.57829434",
"0.57382953",
"0.5730314",
"0.57272804",
"0.57115376",
"0.57088053",
"0.56983674",
"0.5697976",
"0.5686281",
"0.56823784",
"0.568106",
"0.5675547",
"0.56709456",
"0.5639468",
"0.56307703",
"0.56259555",
"0.5601391",
"0.5600309",
"0.55993646",
"0.5595393",
"0.5594131",
"0.5585833",
"0.5584342",
"0.55627286",
"0.5556671",
"0.55522716",
"0.55493003",
"0.5548541",
"0.5544308",
"0.5541225",
"0.55391973",
"0.5535979",
"0.55296165",
"0.5524083",
"0.5520958",
"0.5492925",
"0.5491274",
"0.5491192",
"0.54819304",
"0.54804784",
"0.54795206",
"0.54678506",
"0.54630595",
"0.5460178",
"0.54575264",
"0.5447852",
"0.5447758",
"0.54379946",
"0.54328316",
"0.54322374",
"0.543115",
"0.542619",
"0.54217494",
"0.5421034",
"0.54193056",
"0.5417871",
"0.5415858",
"0.5411253",
"0.5410602",
"0.54105115",
"0.54083264",
"0.54052186",
"0.54017156"
] | 0.70204824 | 0 |
Downloads a bb in mfpx file format | def get_bb(self,bbname, mol = False):
lines = self.mfp.get_bb(bbname)
return lines | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getMpcorb(url='https://minorplanetcenter.net/iau/MPCORB/MPCORB.DAT.gz', fname='MPCORB.DAT.gz', verbose=True):\n\n #filename = wget.download(url)\n try:\n r = requests.get(url, allow_redirects=True)\n open(fname, 'wb').write(r.content)\n if (verbose):\n print('Download complete:', url)\n except:\n print(\"Error in getMpcorb: could not download \", fname, \" at \", url)\n raise\n return",
"def download(dbx, folder, subfolder, name):\r\n path = '/%s/%s/%s' % (\"Apps\", \"Contract Drafter\", \"2.amr\")\r\n while '//' in path:\r\n path = path.replace('//', '/')\r\n with stopwatch('download'):\r\n try:\r\n md, res = dbx.files_download(path)\r\n except dropbox.exceptions.HttpError as err:\r\n print('*** HTTP error', err)\r\n return None\r\n data = res.content\r\n print(data, 'bytes; md:', md)\r\n return data",
"def test_download_to_file(req, tmpdir):\n req.get(ENTREZ_URL, text='This works.')\n outdir = tmpdir.mkdir('outdir')\n filename = outdir.join('foo')\n expected = outdir.join('foo.gbk')\n config = core.Config(molecule='nucleotide', verbose=False)\n\n core.download_to_file('FOO', config, filename=filename)\n\n assert expected.check()",
"def download(self):\n #the link has some meta data in it that we need to get a hold of so we cant use metaData.getLink()\n data = None\n\n for link in self.metaData.jsonObj['links']:\n if link.get('rel') == \"content\":\n data = link\n\n assert data is not None\n\n response = self._adapter.getRequest(data['href'], self._baseHeader)\n return {\"filename\": data['title'], \"mime\": data['type'], \"binary\": response['Body'] }",
"def download(self,fn):\n\t\treturn False #TODO: implement meme download",
"def download_data():\n urllib.request.urlretrieve('http://cs.iit.edu/~culotta/cs579/a1/edges.txt.gz', 'edges.txt.gz')",
"def download(dbx, folder, name):\n path = '/%s/%s' % (folder, name)\n while '//' in path:\n path = path.replace('//', '/')\n with stopwatch('download'):\n try:\n md, res = dbx.files_download(path)\n except dropbox.exceptions.HttpError as err:\n log.exception('*** HTTP error', err)\n return None\n text = res.text\n log.debug(\"Downloaded file '%s' of length: %d characters, md: %s\", path, len(text), md)\n return text",
"def download(self):\n pass",
"def download(self):\n pass",
"def x_download():\n\t#_loadconfig()\n\tconf = _get_config()\n\t#print conf['xplane']\n\tdownload_url = conf['xplane']['download']\n\tlocal(\"wget -P %s %s\" % (navimport.conf.work_dir(\"/xplane_zips\"), download_url))",
"def download_image(wnid, query):\r\n images = urllib2.urlopen(image_url+wnid)\r\n images_list = [image.split() for image in images]\r\n random.shuffle(images_list)\r\n \r\n## urllib.urlretrieve(bbox_url+wnid+\".tar.gz\",wnid+\".tar.gz\")\r\n print(bbox_url+wnid)\r\n## urllib.urlretrieve(bbox_url+wnid,wnid+\".tar.gz\")\r\n retrieve(bbox_url+wnid+\".tar.gz\", wnid+\".tar.gz\")\r\n bb_files = tarfile.open(wnid+\".tar.gz\", \"r:gz\")\r\n names = bb_files.getnames()\r\n \r\n names = \"\".join(names)\r\n ids = re.findall(pattern, names)\r\n used_id = None\r\n \r\n for image in images_list:\r\n if re.findall(pattern,image[0])[0] in ids:\r\n try:\r\n url = urllib2.urlopen(image[1]).geturl()\r\n if url == image[1]:\r\n used_id = image[0]+\".xml\"\r\n## urllib.urlretrieve(url,\"usable\"+query+\".jpg\")\r\n retrieve(url, \"usable\"+query+\".jpg\")\r\n break\r\n except:\r\n continue\r\n \r\n for member in bb_files:\r\n if used_id in member.name:\r\n bbox = bb_files.extractfile(member)\r\n bbox_file = open(\"usable\"+query+\".xml\",\"w\")\r\n bbox_file.write(bbox.read())\r\n bbox_file.close()\r\n break\r\n \r\n bb_files.close()",
"def download_model():\n logging.info(\"[genreml] Downloading model...\")\n with urllib.request.urlopen(config.FMAModelConfig.FMA_MODEL_URL) as f:\n data = f.read()\n open(config.FMAModelConfig.FMA_MODEL_PATH, 'wb').write(data)\n logging.info(\"[genreml] Model download complete\")",
"def download_data():\n url = 'https://www.dropbox.com/s/h9ubx22ftdkyvd5/ml-latest-small.zip?dl=1'\n urllib.request.urlretrieve(url, 'ml-latest-small.zip')\n zfile = zipfile.ZipFile('ml-latest-small.zip')\n zfile.extractall()\n zfile.close()",
"def extract(*args):\r\n bank_rut= args[0]\r\n bank_id= args[1]\r\n\r\n while True:\r\n try:\r\n print(\"Downloading file for...\" + str(args[0]),end=\"\\n\")\r\n myfile = requests.get(\"https://www.sbif.cl/sbifweb/internet/bancos/balances/\"+str(YEAR)+\"/\"+bank_id+\".zip\", allow_redirects=True)\r\n time.sleep(rd.randint(4,7))\r\n break\r\n except:\r\n print(\"request failed\")\r\n pass\r\n \r\n open(str(PATH.joinpath(\"./data_banks/\"+bank_id+\".zip\")), 'wb').write(myfile.content)\r\n time.sleep(rd.randint(1,2))\r\n \r\n yield (bank_rut,bank_id)",
"def _download(item):\n\n filename = item.filename()\n filename = os.path.join(item.vdir(), filename)\n logger.info(\"Downloading '%s' to %s\" % (item.show, filename))\n\n f = open(filename, \"wb\")\n\n buf = net.tivoget(item.show.url)\n for chunk in buf:\n f.write(chunk)\n\n f.close()\n\n item.downloaded = True\n item.save()",
"async def get_file(self, link, name, md5, session):\n if os.path.exists(name) or md5 in opts.archived_md5:\n self.count += 1\n return\n\n async with session.get(link) as media:\n # Open file initially with .part suffix\n with open(f\"{name}.part\", \"wb\") as f:\n while True:\n chunk = await media.content.read(1024)\n if not chunk:\n break\n f.write(chunk)\n\n # Remove .part suffix once complete\n # After this point file won't get removed if script gets interrupted\n os.rename(f\"{name}.part\", name)\n\n if opts.archive:\n log_hash(md5)\n self.count += 1\n msg(f\"{self.fetch_progress()} {self.board}/{self.dir}/{name}\")",
"def download(path):\n\n # Check if directory exists\n if not os.path.isdir(path + \"birdvox_dcase_20k\"):\n print(\"Creating birdvox_dcase_20k Directory\")\n os.mkdir(path + \"birdvox_dcase_20k\")\n base = \"https://zenodo.org/record/1208080/files/\"\n filename = \"BirdVox-DCASE-20k.zip\"\n if not os.path.exists(path + \"birdvox_dcase_20k/\" + filename):\n url = base + filename + \"?download=1\"\n urllib.request.urlretrieve(url, path + \"birdvox_dcase_20k/\" + filename)\n url = \"https://ndownloader.figshare.com/files/10853300\"\n filename = \"data_labels.csv\"\n if not os.path.exists(path + \"birdvox_dcase_20k/\" + filename):\n urllib.request.urlretrieve(url, path + \"birdvox_dcase_20k/\" + filename)",
"def download():\n\treturn response.download(request, db)",
"def _download_to_flc(self):\n self.communicator.download_to_flc()",
"def download():\n return response.download(request,db)",
"def download():\n return response.download(request,db)",
"def download():\n return response.download(request,db)",
"def download():\n return response.download(request,db)",
"def download():\n return response.download(request,db)",
"def download():\n return response.download(request,db)",
"def main(url, localfile):\n ph.download_file(url, localfile)",
"def download_data():\n url = 'https://www.dropbox.com/s/xk4glpk61q3qrg2/imdb.tgz?dl=1'\n urllib.request.urlretrieve(url, 'imdb.tgz')\n tar = tarfile.open(\"imdb.tgz\")\n tar.extractall()\n tar.close()",
"def download_data():\n url = 'https://www.dropbox.com/s/8oehplrobcgi9cq/imdb.tgz?dl=1'\n urllib.request.urlretrieve(url, 'imdb.tgz')\n tar = tarfile.open(\"imdb.tgz\")\n tar.extractall()\n tar.close()",
"def download_files(self):",
"def __download_file(self, filename):\r\n \r\n respons = requests.get(self.__url + filename, stream=True)\r\n save_filename = os.path.join(self.__folder, os.path.basename(filename))\r\n with open(save_filename, 'wb') as output_file:\r\n for chunk in respons.iter_content(chunk_size=128):\r\n output_file.write(chunk)",
"def download():\n raise NotImplementedError",
"def download_file():\n data = c.recv(BUFFER)\n \n if data == b\"terminate\":\n print(\"DOWNLOADING FAILED !!!\")\n return\n\n file = open(FILE_NAME,\"wb\")\n while True:\n if data == b\"DONE\":\n break\n \n print(\"Receiving. . . \")\n file.write(data)\n data = c.recv(BUFFER)\n \n file.close()\n print(\"Successfully received!!!\")\n \n print(\"Webpage saved as {} at {}\".format(FILE_NAME, getcwd())) \n return None",
"def download():\n \"\"\"\n \"The book p.79 have error.\n \"https://github.com/login/oauth/authorize?client_id=7e0a3cd836d3e544dbd9&redirect_uri=https%3A%2F%2Fgist.github.com%2Fauth%2Fgithub%2Fcallback%3Freturn_to%3Dhttps%253A%252F%252Fgist.github.com%252Fyoungsoul%252Ffc69665c5d08e189c57c0db0e93017a6&response_type=code&state=9b385430ee7cd1a75ca91c1d1cb6c565111f6b81e54a71f42ae9b22035241b9b\n \"\"\"\n subprocess.call([\n 'wget',\n 'https://github.com/amplab/datascience-sp14/raw/master/lab7/mldata/mnist-original.mat', \n '-P',\n 'origin_data/'\n ])\n logger.info('Download success!')",
"def download_bitfile(cls, bitfile_name):\n def parse_bitfile_header(bitfile_name):\n \"\"\"\n Parse the header of the bitfile and strip off the binary part that will be written to the FPGA.\n \"\"\"\n with open(bitfile_name, 'rb') as bitf:\n contents = bitf.read()\n\n finished = False\n offset = 0\n bit_dict = {}\n\n # Strip the (2+n)-byte first field (2-bit length, n-bit data)\n length = struct.unpack('>h', contents[offset:offset + 2])[0]\n offset += 2 + length\n\n # Strip a two-byte unknown field (usually 1)\n offset += 2\n\n # Strip the remaining headers. 0x65 signals the bit data field\n while not finished:\n desc = contents[offset]\n offset += 1\n\n if desc != 0x65:\n length = struct.unpack('>h',\n contents[offset:offset + 2])[0]\n offset += 2\n fmt = \">{}s\".format(length)\n data = struct.unpack(fmt,\n contents[offset:offset + length])[0]\n data = data.decode('ascii')[:-1]\n offset += length\n\n if desc == 0x61:\n s = data.split(\";\")\n bit_dict['design'] = s[0]\n bit_dict['version'] = s[-1]\n elif desc == 0x62:\n bit_dict['part'] = data\n elif desc == 0x63:\n bit_dict['date'] = data\n elif desc == 0x64:\n bit_dict['time'] = data\n elif desc == 0x65:\n finished = True\n length = struct.unpack('>i',\n contents[offset:offset + 4])[0]\n offset += 4\n # Expected length values can be verified in the chip TRM\n bit_dict['length'] = str(length)\n if length + offset != len(contents):\n raise RuntimeError(\"Invalid length found\")\n bit_dict['data'] = contents[offset:offset + length]\n else:\n raise RuntimeError(\"Unknown field: {}\".format(hex(desc)))\n return bit_dict\n\n \"\"\"\n Download new bitstream onto FPGA.\n \"\"\"\n\n assert isabs(bitfile_name), f\"bitfile_name = {bitfile_name} is not an absolute path!\"\n binfile_name = bitfile_name.split(\"/\")[-1] + \".bin\"\n firmware_path = \"/lib/firmware/\" + binfile_name\n\n # Copy .bin part of .bit file into /lib/firmware folder.\n bit = parse_bitfile_header(bitfile_name)\n bit_buffer = np.frombuffer(bit[\"data\"], \"i4\")\n bin_buffer = bit_buffer.byteswap()\n bin_buffer.tofile(firmware_path, \"\")\n\n # Send the new .bin file to the FPGA manager.\n\n BS_FPGA_MAN = \"/sys/class/fpga_manager/fpga0/firmware\"\n BS_FPGA_MAN_FLAGS = \"/sys/class/fpga_manager/fpga0/flags\"\n\n with open(BS_FPGA_MAN_FLAGS, 'w') as fd:\n fd.write(\"0\")\n\n with open(BS_FPGA_MAN, 'w') as fd:\n fd.write(binfile_name)\n cls._current_bitfile_name = bitfile_name\n\n return cls._current_bitfile_name",
"def _download(self):\n self._system.download(\"http://geant4.web.cern.ch/geant4/support/source/\" + self._tar_name)",
"def download(self):\n debug(str(self) + ' Downloading from newzbin.com..')\n if not NewzbinDownloader.canDownload():\n debug(str(self) + ' download: No www.newzbin.com login information')\n return\n\n info('Downloading newzbin NZB: %s ' % self.msgId)\n self.handleNZBDownloadFromNewzbin()",
"def download_attachment(self, msg):\n path = None\n for part in msg.walk():\n if part.get_content_type() == 'application/pdf':\n\n time_prefix = datetime.datetime.now().strftime(\"%Y-%m-%dT%H:%M:%S\")\n filename = time_prefix+\"-\"+part.get_filename()\n path = os.path.join(self._DOWNLOAD_FOLDER, filename)\n\n if not os.path.isfile(path):\n with open(path, 'wb') as fb:\n fb.write(part.get_payload(decode=True))\n\n self._processed = True\n return path, self.get_company(msg['From'], msg['To'])",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download():\n return response.download(request, db)",
"def download_model(\\\n download_base='http://download.tensorflow.org/models/object_detection/', \\\n model_name='ssd_mobilenet_v1_coco_11_06_2017'\\\n ):\n\n # add tar gz to the end of file name\n model_file = model_name + '.tar.gz'\n\n try:\n opener = urllib.request.URLopener()\n opener.retrieve(download_base + model_file, \\\n model_file)\n tar_file = tarfile.open(model_file)\n for f in tar_file.getmembers():\n file_name = os.path.basename(f.name)\n if 'frozen_inference_graph.pb' in file_name:\n tar_file.extract(f, os.getcwd())\n except Exception as e:\n raise",
"def __handleDownload(self,block):\n self.file.write(block)",
"def download_dilbert(s, u):\n with open(\"comicfile.jpg\", \"wb\") as file:\n response = s.get(u)\n file.write(response.content)",
"def download_binpackage(package, filepath, config):\n resp = None\n req = Request('GET', package['download_url'])\n local_filename = \"{}/{}\".format(filepath, package['filename'])\n\n if config['debug']:\n print(\"DEBUG: Request ({}) {}\".format('GET', package['download_url']))\n\n try:\n resp = Session().send(\n Session().prepare_request(req), verify=True, stream=True)\n with open(local_filename, 'wb') as lfile:\n shutil.copyfileobj(resp.raw, lfile)\n resp.raise_for_status()\n except (HTTPError, ConnectionError, Timeout, IOError) as ex:\n abort(ex.message)\n\n return local_filename",
"def download():\n if auth.has_membership(1):\n user = \"Admin\"\n elif auth.has_membership(2):\n user = \"Examiner\"\n elif auth.has_membership(3):\n user = \"student\"\n elif auth.has_membership(5):\n user = \"Managment\"\n\n db.activity_log.insert( Title_entry=\"Download assignment\", \n referance_id=auth.user.id,\n remarks=\"content downloaded by {}\".format(user))\n db.commit()\n return response.download(request, db)",
"def download1():\n #t=request.vars.arg(0)\n response.flash=request\n #print request.wsgi.environ['HTTP_REFERER']\n #print 'yghklo=',request.args[0]\n a=db(db.Project.Project_File==request.args[0]).select(db.Project.ALL)\n #a=db(db.Project.id==38).select(db.Project.ALL)\n #if a == None:\n#\t print 'silent'\n # print 'a= aabhas download',a[0].no_of_download, a[0].Project_File\n # if a[0].no_of_download==None:\n#\t a[0].no_download=0\n db(db.Project.Project_File==a[0].Project_File).update(no_of_download=(a[0].no_of_download or 0)+1)\n print 'a.id=',a[0].id\n # print len(a),'\\n'\n #print \"\\n\\n\\n\\n\"\n return response.download(request, db)",
"def bbl_file(self, base_file):\n bbl_path = os.path.abspath(os.path.splitext(base_file)[0]) + '.bbl'\n return self.open_encode_safe(bbl_path).readlines()",
"def download_caffe_model(model_name, meta_info, dst_dir='./model'):\n if not os.path.isdir(dst_dir):\n os.mkdir(dst_dir)\n model_name = os.path.join(dst_dir, model_name)\n assert 'prototxt' in meta_info, \"missing prototxt url\"\n prototxt = mx.test_utils.download(meta_info['prototxt'], model_name+'_deploy.prototxt')\n assert 'caffemodel' in meta_info, \"mssing caffemodel url\"\n caffemodel = mx.test_utils.download(meta_info['caffemodel'], model_name+'.caffemodel')\n assert 'mean' in meta_info, 'no mean info'\n mean = meta_info['mean']\n if isinstance(mean, str):\n mean = mx.test_utils.download(mean, model_name+'_mean.binaryproto')\n return (prototxt, caffemodel, mean)",
"def download_file(download_url, save_path):\n url = \"https://www.encodeproject.org/\" + download_url\n urllib.request.urlretrieve(url, save_path)",
"def download(self, outputfile: str, outputformat: str):\n pass",
"def __download_pretrained(self, fname: str, fdir: str):\n download_url = self._fastlinks[\"url\"] + fname\n r = requests.get(download_url, stream=True)\n with open(fdir, \"wb\") as downfile:\n total_length = int(r.headers.get('content-length'))\n tt = float(\"{:.2f}\".format(total_length / 1024 ** 2))\n for ch in tqdm.tqdm(iterable=r.iter_content(chunk_size=1024 ** 2), total=tt, unit='MB'):\n if ch:\n downfile.write(ch)",
"def downloadFile(self, base_url, file_name):\n url = os.path.join(base_url, file_name)\n req = urllib2.Request(url)\n try:\n f = urllib2.urlopen(req, timeout=self.timeout)\n local_file = open(os.path.join(self.config.get('PATHS', 'pdfdir'), file_name), \"w\")\n local_file.write(f.read())\n local_file.close()\n except Exception, err:\n print \"[ Failed ]\"\n print \"\\n***ERROR in downloadFile: %s\" % err\n sys.exit(0)",
"def maybe_download():\n\n print(\"Downloading Inception 5h Model ...\")\n download.maybe_download_and_extract(url=data_url, download_dir=data_dir)",
"def menu_download_blender(self, event=None):\n self.link('http://www.blender.org/download/get-blender')",
"def fusion_api_download_fabric_support_dump(self, uri, localfile, api=None, headers=None):\n return self.fabric.get_file(uri=uri, localfile=localfile, api=api, headers=headers)",
"def download(self, account, code):\n\n url = Spider.BASE_URL + \"/p/%s/?taken-by=%s\" % (code, account)\n r = self.session.get(url)\n content_match = re.search(r\"<script.*?>\\s*?window._sharedData\\s*?=\\s*?({.*}).*?</script>\", r.text,\n re.MULTILINE)\n data = json.loads(content_match.group(1))\n media = data['entry_data']['PostPage'][0]['graphql']['shortcode_media']\n download_urls = []\n if media['__typename'] == 'GraphVideo': # video\n download_urls.append(media[\"video_url\"])\n if media['__typename'] == 'GraphImage': # image\n download_urls.append(media[\"display_url\"])\n if media['__typename'] == 'GraphSidecar': # slide\n nodes = media['edge_sidecar_to_children']['edges']\n for node in nodes:\n node = node['node']\n if node['is_video']:\n download_urls.append(node['video_url'])\n else:\n download_urls.append(node['display_url'])\n\n actual_download_dir = os.path.join(download_dir, account)\n if not os.path.isdir(actual_download_dir):\n os.mkdir(actual_download_dir)\n for url in download_urls:\n filename = os.path.join(actual_download_dir, url.split('/')[-1].split('?')[0])\n temp_name = filename + '.tmp'\n if os.path.isfile(filename):\n if self.spider.auto_stop:\n print('file', filename, \"already exists, exiting......\")\n sys.exit()\n print('file', filename, \"already exists, skipping\")\n else:\n print('downloading %s:' % filename)\n r = self.session.get(url, stream=True)\n content_length = int(r.headers['content-length'])\n curr = 0\n with open(temp_name, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n f.write(chunk)\n curr += 1024\n progress(curr, content_length)\n os.rename(temp_name, filename)\n self.spider.item_count += 1",
"def download (url):\n path, url = url\n r = requests.get (url, stream = True)\n content = r.text\n #print (content)\n with open (path + '.txt', 'w') as f:\n f.write (content)",
"def download(self, url_match):\n pass",
"def _download_single(url, to, id):\n if os.path.exists(to):\n error_flags[id] = 1\n return\n\n try:\n request = rq.Request(url=url, headers=forge_agent_header)\n info = rq.urlopen(request).read()\n\n except urllib.error.URLError as e:\n print(url, 'urllib error')\n error_flags[id] = 2\n return\n\n except Exception as e:\n print(url, e)\n error_flags[id] = 2\n return\n\n with open(to, \"wb\") as file:\n print(url, 'writing')\n file.write(info)\n\n error_flags[id] = 1",
"def boldExtract(genera):\r\n # Prepare Web Service Endpoint for BOLD's Public Data Portal API\r\n # Appending BOLD's base URL to each genera from the NSR list\r\n base_url = 'http://v4.boldsystems.org/index.php/API_Public/combined?taxon='\r\n source_urls = list(map(lambda x: \"{}{}{}\".\r\n format(base_url, x, '&format=tsv'), genera))\r\n\r\n # Download sequence data from BOLD using list of url's\r\n print('Beginning sequence data retrieval...')\r\n counter = 0\r\n for url in source_urls:\r\n r = http.request('GET', url)\r\n name = genera[counter]\r\n counter += 1\r\n with open(args.outdir1+\"/\"+name+\".tsv\", \"wb\") as fcont:\r\n fcont.write(r.data)",
"def __getFile_urllib(self, _src, _dst):\n\n #-------------------- \n # Open the local destination file \n # so that it can start reading in the buffers.\n #-------------------- \n try:\n dstDir = os.path.dirname(_dst) \n if not os.path.exists(dstDir):\n os.makedirs(dstDir)\n dstFile = open(_dst, \"wb\")\n except Exception as e:\n self.__downloadFailed(_src, _dst, dstFile, str(e))\n return\n\n\n\n #-------------------- \n # Construct the request and authentication handler\n #-------------------- \n xnatUrl = Xnat.path.makeXnatUrl(self.host, _src)\n request = urllib.request.Request(xnatUrl)\n request.add_header(\"Authorization\", \n self.authHeader['Authorization'])\n\n\n\n #-------------------- \n # Get the response from the XNAT host.\n #-------------------- \n try:\n response = urllib.request.urlopen(request)\n\n\n\n\n #-------------------- \n # If the urllib.request version fails then use http.client.\n # See get_http.client for more details.\n #-------------------- \n #except urllib.request.HTTPError, e:\n except Exception as e:\n #print(str(e))\n #print(f\"{_src} {_dst}\")\n #print(d)\n self.__downloadFailed(_src, _dst, dstFile, str(e))\n return\n\n\n #-------------------- \n # Get the content size, first by checking log, then by reading \n # header\n #-------------------- \n self.downloadTracker['downloadedSize']['bytes'] = 0 \n self.downloadTracker['totalDownloadSize'] = \\\n self.getFileSize(xnatUrl)\n if not self.downloadTracker['totalDownloadSize']['bytes']:\n # If not in log, read the header\n if response.headers and \"Content-Length\" in response.headers:\n self.downloadTracker['totalDownloadSize']['bytes'] = \\\n int(response.headers[\"Content-Length\"]) \n self.downloadTracker['totalDownloadSize']['MB'] = \\\n Xnat.utils.bytesToMB(\\\n self.downloadTracker['totalDownloadSize']['bytes'])\n\n\n #-------------------- \n # Start the buffer reading cycle by\n # calling on the buffer_read function above.\n #-------------------- \n bytesRead = self.__bufferRead(xnatUrl, dstFile, response)\n dstFile.close()",
"def get_file(self, directory, url, name, extension=\".xml\"):\n print \"Getting file....\"\n name = \"{0}{1}\".format(name, extension)\n dir_main = self.os.path.join(directory, name)\n try:\n s = self.urllib2.urlopen(url)\n except:\n return 404\n f = open(dir_main,'wb+')\n meta = s.info()\n file_size = int(meta.getheaders(\"Content-Length\")[0])\n print \"Downloading: %s Bytes: %s\" % (name, file_size)\n current_file_size = 0\n block_size = 4096\n while True:\n buf = s.read(block_size)\n if not buf:\n break\n current_file_size += len(buf)\n f.write(buf)\n status = (\"\\r%10d [%3.2f%%]\" %\n (current_file_size, current_file_size * 100. / file_size))\n status = status + chr(8)*(len(status)+1)\n self.sys.stdout.write(status)\n self.sys.stdout.flush()\n f.close()\n print \"\\nDone getting feed\"\n return 200",
"def download_matt_mahoney_text8(filename, expected_bytes):\n if not os.path.exists(filename):\n print('Downloading ...')\n filename, _ = urllib.request.urlretrieve(url + filename, filename)\n statinfo = os.stat(filename)\n if statinfo.st_size == expected_bytes:\n print('Found and verified', filename)\n else:\n print(statinfo.st_size)\n raise Exception(\n 'Failed to verify ' + filename + '. Can you get to it with a browser?')\n return filename",
"def download(request, ef_id):\n ef = get_object_or_404(ExamFile, id=ef_id)\n path = os.path.join(settings.MEDIA_ROOT, ef.path.path)\n response= HttpResponse(content=file(path, 'rb').read(), \n mimetype='application/pdf')\n # fn = os.path.split(ef.path.path)[1]\n # response['Content-Disposition'] = \"attachment; filename=%s\" % (fn)\n return response",
"def download(self, download_path):\n return",
"def step_3b(browser):\n xml_file = '../../src/imio.project.pst/src/imio/project/pst/model/PST_eComptes_Export_201805V1.xsd'\n # select xml file\n file_field = browser.find(u'Document XML exporté depuis eComptes')\n with open(xml_file, 'r') as f:\n file_field.set('value', (f.read(), 'ecomptes_pst.xml'))\n # import xml file\n browser.find_button_by_label('Importer').click()\n # write browser contents\n # with open('browser_contents', 'w') as f:\n # f.write(browser.contents)",
"def download(self, *args, **kwargs):\n return wb.download(*args, **kwargs)",
"def write_file(req, file_type, download, dataset, stream, period, root_name):\n# ~~~~ Loading up the GRIB file~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n#\n head, _ = path.splitext(root_name)\n\n if file_type == 'grib':\n\n if download:\n raise TelemacException(\\\n '... I am not programmed to '\n 'download grib files directly.\\n\\n')\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nLoading essentials from the GRIB\\n')\n grb2slf = Grib(dataset, req, stream)\n\n grb2slf.set_geometry()\n\n if stream == 'spec':\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nSpecial case for spectral file\\n')\n grb2slf.put_geometry('geo_'+head+'.slf')\n grb2slf.set_spectral()\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nConverting grib file(s) into SELAFIN\\n')\n grb2slf.put_content(root_name)\n\n# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n# ~~~~ Downloading the NetCDF file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n#\n# Unfortunately, I did not manage to access the NetCDF file remotely\n elif file_type == 'netcdf':\n\n ecmwf2slf = Ecmwf(period, req)\n if download:\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nMaking an ECMWF request\\n')\n ecmwf2slf.connect_to_ecmwf(\"datasets/%s\" % (req['dataset']))\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nHaving to download the ECMWF file first\\n')\n ecmwf2slf.download_ecmwf()\n print(\" ~> download completed.\")\n\n ecmwf2slf.open_ecmwf()\n ecmwf2slf.set_geometry()\n\n if stream == 'spec':\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nSpecial case for spectral file\\n')\n ecmwf2slf.put_geometry('geo_'+head+'.slf')\n ecmwf2slf.set_spectral()\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nConverting netcdf file into SELAFIN\\n')\n ecmwf2slf.put_content(root_name, stream)",
"def download_file(client, file_id):\n\n file_content = client.file(file_id).content()\n print(file_content)"
] | [
"0.5965063",
"0.5833095",
"0.575973",
"0.5746206",
"0.5630841",
"0.55773723",
"0.5568776",
"0.5568582",
"0.5568582",
"0.5539128",
"0.55324054",
"0.55256397",
"0.55006206",
"0.5487703",
"0.54368585",
"0.5432206",
"0.5420917",
"0.53768295",
"0.5314198",
"0.5299799",
"0.5299799",
"0.5299799",
"0.5299799",
"0.5299799",
"0.5299799",
"0.52953345",
"0.52899987",
"0.5286727",
"0.5284866",
"0.52811134",
"0.525642",
"0.52529275",
"0.52507156",
"0.52476215",
"0.52283734",
"0.5225335",
"0.5204252",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5186384",
"0.5182433",
"0.51747",
"0.5167857",
"0.51666224",
"0.51600015",
"0.51520306",
"0.51382065",
"0.51246953",
"0.51205677",
"0.51162165",
"0.5109428",
"0.51072806",
"0.51050216",
"0.5104538",
"0.5097926",
"0.50918394",
"0.5091832",
"0.5086937",
"0.50868803",
"0.5084246",
"0.50834984",
"0.5070655",
"0.5066462",
"0.5065639",
"0.50579506",
"0.5055701",
"0.5049525",
"0.504766",
"0.50414205"
] | 0.53111416 | 19 |
Downloads a MOF structure in mfpx file format | def get_mof_structure_by_id(self, strucid, mol=False):
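    # the backend call returns both the mfpx file lines and the structure name;
    # only the raw lines are returned here, and the mol flag is unused in this snippet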
    lines, name = self.mfp.get_mof_structure_by_id(strucid)
    return lines | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def download_model():\n logging.info(\"[genreml] Downloading model...\")\n with urllib.request.urlopen(config.FMAModelConfig.FMA_MODEL_URL) as f:\n data = f.read()\n open(config.FMAModelConfig.FMA_MODEL_PATH, 'wb').write(data)\n logging.info(\"[genreml] Model download complete\")",
"def getMpcorb(url='https://minorplanetcenter.net/iau/MPCORB/MPCORB.DAT.gz', fname='MPCORB.DAT.gz', verbose=True):\n\n #filename = wget.download(url)\n try:\n r = requests.get(url, allow_redirects=True)\n open(fname, 'wb').write(r.content)\n if (verbose):\n print('Download complete:', url)\n except:\n print(\"Error in getMpcorb: could not download \", fname, \" at \", url)\n raise\n return",
"def download_structure(self):\n pdbl = PDBList()\n pdbl.retrieve_pdb_file(self.struct_name, pdir=self.struct_dir)",
"def download_model(\\\n download_base='http://download.tensorflow.org/models/object_detection/', \\\n model_name='ssd_mobilenet_v1_coco_11_06_2017'\\\n ):\n\n # add tar gz to the end of file name\n model_file = model_name + '.tar.gz'\n\n try:\n opener = urllib.request.URLopener()\n opener.retrieve(download_base + model_file, \\\n model_file)\n tar_file = tarfile.open(model_file)\n for f in tar_file.getmembers():\n file_name = os.path.basename(f.name)\n if 'frozen_inference_graph.pb' in file_name:\n tar_file.extract(f, os.getcwd())\n except Exception as e:\n raise",
"def read(self, url: str):\n\n log.info(f\"Downloading KMZ file {basename(url)}\")\n kml = self.fetch(url)\n\n log.info(\"Parsing KML data\")\n self.iter_elems = iterparse(BytesIO(kml), events=(\"start\", \"end\"), resolve_entities=False)\n\n prod_items = {\n \"issuer\": \"Issuer\",\n \"product_id\": \"ProductID\",\n \"generating_process\": \"GeneratingProcess\",\n \"issue_time\": \"IssueTime\",\n }\n\n nsmap = None\n\n # Get Basic Metadata\n prod_definition = None\n prod_definition_tag = None\n for event, element in self.iter_elems:\n if event == \"start\":\n # get namespaces from root element\n if nsmap is None:\n nsmap = element.nsmap\n prod_definition_tag = f\"{{{nsmap['dwd']}}}ProductDefinition\"\n elif event == \"end\":\n if element.tag == prod_definition_tag:\n prod_definition = element\n # stop processing after head\n # leave forecast data for iteration\n break\n\n self.metadata = {k: prod_definition.find(f\"{{{nsmap['dwd']}}}{v}\").text for k, v in prod_items.items()}\n self.metadata[\"issue_time\"] = dt.datetime.fromisoformat(self.metadata[\"issue_time\"])\n\n # Get time steps.\n timesteps = prod_definition.findall(\n \"dwd:ForecastTimeSteps\",\n nsmap,\n )[0]\n self.timesteps = [dt.datetime.fromisoformat(i.text) for i in timesteps.getchildren()]\n\n # save namespace map for later iteration\n self.nsmap = nsmap",
"def _download_metadata():\n if not os.path.isfile(L1000FWD_METADATA):\n if not os.path.exists('L1000FWD'):\n os.mkdir('L1000FWD')\n response = requests.get('https://amp.pharm.mssm.edu/l1000fwd/download/Drugs_metadata.csv', stream=True)\n if response.status_code != 200:\n raise Exception('This should not happen')\n with open(L1000FWD_METADATA, 'wb') as outfile:\n for chunk in response.iter_content(chunk_size=1024):\n outfile.write(chunk)",
"def download(self):\n #the link has some meta data in it that we need to get a hold of so we cant use metaData.getLink()\n data = None\n\n for link in self.metaData.jsonObj['links']:\n if link.get('rel') == \"content\":\n data = link\n\n assert data is not None\n\n response = self._adapter.getRequest(data['href'], self._baseHeader)\n return {\"filename\": data['title'], \"mime\": data['type'], \"binary\": response['Body'] }",
"def _download_obm_data_from_file_system(self) -> 'DataFrame':\n\n # note: fetch all OBM file part names\n url = f\"{self._wml_client.wml_credentials['url']}/v2/asset_files/auto_ml/{self.location.path.split('/auto_ml/')[-1]}/{self._run_id}/data/obm/features\"\n params = self._wml_client._params()\n params['flat'] = \"true\"\n\n response = requests.get(url,\n params=params,\n headers=self._wml_client._get_headers(),\n verify=False)\n\n if response.status_code != 200:\n raise ApiRequestFailure(u'Failure during {}.'.format(\"getting files information\"), response)\n\n file_names = [e['path'].split('/')[-1] for e in response.json()['resources'] if\n e['type'] == 'file' and e['path'].split('/')[-1].startswith('part')]\n\n # TODO: this can be done simultaneously (multithreading / multiprocessing)\n # note: download all data parts and concatenate them into one output\n parts = []\n for file_name in file_names:\n csv_response = requests.get(url + '/' + file_name,\n params=self._wml_client._params(),\n headers=self._wml_client._get_headers(),\n stream=True,\n verify=False)\n\n if csv_response.status_code != 200:\n raise ApiRequestFailure(u'Failure during {}.'.format(\"downloading model\"), csv_response)\n\n downloaded_asset = csv_response.content\n # note: read the csv/xlsx file from the memory directly into the pandas DataFrame\n buffer = io.BytesIO(downloaded_asset)\n parts.append(try_load_dataset(buffer=buffer))\n\n data = concat(parts)\n # --- end note\n return data",
"def _download_metafile(dataset, path=None):\n if not path:\n path = sunpy.config.get('downloads', 'sample_dir')\n base_url = 'https://spdf.gsfc.nasa.gov/pub/software/cdawlib/0MASTERS/'\n fname = dataset.lower() + '_00000000_v01.cdf'\n url = base_url + fname\n try:\n downloaded_file = pooch.retrieve(url=url, known_hash=None, fname=fname, path=path, progressbar=True)\n except ModuleNotFoundError:\n downloaded_file = pooch.retrieve(url=url, known_hash=None, fname=fname, path=path, progressbar=False)\n return downloaded_file",
"def download_mo(app, request, filename):\n \n filename = secure_filename(filename)\n file_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)\n \n if file_exists(file_path):\n try:\n po = polib.pofile(file_path)\n except:\n flash('Invalid translation file detected.', 'error')\n return redirect(url_for('home')) \n \n # Now build the mo file\n mofilename = filename+'.mo'\n mopath = os.path.join(app.config['MO_RESULT_PATH'], mofilename)\n po.save_as_mofile(mopath)\n \n return send_from_directory(\n app.config['MO_RESULT_PATH'],\n mofilename,\n as_attachment=True)\n \n flash('You\\'re trying to download file that are not exists.', 'error')\n return redirect(url_for('home'))",
"def mol(smiles):\n mol = Chem.MolFromSmiles(smiles)\n AllChem.Compute2DCoords(mol)\n mb = Chem.MolToMolBlock(mol)\n return Response(response=mb, status=200, mimetype='chemical/x-mdl-molfile', headers={'Content-Disposition': 'attachment;filename=structure.mol'})",
"def download_mets(\n api_url, package_uuid, relative_path_to_mets, timestamp, package_list_no\n):\n\n # Request the METS file.\n mets_response = requests.get(\n get_mets_url(api_url, package_uuid, relative_path_to_mets)\n )\n\n # Create a directory to download the METS to.\n numbered_subdir = create_numbered_subdirs(timestamp, package_list_no)\n\n # Output METS to a convenient location to later be parsed.\n download_file = write_mets(mets_response, package_uuid, numbered_subdir)\n\n return download_file",
"def maybe_download():\n\n print(\"Downloading Inception 5h Model ...\")\n download.maybe_download_and_extract(url=data_url, download_dir=data_dir)",
"def x_download():\n\t#_loadconfig()\n\tconf = _get_config()\n\t#print conf['xplane']\n\tdownload_url = conf['xplane']['download']\n\tlocal(\"wget -P %s %s\" % (navimport.conf.work_dir(\"/xplane_zips\"), download_url))",
"def test_fobj():\n Level3File(get_test_data('nids/Level3_FFC_N0Q_20140407_1805.nids'))",
"def test_download_to_file(req, tmpdir):\n req.get(ENTREZ_URL, text='This works.')\n outdir = tmpdir.mkdir('outdir')\n filename = outdir.join('foo')\n expected = outdir.join('foo.gbk')\n config = core.Config(molecule='nucleotide', verbose=False)\n\n core.download_to_file('FOO', config, filename=filename)\n\n assert expected.check()",
"def submit_download_mock(_self, _fetch_and_save, filename, dest_folder):\n # If filename == foo/bar/x_y_z_attr.dat, content == \"x_y_z_attr\"\n content = os.path.splitext(os.path.basename(filename))[0]\n if content.split(\"_\")[-1] == \"full\":\n content = {\"molecule\": content}\n qml.data.Dataset._write_file(content, os.path.join(dest_folder, filename))",
"def downloadMinio(url_list,list_d):\n print(\"You are downloading {} images\".format(parser_arguments().limit),end=\" \");print(\"of {} class.\".format(parser_arguments().classes))\n print(\"Please, be patient :)\")\n name = \"-\".join(parser_arguments().classes)\n name = name.lower()\n for i in range(len(url_list)):\n filename= url_list[i].split(\"/\")[-1] # name of the picture file\n r = requests.get(url_list[i], stream =True)\n\n\n if r.status_code == 200:\n r.raw.decode_content = True\n\n with open(filename,'wb') as f : # create the file locally in binary-write mode\n metadata = list_d[i]\n r = requests.get(url_list[i], stream =True)\n shutil.copyfileobj(r.raw, f) #write our image to the file\n path = os.getcwd()+'/'+filename # image path\n minioClient.fput_object(name,filename,path,'image/jpg',metadata)\n os.remove(filename)\n print(filename,'have been successfuly uploaded')\n print('Done!')",
"def readFT(self,file=\"out__1.ft\"):",
"def download_data():\n urllib.request.urlretrieve('http://cs.iit.edu/~culotta/cs579/a1/edges.txt.gz', 'edges.txt.gz')",
"def downloadPubmedMeta(pmid):\n try:\n wait(3, 'eutils.ncbi.nlm.nih.gov')\n ret = pubPubmed.getOnePmid(pmid)\n except urllib2.HTTPError as e:\n raise pubGetError('HTTP error %s on Pubmed' % str(e.code), 'pubmedHttpError', str(e.code))\n except pubPubmed.PubmedError as e:\n raise pubGetError(e.longMsg, e.logMsg)\n\n if ret == None:\n raise pubGetError('empty result when requesting metadata from NCBI Eutils for PMID %s' % str(pmid), 'pubmedEmpty')\n for h in addHeaders:\n ret[h] = ''\n\n return ret",
"def _download_to_flc(self):\n self.communicator.download_to_flc()",
"def _parse_mdat(box_bs, header):\n \n mdat = MediaDataBox()\n mdat.header = header\n mdat.payload = box_bs.read(mdat.header.box_size * 8).bytes\n return mdat",
"def download(self, session):\n target_path = self.get_target_full_dir()\n os.chdir(target_path)\n schema_get = session.get(self.get_full_url(), verify=False)\n target_name = self.get_target_name()\n logger.debug('Starting download of file {} to {}.'.format(target_name.upper(), target_path))\n with open(os.path.join(target_path, target_name), \"wb\") as code:\n code.write(schema_get.content)\n logger.info('{} file has been downloaded successfully.'.format(target_name.upper()))",
"def download_caffe_model(model_name, meta_info, dst_dir='./model'):\n if not os.path.isdir(dst_dir):\n os.mkdir(dst_dir)\n model_name = os.path.join(dst_dir, model_name)\n assert 'prototxt' in meta_info, \"missing prototxt url\"\n prototxt = mx.test_utils.download(meta_info['prototxt'], model_name+'_deploy.prototxt')\n assert 'caffemodel' in meta_info, \"mssing caffemodel url\"\n caffemodel = mx.test_utils.download(meta_info['caffemodel'], model_name+'.caffemodel')\n assert 'mean' in meta_info, 'no mean info'\n mean = meta_info['mean']\n if isinstance(mean, str):\n mean = mx.test_utils.download(mean, model_name+'_mean.binaryproto')\n return (prototxt, caffemodel, mean)",
"def download(self,fn):\n\t\treturn False #TODO: implement meme download",
"def fetchpart(self, uid, mbox, partnum):\n self.select_mailbox(mbox, False)\n data = self._cmd(\"FETCH\", uid, \"(BODYSTRUCTURE BODY[%s])\" % partnum)\n bs = BodyStructure(data[int(uid)][\"BODYSTRUCTURE\"])\n attdef = bs.find_attachment(partnum)\n return attdef, data[int(uid)][\"BODY[%s]\" % partnum]",
"def download_data():\n url = 'https://www.dropbox.com/s/h9ubx22ftdkyvd5/ml-latest-small.zip?dl=1'\n urllib.request.urlretrieve(url, 'ml-latest-small.zip')\n zfile = zipfile.ZipFile('ml-latest-small.zip')\n zfile.extractall()\n zfile.close()",
"def download_movie(self, filmid):\n self.logger.debug('download_movie')\n if not self._test_download_path(self.settings.getDownloadPathMovie()):\n return\n film = self.database.retrieve_film_info(filmid)\n if film is None:\n return\n (filmurl, extension,) = self._get_film_url_and_extension(film)\n # try to create a good name for the downloaded file\n namestem = mvutils.cleanup_filename(film.title)[:80]\n if not namestem:\n # try to take the show name instead...\n namestem = mvutils.cleanup_filename(film.show)[:64]\n if not namestem:\n namestem = u'Film'\n namestem = namestem + '-{}'.format(film.filmid)\n elif self.settings.getMovieNameWithShow():\n showname = mvutils.cleanup_filename(film.show)[:64]\n if showname:\n namestem = showname + ' - ' + namestem\n # review name\n if self.settings.getReviewName():\n (namestem, confirmed) = self.notifier.get_entered_text(namestem, 30986)\n namestem = mvutils.cleanup_filename(namestem)\n if len(namestem) < 1 or confirmed is False:\n return\n # determine destination path and film filename\n if self.settings.getUseMovieFolder():\n pathname = self.settings.getDownloadPathMovie() + namestem + '/'\n filename = namestem\n else:\n pathname = self.settings.getDownloadPathMovie()\n filename = namestem\n # check for duplicate\n # keep\n if self.settings.getFileExistsAction() == 1 and xbmcvfs.exists(pathname + filename + extension):\n return\n # prompt\n if self.settings.getFileExistsAction() == 0:\n while xbmcvfs.exists(pathname + filename + extension):\n (filename, confirmed) = self.notifier.get_entered_text(filename, 30987)\n filename = mvutils.cleanup_filename(filename)\n if len(filename) < 1 or confirmed is False:\n return\n\n # download the stuff\n if self._download_files(film, filmurl, pathname, filename, extension):\n self._make_movie_nfo_file(film, filmurl, pathname, filename)\n else:\n self.logger.debug('download_movie ERROR')",
"def _download(item):\n\n filename = item.filename()\n filename = os.path.join(item.vdir(), filename)\n logger.info(\"Downloading '%s' to %s\" % (item.show, filename))\n\n f = open(filename, \"wb\")\n\n buf = net.tivoget(item.show.url)\n for chunk in buf:\n f.write(chunk)\n\n f.close()\n\n item.downloaded = True\n item.save()",
"def download(self):\n pass",
"def download(self):\n pass",
"def download_files(self):",
"def write_file(req, file_type, download, dataset, stream, period, root_name):\n# ~~~~ Loading up the GRIB file~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n#\n head, _ = path.splitext(root_name)\n\n if file_type == 'grib':\n\n if download:\n raise TelemacException(\\\n '... I am not programmed to '\n 'download grib files directly.\\n\\n')\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nLoading essentials from the GRIB\\n')\n grb2slf = Grib(dataset, req, stream)\n\n grb2slf.set_geometry()\n\n if stream == 'spec':\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nSpecial case for spectral file\\n')\n grb2slf.put_geometry('geo_'+head+'.slf')\n grb2slf.set_spectral()\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nConverting grib file(s) into SELAFIN\\n')\n grb2slf.put_content(root_name)\n\n# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n# ~~~~ Downloading the NetCDF file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n#\n# Unfortunately, I did not manage to access the NetCDF file remotely\n elif file_type == 'netcdf':\n\n ecmwf2slf = Ecmwf(period, req)\n if download:\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nMaking an ECMWF request\\n')\n ecmwf2slf.connect_to_ecmwf(\"datasets/%s\" % (req['dataset']))\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nHaving to download the ECMWF file first\\n')\n ecmwf2slf.download_ecmwf()\n print(\" ~> download completed.\")\n\n ecmwf2slf.open_ecmwf()\n ecmwf2slf.set_geometry()\n\n if stream == 'spec':\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nSpecial case for spectral file\\n')\n ecmwf2slf.put_geometry('geo_'+head+'.slf')\n ecmwf2slf.set_spectral()\n\n print('\\n\\n'+72*'~'+'\\n')\n print('\\nConverting netcdf file into SELAFIN\\n')\n ecmwf2slf.put_content(root_name, stream)",
"def read_szf_fmv_12(eps_file):\n data = {}\n metadata = {}\n\n n_lines = eps_file.mdr_counter\n n_node_per_line = eps_file.mdr[\"LONGITUDE_FULL\"].shape[1]\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n # extract metadata\n metadata[\"spacecraft_id\"] = np.int8(eps_file.mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(eps_file.mphr[\"ORBIT_START\"])\n metadata[\"state_vector_time\"] = datetime.strptime(\n eps_file.mphr[\"STATE_VECTOR_TIME\"][:-4], \"%Y%m%d%H%M%S\")\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n for f in fields:\n metadata[f] = np.int16(eps_file.mphr[f.upper()])\n\n # extract time\n dt = np.datetime64(\n \"2000-01-01\") + eps_file.mdr[\"UTC_LOCALISATION\"][\"day\"].astype(\n \"timedelta64[D]\"\n ) + eps_file.mdr[\"UTC_LOCALISATION\"][\"time\"].astype(\"timedelta64[ms]\")\n data[\"time\"] = dt[idx_nodes]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\", \"flagfield_rf1\", \"flagfield_rf2\", \"flagfield_pl\",\n \"flagfield_gen1\"\n ]\n\n # 101 min = 6082 seconds\n # state_vector_time = ascending node crossing time - 1520.5,\n # time crossing at -90 lat\n orbit_start_time = metadata[\"state_vector_time\"] - timedelta(\n seconds=1520.5)\n orbit_end_time = orbit_start_time + timedelta(seconds=6082)\n\n data[\"orbit_nr\"] = np.ma.zeros(\n data[\"time\"].size, dtype=np.int32,\n fill_value=int32_nan) + metadata[\"orbit_start\"]\n data[\"orbit_nr\"][data[\"time\"] > orbit_end_time] += 1\n\n metadata[\"orbits\"] = {}\n for orbit_nr in np.unique(data[\"orbit_nr\"]):\n if orbit_nr == metadata[\"orbit_start\"]:\n metadata[\"orbits\"][orbit_nr] = (orbit_start_time, orbit_end_time)\n else:\n metadata[\"orbits\"][orbit_nr] = (orbit_end_time, orbit_end_time +\n timedelta(seconds=6082))\n\n # extract data\n for f in fields:\n if eps_file.mdr_sfactor[f.upper()] == 1:\n data[f] = eps_file.mdr[f.upper()].flatten()[idx_nodes]\n else:\n data[f] = (eps_file.mdr[f.upper()].flatten() * 1. /\n eps_file.mdr_sfactor[f.upper()])[idx_nodes]\n\n data[\"swath_indicator\"] = (data[\"beam_number\"].flatten() > 3).astype(\n np.uint8)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n fields = [(\"longitude_full\", long_nan), (\"latitude_full\", long_nan),\n (\"sigma0_full\", long_nan), (\"inc_angle_full\", uint_nan),\n (\"azi_angle_full\", int_nan), (\"land_frac\", uint_nan),\n (\"flagfield_gen2\", byte_nan)]\n\n for f, nan_val in fields:\n data[f] = eps_file.mdr[f.upper()].flatten()\n invalid = eps_file.mdr[f.upper()].flatten() == nan_val\n\n if eps_file.mdr_sfactor[f.upper()] != 1:\n data[f] = data[f] * 1. / eps_file.mdr_sfactor[f.upper()]\n\n data[f][invalid] = nan_val\n\n # modify longitudes from (0, 360) to (-180, 180)\n mask = np.logical_and(data[\"longitude_full\"] != long_nan,\n data[\"longitude_full\"] > 180)\n data[\"longitude_full\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n idx = (data[\"azi_angle_full\"] != int_nan) & (data[\"azi_angle_full\"] < 0)\n data[\"azi_angle_full\"][idx] += 360\n\n # set flags\n data[\"f_usable\"] = set_flags(data)\n\n return data, metadata",
"def main(pst_file):\n opst = pypff.open(pst_file)\n root = opst.get_root_folder()\n\n message_data = folder_traverse(root, [], **{'pst_name': pst_file, 'folder_name': 'root'})\n\n header = ['pst_name', 'folder_name', 'creation_time', 'submit_time', 'delivery_time',\n 'sender', 'subject', 'attachment_count']\n\n return message_data, header",
"def download(self):\n if not os.path.exists(self.pkg_dir):\n os.makedirs(self.pkg_dir)\n\n url = self.metadata_pkg[\"url\"]\n\n # Download modelpkg only if not already downloaded.\n if os.path.exists(self.file_path):\n self.is_downloaded = True\n else:\n print(f\"Fetching {os.path.basename(self.file_path)} model package from {url} to {self.file_path}\", flush=True)\n r = requests.get(url, stream=True)\n with open(self.file_path, \"wb\") as file_out:\n for chunk in r.iter_content(chunk_size=2048):\n file_out.write(chunk)\n r.close()\n self.is_downloaded = True",
"def _download_chieffi04():\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FApJ%2F608%2F405'\n import urllib\n print('Downloading Chieffi 04 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()",
"def _download_chieffi04():\n url = 'http://cdsarc.u-strasbg.fr/viz-bin/nph-Cat/tar.gz?J%2FApJ%2F608%2F405'\n import urllib\n print('Downloading Chieffi 04 yield tables from Vizier (should happen only at the first time)...')\n if os.path.exists(MASTERFILE):\n os.remove(MASTERFILE)\n urllib.urlretrieve(url,MASTERFILE)\n\n import tarfile\n tar = tarfile.open(MASTERFILE)\n tar.extractall(path=DATADIR)\n tar.close()",
"def download_attachment(self, msg):\n path = None\n for part in msg.walk():\n if part.get_content_type() == 'application/pdf':\n\n time_prefix = datetime.datetime.now().strftime(\"%Y-%m-%dT%H:%M:%S\")\n filename = time_prefix+\"-\"+part.get_filename()\n path = os.path.join(self._DOWNLOAD_FOLDER, filename)\n\n if not os.path.isfile(path):\n with open(path, 'wb') as fb:\n fb.write(part.get_payload(decode=True))\n\n self._processed = True\n return path, self.get_company(msg['From'], msg['To'])",
"def exportMmf(self, filename):\n self.matrix.export_mtx(filename)",
"def download_genotype_data():\n print(\"downloading genotype data\")\n download_from_url(PSAM_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.psam\", desc=\"downloading psam\")\n download_from_url(PVAR_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.pvar.zst\",\n desc=\"downloading pvar\")\n download_from_url(PGEN_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.pgen.zst\",\n desc=\"downloading pgen\")\n decompress_genotype_file(f\"{MERGED_GENOTYPE_FILE}.pvar\")\n decompress_genotype_file(f\"{MERGED_GENOTYPE_FILE}.pgen\")",
"def download():\n raise NotImplementedError",
"def getmfpages(params):\n url = 'https://www.metafilter.com/'\n page = requests.get(url+params).content",
"def read_szf_fmv_13(eps_file):\n data = {}\n metadata = {}\n\n n_lines = eps_file.mdr_counter\n n_node_per_line = eps_file.mdr[\"LONGITUDE_FULL\"].shape[1]\n idx_nodes = np.arange(n_lines).repeat(n_node_per_line)\n\n # extract metadata\n metadata[\"spacecraft_id\"] = np.int8(eps_file.mphr[\"SPACECRAFT_ID\"][-1])\n metadata[\"orbit_start\"] = np.uint32(eps_file.mphr[\"ORBIT_START\"])\n metadata[\"state_vector_time\"] = datetime.strptime(\n eps_file.mphr[\"STATE_VECTOR_TIME\"][:-4], \"%Y%m%d%H%M%S\")\n\n fields = [\n \"processor_major_version\", \"processor_minor_version\",\n \"format_major_version\", \"format_minor_version\"\n ]\n for f in fields:\n metadata[f] = np.int16(eps_file.mphr[f.upper()])\n\n # extract time\n dt = np.datetime64(\n \"2000-01-01\") + eps_file.mdr[\"UTC_LOCALISATION\"][\"day\"].astype(\n \"timedelta64[D]\"\n ) + eps_file.mdr[\"UTC_LOCALISATION\"][\"time\"].astype(\"timedelta64[ms]\")\n data[\"time\"] = dt[idx_nodes]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\", \"flagfield_rf1\", \"flagfield_rf2\", \"flagfield_pl\",\n \"flagfield_gen1\"\n ]\n\n fields = [\n \"degraded_inst_mdr\", \"degraded_proc_mdr\", \"sat_track_azi\",\n \"beam_number\"\n ]\n\n # 101 min = 6082 seconds\n # state_vector_time = ascending node crossing time - 1520.5,\n # time crossing at -90 lat\n orbit_start_time = metadata[\"state_vector_time\"] - timedelta(\n seconds=1520.5)\n orbit_end_time = orbit_start_time + timedelta(seconds=6082)\n\n data[\"orbit_nr\"] = np.ma.zeros(\n data[\"time\"].size, dtype=np.int32,\n fill_value=int32_nan) + metadata[\"orbit_start\"]\n data[\"orbit_nr\"][data[\"time\"] > orbit_end_time] += 1\n\n metadata[\"orbits\"] = {}\n for orbit_nr in np.unique(data[\"orbit_nr\"]):\n if orbit_nr == metadata[\"orbit_start\"]:\n metadata[\"orbits\"][orbit_nr] = (orbit_start_time, orbit_end_time)\n else:\n metadata[\"orbits\"][orbit_nr] = (orbit_end_time, orbit_end_time +\n timedelta(seconds=6082))\n\n # extract data\n for f in fields:\n if eps_file.mdr_sfactor[f.upper()] == 1:\n data[f] = eps_file.mdr[f.upper()].flatten()[idx_nodes]\n else:\n data[f] = (eps_file.mdr[f.upper()].flatten() * 1. /\n eps_file.mdr_sfactor[f.upper()])[idx_nodes]\n\n data[\"swath_indicator\"] = (data[\"beam_number\"].flatten() > 3).astype(\n np.uint8)\n data[\"as_des_pass\"] = (data[\"sat_track_azi\"] < 270).astype(np.uint8)\n\n fields = [(\"longitude_full\", long_nan), (\"latitude_full\", long_nan),\n (\"sigma0_full\", long_nan), (\"inc_angle_full\", uint_nan),\n (\"azi_angle_full\", int_nan), (\"flagfield\", int_nan)]\n\n for f, nan_val in fields:\n data[f] = eps_file.mdr[f.upper()].flatten()\n invalid = eps_file.mdr[f.upper()].flatten() == nan_val\n\n if eps_file.mdr_sfactor[f.upper()] != 1:\n data[f] = data[f] * 1. / eps_file.mdr_sfactor[f.upper()]\n\n data[f][invalid] = nan_val\n\n # modify longitudes from (0, 360) to (-180, 180)\n mask = np.logical_and(data[\"longitude_full\"] != long_nan,\n data[\"longitude_full\"] > 180)\n data[\"longitude_full\"][mask] += -360.\n\n # modify azimuth from (-180, 180) to (0, 360)\n idx = (data[\"azi_angle_full\"] != int_nan) & (data[\"azi_angle_full\"] < 0)\n data[\"azi_angle_full\"][idx] += 360\n\n # set flags\n data[\"f_usable\"] = set_flags_fmv13(data[\"flagfield\"])\n\n return data, metadata",
"def download(dbx, folder, subfolder, name):\r\n path = '/%s/%s/%s' % (\"Apps\", \"Contract Drafter\", \"2.amr\")\r\n while '//' in path:\r\n path = path.replace('//', '/')\r\n with stopwatch('download'):\r\n try:\r\n md, res = dbx.files_download(path)\r\n except dropbox.exceptions.HttpError as err:\r\n print('*** HTTP error', err)\r\n return None\r\n data = res.content\r\n print(data, 'bytes; md:', md)\r\n return data",
"def download_bitfile(cls, bitfile_name):\n def parse_bitfile_header(bitfile_name):\n \"\"\"\n Parse the header of the bitfile and strip off the binary part that will be written to the FPGA.\n \"\"\"\n with open(bitfile_name, 'rb') as bitf:\n contents = bitf.read()\n\n finished = False\n offset = 0\n bit_dict = {}\n\n # Strip the (2+n)-byte first field (2-bit length, n-bit data)\n length = struct.unpack('>h', contents[offset:offset + 2])[0]\n offset += 2 + length\n\n # Strip a two-byte unknown field (usually 1)\n offset += 2\n\n # Strip the remaining headers. 0x65 signals the bit data field\n while not finished:\n desc = contents[offset]\n offset += 1\n\n if desc != 0x65:\n length = struct.unpack('>h',\n contents[offset:offset + 2])[0]\n offset += 2\n fmt = \">{}s\".format(length)\n data = struct.unpack(fmt,\n contents[offset:offset + length])[0]\n data = data.decode('ascii')[:-1]\n offset += length\n\n if desc == 0x61:\n s = data.split(\";\")\n bit_dict['design'] = s[0]\n bit_dict['version'] = s[-1]\n elif desc == 0x62:\n bit_dict['part'] = data\n elif desc == 0x63:\n bit_dict['date'] = data\n elif desc == 0x64:\n bit_dict['time'] = data\n elif desc == 0x65:\n finished = True\n length = struct.unpack('>i',\n contents[offset:offset + 4])[0]\n offset += 4\n # Expected length values can be verified in the chip TRM\n bit_dict['length'] = str(length)\n if length + offset != len(contents):\n raise RuntimeError(\"Invalid length found\")\n bit_dict['data'] = contents[offset:offset + length]\n else:\n raise RuntimeError(\"Unknown field: {}\".format(hex(desc)))\n return bit_dict\n\n \"\"\"\n Download new bitstream onto FPGA.\n \"\"\"\n\n assert isabs(bitfile_name), f\"bitfile_name = {bitfile_name} is not an absolute path!\"\n binfile_name = bitfile_name.split(\"/\")[-1] + \".bin\"\n firmware_path = \"/lib/firmware/\" + binfile_name\n\n # Copy .bin part of .bit file into /lib/firmware folder.\n bit = parse_bitfile_header(bitfile_name)\n bit_buffer = np.frombuffer(bit[\"data\"], \"i4\")\n bin_buffer = bit_buffer.byteswap()\n bin_buffer.tofile(firmware_path, \"\")\n\n # Send the new .bin file to the FPGA manager.\n\n BS_FPGA_MAN = \"/sys/class/fpga_manager/fpga0/firmware\"\n BS_FPGA_MAN_FLAGS = \"/sys/class/fpga_manager/fpga0/flags\"\n\n with open(BS_FPGA_MAN_FLAGS, 'w') as fd:\n fd.write(\"0\")\n\n with open(BS_FPGA_MAN, 'w') as fd:\n fd.write(binfile_name)\n cls._current_bitfile_name = bitfile_name\n\n return cls._current_bitfile_name",
"def test_maff(self):\n index_file = os.path.join(self.test_input, 'folder1#中文', 'mypage.maff')\n os.makedirs(os.path.dirname(index_file), exist_ok=True)\n with zipfile.ZipFile(index_file, 'w') as zh:\n zh.writestr('20200101000000000/index.html', \"\"\"\\\n<!DOCTYPE html>\n<html\n data-scrapbook-create=\"20200101000000000\"\n data-scrapbook-modify=\"20200101000000000\"\n data-scrapbook-source=\"http://example.com\">\n<head>\n<meta charset=\"UTF-8\">\n<title>MyTitle 中文</title>\n</head>\n<body>\npage content\n</body>\n</html>\n\"\"\")\n\n for _info in file2wsb.run(self.test_input, self.test_output):\n pass\n\n book = Host(self.test_output).books['']\n book.load_meta_files()\n book.load_toc_files()\n\n id_folder1, id_item = book.meta.keys()\n self.assertDictEqual(book.meta, {\n id_folder1: {\n 'title': 'folder1#中文',\n 'type': 'folder',\n 'create': id_folder1,\n 'modify': id_folder1,\n },\n id_item: {\n 'title': 'MyTitle 中文',\n 'type': '',\n 'index': f'{id_item}.maff',\n 'create': '20200101000000000',\n 'modify': '20200101000000000',\n 'source': 'http://example.com',\n 'icon': '',\n 'comment': '',\n },\n })\n self.assertDictEqual(book.toc, {\n 'root': [\n id_folder1,\n ],\n id_folder1: [\n id_item,\n ],\n })\n self.assertEqual(set(glob.iglob(os.path.join(self.test_output, '**'), recursive=True)), {\n os.path.join(self.test_output, ''),\n os.path.join(self.test_output, f'{id_item}.maff'),\n })",
"def export_model_description(md: ModelDescription) -> bytes:\n\n # ---------------- write model description -------------------\n\n fmd = ET.Element(\"fmiModelDescription\")\n fmd.set(\"fmiVersion\", \"2.0\")\n fmd.set(\"modelName\", md.modelName)\n fmd.set(\"guid\", md.guid)\n fmd.set(\"author\", md.author)\n fmd.set(\"generationDateAndTime\", md.generationDateAndTime)\n fmd.set(\"variableNamingConvention\", md.variableNamingConvention)\n fmd.set(\"generationTool\", md.generationTool)\n fmd.set(\"description\", md.description)\n\n # CoSimulation\n cs = ET.SubElement(fmd, \"CoSimulation\")\n cs.set(\"modelIdentifier\", md.CoSimulation.modelIdentifier)\n cs.set(\n \"needsExecutionTool\", str(md.CoSimulation.needsExecutionTool).lower(),\n )\n cs.set(\n \"canHandleVariableCommunicationStepSize\",\n str(md.CoSimulation.canHandleVariableCommunicationStepSize).lower(),\n )\n cs.set(\n \"canInterpolateInputs\", str(md.CoSimulation.canInterpolateInputs).lower(),\n )\n\n cs.set(\n \"maxOutputDerivativeOrder\", str(md.CoSimulation.maxOutputDerivativeOrder),\n )\n cs.set(\n \"canRunAsynchronuously\", str(md.CoSimulation.canRunAsynchronuously).lower(),\n )\n cs.set(\n \"canBeInstantiatedOnlyOncePerProcess\",\n str(md.CoSimulation.canBeInstantiatedOnlyOncePerProcess).lower(),\n )\n cs.set(\n \"canNotUseMemoryManagementFunctions\",\n str(md.CoSimulation.canNotUseMemoryManagementFunctions).lower(),\n )\n cs.set(\n \"canGetAndSetFMUstate\", str(md.CoSimulation.canGetAndSetFMUstate).lower(),\n )\n cs.set(\n \"canSerializeFMUstate\", str(md.CoSimulation.canSerializeFMUstate).lower(),\n )\n cs.set(\n \"providesDirectionalDerivative\",\n str(md.CoSimulation.providesDirectionalDerivative).lower(),\n )\n\n # 2.2.4 p.42) Log categories:\n cs = ET.SubElement(fmd, \"LogCategories\")\n for ac in md.logCategories:\n c = ET.SubElement(cs, \"Category\")\n c.set(\"name\", ac)\n\n # 2.2.7 p.47) ModelVariables\n mvs = ET.SubElement(fmd, \"ModelVariables\")\n\n variable_index = 0\n\n for var in md.modelVariables:\n var.variability\n value_reference = str(var.value_reference)\n\n idx_comment = ET.Comment(f'Index of variable = \"{variable_index + 1}\"')\n mvs.append(idx_comment)\n sv = ET.SubElement(mvs, \"ScalarVariable\")\n sv.set(\"name\", var.name)\n sv.set(\"valueReference\", value_reference)\n sv.set(\"variability\", var.variability)\n sv.set(\"causality\", var.causality)\n\n if var.description:\n sv.set(\"description\", var.description)\n\n if var.initial:\n i = var.initial\n sv.set(\"initial\", i)\n\n val = ET.SubElement(sv, var.dataType)\n\n # 2.2.7. p.48) start values\n if var.initial in {\"exact\", \"approx\"} or var.causality == \"input\":\n assert (\n var.start != None\n ), \"a start value must be defined for intial ∈ {exact, approx}\"\n val.set(\"start\", var.start)\n\n variable_index += 1\n\n ms = ET.SubElement(fmd, \"ModelStructure\")\n\n # 2.2.8) For each output we must declare 'Outputs' and 'InitialUnknowns'\n outputs = [\n (idx + 1, o)\n for idx, o in enumerate(md.modelVariables)\n if o.causality == \"output\"\n ]\n\n if outputs:\n os = ET.SubElement(ms, \"Outputs\")\n for idx, o in outputs:\n ET.SubElement(os, \"Unknown\", {\"index\": str(idx), \"dependencies\": \"\"})\n\n os = ET.SubElement(ms, \"InitialUnknowns\")\n for idx, o in outputs:\n ET.SubElement(os, \"Unknown\", {\"index\": str(idx), \"dependencies\": \"\"})\n\n # FMI requires encoding to be encoded as UTF-8 and contain a header:\n #\n # See 2.2 p.28\n return ET.tostring(fmd, pretty_print=True, encoding=\"utf-8\", xml_declaration=True)",
"def _download_minio_file(file_name):\n minioClient = Minio(MINIO_END_POINT,\n access_key = MINIO_ACCESS_KEY,\n secret_key = MINIO_SECRET_KEY,\n secure = True)\n\n file_path = f\"{LOCAL_FILES_PATH}/{file_name}\" # path to download the file\n file_name_indexed = file_name + \".tbi\" # hard coded\n file_path_indexed = f\"{LOCAL_FILES_PATH}/{file_name_indexed}\" # path to download indexed file\n bucket = 'test'\n\n # Create the file\n try:\n f = open(file_path, \"x\")\n f.close()\n\n f = open(file_path_indexed, \"x\")\n f.close()\n except:\n # File already exists, do nothing\n pass\n\n # download the required file into file_path\n try:\n minioClient.fget_object(bucket, file_name, file_path)\n minioClient.fget_object(bucket, file_name_indexed, file_path_indexed)\n except ResponseError as err:\n print(err)",
"def download_presentation(epObject, uc):\r\n fileDict = make_file_dict()\r\n fileDict = populate_file_dict(epObject, uc, fileDict)\r\n now = str(datetime.datetime.now().hour) + \\\r\n str(datetime.datetime.now().minute) + \\\r\n str(datetime.datetime.now().second)\r\n directoryName = epObject.Name.replace(\" \", \"\") + \"_presentation_\" + now\r\n os.mkdir(directoryName)\r\n os.chdir(directoryName)\r\n temp = tempfile.TemporaryFile()\r\n temp.write(urllib.request.urlopen(fileDict['pageUrls'][0]).read())\r\n temp.seek(0)\r\n update_page(temp, fileDict, \"index.html\", index=True)\r\n temp.close()\r\n os.mkdir(\"Pages\")\r\n os.chdir(\"Pages\")\r\n for (pageUrl, pageFileName) in zip(fileDict['pageUrls'][1:], \r\n fileDict['pageFileNames'][1:]):\r\n temp = tempfile.TemporaryFile()\r\n temp.write(urllib.request.urlopen(pageUrl).read())\r\n update_page(temp, fileDict, pageFileName)\r\n temp.close()\r\n os.chdir(\"../\")\r\n os.mkdir(\"Content\")\r\n os.chdir(\"Content\")\r\n for (fileUrl, fileId) in zip(fileDict['fileUrls'], fileDict['fileIds']):\r\n fileName = eportfolio.get_ep_object_properties(uc, fileId).\\\r\n FileName.strip()\r\n urllib.request.urlretrieve(fileUrl, fileName)\r\n os.chdir(\"../\")\r\n os.mkdir(\"Formatting\")\r\n os.chdir(\"Formatting\")\r\n for (cssUrl, cssFileName) in zip(fileDict['cssUrls'],\r\n fileDict['cssFileNames']):\r\n temp = tempfile.TemporaryFile()\r\n temp.write(urllib.request.urlopen(cssUrl).read())\r\n temp.seek(0)\r\n update_css_file(cssUrl, temp, cssFileName)\r\n temp.close()\r\n for imgUrl in fileDict['imgUrls']:\r\n fileName = imgUrl[imgUrl.rfind(\"/\"): ]\r\n if fileName.find(\"?\") > 0:\r\n fileName = fileName[: fileName.find(\"?\")]\r\n urllib.request.urlretrieve(imgUrl, fileName)\r\n os.chdir(\"../\")\r\n print(str(fileDict))\r\n return fileDict",
"def download_file(self, parsed_event, input_dir_path):",
"def modpricesetter_download_xml_from_nemweb(self):\r\n year, month, day = self._get_market_year_month_day_as_str()\r\n base_url = \"https://www.nemweb.com.au/Data_Archive/Wholesale_Electricity/NEMDE/{year}/NEMDE_{year}_{month}/\" + \\\r\n \"NEMDE_Market_Data/NEMDE_Files/NemPriceSetter_{year}{month}{day}_xml.zip\"\r\n url = base_url.format(year=year, month=month, day=day)\r\n r = requests.get(url)\r\n z = zipfile.ZipFile(io.BytesIO(r.content))\r\n z.extractall(self.cache_folder)",
"def download(request, ef_id):\n ef = get_object_or_404(ExamFile, id=ef_id)\n path = os.path.join(settings.MEDIA_ROOT, ef.path.path)\n response= HttpResponse(content=file(path, 'rb').read(), \n mimetype='application/pdf')\n # fn = os.path.split(ef.path.path)[1]\n # response['Content-Disposition'] = \"attachment; filename=%s\" % (fn)\n return response",
"def read_from_mtz ( mtzin = \"\", colin = \"F,SIGF\" ) :\n\n log_string = \"\\n >> clipper_tools: io.structure_factors.read_from_mtz\"\n log_string += \"\\n mtzin: %s\" % mtzin\n\n xml_root = etree.Element('input_file')\n xml_root.attrib['name'] = mtzin\n xml_root.attrib['type'] = 'mini MTZ'\n \n hkl_data = clipper.HKL_data_F_sigF_float()\n hkl_info = clipper.HKL_info ()\n \n if mtzin is not \"\" :\n mtzfilein = clipper.CCP4MTZfile()\n mtzfilein.open_read ( mtzin )\n mtzfilein.import_hkl_info (hkl_info, True)\n mtzfilein.import_hkl_data (hkl_data, \"*/*/[\" + colin + \"]\")\n else :\n return log_string, xml_root, hkl_data, hkl_info\n \n print (dir(hkl_data))\n \n log_string += \"\\n << read_from_mtz has finished\\n\"\n xml_root.attrib['ok'] = 'yes'\n \n return log_string, xml_root, hkl_info, hkl_data",
"def download_meta(self):\n for f in self._manager.remote.list_contents(\".yml\"):\n self._manager.remote.download(f)",
"def download_mission(self):\n cmds = self.vehicle.commands\n cmds.download()\n # Wait until download is complete.\n cmds.wait_valid()",
"def download_document(self,model,field,id,filename=None, **kw):\n Model = request.registry[model]\n cr, uid, context = request.cr, request.uid, request.context\n fields = [field]\n res = Model.read(cr, uid, [int(id)], fields, context)[0]\n filecontent = base64.b64decode(res.get(field) or '')\n if not filecontent:\n return request.not_found()\n else:\n if not filename:\n filename = '%s_%s' % (model.replace('.', '_'), id)\n return request.make_response(filecontent,\n [('Content-Type', 'application/octet-stream'),\n ('Content-Disposition', content_disposition(filename))])",
"def download_file(service, file_id, local_fd):\n request = service.files().get_media(fileId=file_id)\n media_request = http.MediaIoBaseDownload(local_fd, request)\n \n while True:\n try:\n download_progress, done = media_request.next_chunk()\n except errors.HttpError as error:\n print('An error occurred: %s' % error)\n return\n if download_progress:\n print('Download Progress: %d%%' % int(download_progress.progress() * 100))\n if done:\n print('Download Complete')\n return",
"def storage_ipfs_download_file(self, ipfs_cid, output_path=None):\n # Rquest: retrieve a file (the smiley_explorer.png file we uploaded earlier)\n # curl -X POST -H 'Content-Type: application/json' --data '{\"jsonrpc\":\"2.0\",\"method\":\"edgestore.GetFile\",\"params\":[{\"key\": \"0xbc0383809da9fb98c5755e3fa4f19f4ebc7e34308ab321246e4bb54e548fad04\"}],\"id\":1}' http://localhost:19888/rpc\n # Result\n # {\n # \"jsonrpc\": \"2.0\",\n # \"id\": 1,\n # \"result\": {\n # \"path\": \"../data/edgestore/playground/single-node-network/node/storage/file_cache/0xbc0383809da9fb98c5755e3fa4f19f4ebc7e34308ab321246e4bb54e548fad04/smiley_explorer.png\"\n # }\n # }\n\n request_data = {\"jsonrpc\":\"2.0\",\n \"method\":\"edgestore.GetFile\",\n \"params\":[{\"key\": ipfs_cid}],\n \"id\":1}\n res = httpx.post(\n self.ipfs_download_uri,\n headers={\"Content-Type\": \"application/json\"},\n data=json.dumps(request_data),\n timeout=None,\n )\n\n download_path = None\n content = res.json()\n result = content.get(\"result\", None)\n if result is None:\n return False, \"Failed to download file(result is none).\"\n else:\n download_path = result.get(\"path\", None)\n if download_path is None:\n return False, \"Failed to download file(path is none).\"\n else:\n download_path = os.path.join(self.store_home_dir, download_path)\n\n output_file_obj = None\n file_content = None\n try:\n if output_path is not None:\n shutil.copyfile(download_path, output_path)\n output_file_obj = open(output_path, \"rb\")\n except Exception as e:\n pass\n\n try:\n download_file_obj = open(download_path, \"rb\")\n file_content = download_file_obj.read()\n except Exception as e:\n pass\n\n return file_content, output_file_obj",
"def download(self, outputfile: str, outputformat: str):\n pass",
"def generate_file(material_id):\n apr=get_doc_from_MP(material_id)\n mat_list=generate_matrix(apr)\n formu=POSCAR_title(apr)\n cell_for=generate_cell_formula(apr)\n needed_dos=generate_dos_str(material_id)\n revise_dos=dos_into_string(needed_dos)\n ordered_list=generate_ordered_list(revise_dos)\n my_ordered_elements=generate_ordered_elements(revise_dos,ordered_list)\n my_ordered_numbers=generate_ordered_numbers(revise_dos,ordered_list,cell_for)\n generate_POSCAR(formu,mat_list,my_ordered_elements,my_ordered_numbers,revise_dos)",
"def microvm_fxt(request):\n uvm = request.param\n uvm.download()\n return uvm",
"def load_mdf_json(filename):\n\n from neuromllite.utils import load_json, _parse_element\n\n data = load_json(filename)\n\n print(f\"Loaded a graph from {filename}, Root(s): {data.keys()}\")\n if data.keys() == \"graphs\":\n data = {\"UNSPECIFIED\": data}\n model = Model()\n model = _parse_element(data, model)\n\n return model",
"def get_gtfs(agency, fetch):\n if not fetch.get('filename') or not fetch.get('file_url'):\n print \"Feed reference incomplete!:\", fetch\n return\n makedirs(agency)\n filename = os.path.join(agency, fetch['filename'])\n if os.path.exists(filename) and os.stat(filename).st_size == fetch['size']:\n print \"Existing, skipping:\", fetch['file_url']\n else:\n print \"Downloading:\", fetch['file_url']\n urllib.urlretrieve(fetch['file_url'], filename)\n print \"Done\"",
"def disk_fxt(request):\n disk = request.param\n disk.download()\n return disk",
"def fusion_api_download_support_dump(self, uri, localfile, api=None, headers=None):\n return self.dump.get(uri=uri, localfile=localfile, api=api, headers=headers)",
"def fusion_api_download_fabric_support_dump(self, uri, localfile, api=None, headers=None):\n return self.fabric.get_file(uri=uri, localfile=localfile, api=api, headers=headers)",
"def download_special(pxdataset, data_dir):\n # PXD004074 (Tsr1) --------------------------------------------------------\n if pxdataset.pxid == \"PXD004074\":\n tsr1_filename = \"Rappsilber_Cook_CLMS_Tsr1_fasta.zip\"\n tsr1_zip = os.path.join(data_dir, tsr1_filename)\n pxdataset.pxget(tsr1_filename, data_dir)\n\n with zipfile.ZipFile(tsr1_zip, \"r\") as fname:\n fname.extractall(data_dir)\n\n # PXD010222 (PPARg_LBD) ---------------------------------------------------\n if pxdataset.pxid == \"PXD010222\":\n ppar_seq = [\n \">wef|PV4545|PPARg-LBD_human GST-tagged PPARgamma LBD\",\n \"MAPILGYWKIKGLVQPTRLLLEYLEEKYEEHLYERDEGDKWRNKKFELGLEFPNLPYYIDGD\",\n \"VKLTQSMAIIRYIADKHNMLGGCPKERAEISMLEGAVDIRYGVSRIAYSKDFETLKVDFLSK\",\n \"LPEMLKMFEDRLCHKTYLNGDHVTHPDFMLYDALDVVLYMDPMCLDAFPKLVCFKKRIEAIP\",\n \"QIDKYLKSSKYIALWPLQGWQATFGGGDHPPKSDLVPRHNQTSLYKKAGTMQLNPESADLRA\",\n \"LAKHLYDSYIKSFPLTKAKARAILTGKTTDKSPFVIYDMNSLMMGEDKIKFKHITPLQEQSK\",\n \"EVAIRIFQGCQFRSVEAVQEITEYAKSIPGFVNLDLNDQVTLLKYGVHEIIYTMLASLMNKD\",\n \"GVLISEGQGFMTREFLKSLRKPFGDFMEPKFEFAVKFNALELDDSDLAIFIAVIILSGDRPG\",\n \"LLNVKPIEDIQDNLLQALELQLKLNHPESSQLFAKLLQKMTDLRQIVTEHVQLLQVIKKTET\",\n \"DMSLHPLLQEIYKDL\"\n ]\n\n ppar_path = os.path.join(data_dir, \"pparg.fasta\")\n with open(ppar_path, \"w\") as fasta:\n fasta.writelines([l + \"\\n\" for l in ppar_seq])",
"def extract_meta_data(video_file_name, output_file=meta.txt, *args, **kwargs):",
"def _generate_metadata_kind(filename, items, affidavit=None):\n store = appstream.Store('lvfs')\n for item in items:\n\n # add each component\n for md in item.mds:\n component = appstream.Component()\n component.id = md.cid\n component.kind = 'firmware'\n component.name = md.name\n component.summary = md.summary\n component.description = md.description\n if md.url_homepage:\n component.urls['homepage'] = md.url_homepage\n component.metadata_license = md.metadata_license\n component.project_license = md.project_license\n component.developer_name = md.developer_name\n\n # add provide\n for guid in md.guids:\n prov = appstream.Provide()\n prov.kind = 'firmware-flashed'\n prov.value = guid\n component.add_provide(prov)\n\n # add release\n if md.version:\n rel = appstream.Release()\n rel.version = md.version\n rel.description = md.release_description\n if md.release_timestamp:\n rel.timestamp = md.release_timestamp\n rel.checksums = []\n rel.location = app.config['FIRMWARE_BASEURL'] + item.filename\n rel.size_installed = md.release_installed_size\n rel.size_download = md.release_download_size\n rel.urgency = md.release_urgency\n component.add_release(rel)\n\n # add container checksum\n if md.checksum_container:\n csum = appstream.Checksum()\n csum.target = 'container'\n csum.value = md.checksum_container\n csum.filename = item.filename\n rel.add_checksum(csum)\n\n # add content checksum\n if md.checksum_contents:\n csum = appstream.Checksum()\n csum.target = 'content'\n csum.value = md.checksum_contents\n csum.filename = md.filename_contents\n rel.add_checksum(csum)\n\n # add screenshot\n if md.screenshot_caption:\n ss = appstream.Screenshot()\n ss.caption = md.screenshot_caption\n if md.screenshot_url:\n im = appstream.Image()\n im.url = md.screenshot_url\n ss.add_image(im)\n component.add_screenshot(ss)\n\n # add requires for each allowed vendor_ids\n group = db.groups.get_item(item.group_id)\n if group.vendor_ids:\n req = appstream.Require()\n req.kind = 'firmware'\n req.value = 'vendor-id'\n if len(group.vendor_ids) == 1:\n req.compare = 'eq'\n else:\n req.compare = 'regex'\n req.version = '|'.join(group.vendor_ids)\n component.add_require(req)\n\n # add manual firmware or fwupd version requires\n for req_txt in md.requirements:\n split = req_txt.split('/', 4)\n req = appstream.Require()\n req.kind = split[0]\n req.value = split[1]\n req.compare = split[2]\n req.version = split[3]\n component.add_require(req)\n\n # add component\n store.add(component)\n\n # dump to file\n download_dir = app.config['DOWNLOAD_DIR']\n if not os.path.exists(download_dir):\n os.mkdir(download_dir)\n filename = os.path.join(download_dir, filename)\n store.to_file(filename)\n\n # upload to the CDN\n blob = open(filename, 'rb').read()\n _upload_to_cdn(filename, blob)\n\n # generate and upload the detached signature\n if affidavit:\n blob_asc = affidavit.create(blob)\n _upload_to_cdn(filename + '.asc', blob_asc)",
"def keyholemarkup2x(file,output='df'):\n r = re.compile(r'(?<=\\.)km+[lz]?',re.I)\n try:\n extension = r.search(file).group(0) #(re.findall(r'(?<=\\.)[\\w]+',file))[-1]\n \n \n except IOError as e:\n logging.error(\"I/O error {0}\".format(e))\n if (extension.lower()=='kml') is True:\n buffer = file\n elif (extension.lower()=='kmz') is True:\n kmz = ZipFile(file, 'r')\n \n vmatch = np.vectorize(lambda x:bool(r.search(x)))\n A = np.array(kmz.namelist())\n sel = vmatch(A)\n buffer = kmz.open(A[sel][0],'r')\n \n else:\n raise ValueError('Incorrect file format entered. Please provide the '\n 'path to a valid KML or KMZ file.') \n \n \n parser = xml.sax.make_parser()\n handler = PlacemarkHandler()\n parser.setContentHandler(handler)\n parser.parse(buffer)\n \n try:\n kmz.close()\n except:\n pass\n \n df = pd.DataFrame(handler.mapping).T\n names = list(map(lambda x: x.lower(),df.columns))\n if 'description' in names:\n extradata = df.apply(PlacemarkHandler.htmlizer,axis=1)\n df = df.join(extradata)\n \n \n output = output.lower()\n \n if output=='df' or output=='dataframe' or output == None:\n result = df\n \n elif output=='csv':\n out_filename = file[:-3] + \"csv\"\n df.to_csv(out_filename,encoding='utf-8',sep=\"\\t\")\n result = (\"Successfully converted {0} to CSV and output to\"\n \" disk at {1}\".format(file,out_filename))\n \n elif output=='gpd' or output == 'gdf' or output=='geoframe' or output == 'geodataframe':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n result = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n \n \n elif output=='geojson' or output=='json':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n try:\n import geojson\n except ImportError as e:\n raise ImportError('This operation requires geojson. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"geojson\"\n gdf.to_file(out_filename,driver='GeoJSON')\n validation = geojson.is_valid(geojson.load(open(out_filename)))['valid']\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to GeoJSON and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The geojson conversion did not create a '\n 'valid geojson object. Try to clean your '\n 'data or try another file.')\n \n elif output=='shapefile' or output=='shp' or output =='esri shapefile':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. 
{0}'.format(e))\n \n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n try:\n import shapefile\n except ImportError as e:\n raise ImportError('This operation requires pyshp. {0}'.format(e))\n \n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"shp\"\n gdf.to_file(out_filename,driver='ESRI Shapefile')\n sf = shapefile.Reader(out_filename)\n import shapefile\n sf = shapefile.Reader(out_filename)\n if len(sf.shapes())>0:\n validation = \"yes\"\n else:\n validation = \"no\"\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to Shapefile and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The Shapefile conversion did not create a '\n 'valid shapefile object. Try to clean your '\n 'data or try another file.') \n else:\n raise ValueError('The conversion returned no data; check if'\n ' you entered a correct output file type. '\n 'Valid output types are geojson, shapefile,'\n ' csv, geodataframe, and/or pandas dataframe.')\n \n return result",
"def _download_file(self, video_objects):\n downloaded_video = []\n path=\"media/\"\n for video_object in video_objects:\n if 'contentUrl' in video_object.keys() and video_object['contentUrl']!='':\n \n url = video_object['contentUrl']\n filename = url.split('/')[-1]\n r = requests.get(url, stream=True)\n \n with open(filename, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024): \n if chunk:\n f.write(chunk)\n\n path+=filename\n return path",
"def test_file_package_request(self):\n file_name = \"file_name\"\n chunk_index = 0\n\n expected_topic = self.factory.common_topic + WAPMF.FILE_BINARY_REQUEST\n expected_payload = json.dumps(\n {\n \"name\": file_name,\n \"chunkIndex\": chunk_index,\n }\n )\n expected_message = Message(expected_topic, expected_payload)\n serialized_message = self.factory.make_from_package_request(\n file_name, chunk_index\n )\n\n self.assertEqual(expected_message, serialized_message)",
"def import_fmu(archive_or_dir) -> ModelDescription:\n\n archive_or_dir = Path(archive_or_dir)\n model_description_str = None\n\n if archive_or_dir.is_file():\n with TemporaryDirectory() as tmpdir, ZipFile(archive_or_dir) as zip_ref:\n\n tmpdir = Path(tmpdir())\n zip_ref.extractall(tmpdir)\n\n model_description_path = tmpdir / \"modelDescription.xml\"\n\n if not model_description_path.is_file():\n raise FileNotFoundError(\n \"No modelDescription.xml file was found inside the FMU archive\"\n )\n\n with open(model_description_path, \"r\") as f:\n model_description_str = f.read()\n else:\n model_description_path = archive_or_dir / \"modelDescription.xml\"\n\n if not model_description_path.is_file():\n raise FileNotFoundError(\n \"No modelDescription.xml file was found inside the FMU directory\"\n )\n\n with open(model_description_path, \"rb\") as f:\n model_description_str = f.read()\n\n return parse_model_description(model_description_str)",
"def get_drms_files(self):\n import drms\n client = drms.Client(email=self.email,verbose=True)\n fmt = '%Y.%m.%d_%H:%M'\n self.t_qstr = self.series+'[{0}_TAI-{1}_TAI@{2}]'.format(self.start.strftime(fmt),self.end.strftime(fmt),self.cadence) \n\n\n #create wavelength query string\n self.w_qstr = '[' \n for i in self.wav: self.w_qstr = self.w_qstr+'{0},'.format(int(i.value))\n #remove last , and add bracket\n self.w_qstr = self.w_qstr[:-1]+']'\n \n #make the series string\n self.s_qstr = '{'+self.segment+'}'\n\n #the full query\n self.qstr = self.t_qstr+self.w_qstr+self.s_qstr\n\n #IF ERRORS WITH URL ERROR IT IS BECAUSE THE DOWNLOAD FILE SIZE IS TOO LARGE\n #export the data file list \n self.expt = client.export(self.qstr)\n#create an array of indexes to download\n index = np.arange(np.size(self.expt.urls.url))\n# get file from JSOC\n #set directory to current if no path set\n outf = self.expt.download(self.odir,index,fname_from_rec=True)",
"async def get_file(self, link, name, md5, session):\n if os.path.exists(name) or md5 in opts.archived_md5:\n self.count += 1\n return\n\n async with session.get(link) as media:\n # Open file initially with .part suffix\n with open(f\"{name}.part\", \"wb\") as f:\n while True:\n chunk = await media.content.read(1024)\n if not chunk:\n break\n f.write(chunk)\n\n # Remove .part suffix once complete\n # After this point file won't get removed if script gets interrupted\n os.rename(f\"{name}.part\", name)\n\n if opts.archive:\n log_hash(md5)\n self.count += 1\n msg(f\"{self.fetch_progress()} {self.board}/{self.dir}/{name}\")",
"def test_download_simfile(self):\n scrape_category.download_simfile(self.simfile, self.dest,\n tidy=False,\n use_logfile=True,\n extract=True,\n link=self.link)\n\n # There should now be three files - a download log, a zip, and\n # an unzipped simfile.\n self.check_saved_files(log=True, unzipped=True, zipped=True)\n\n records = {\"100\": self.simfile}\n updated_records = scrape_category.update_records_from_log(records, self.dest)\n assert len(updated_records) == 1\n assert \"100\" in updated_records\n # The records should be updated to reflect where the simfile\n # was actually saved\n assert updated_records[\"100\"].name == \"foo\"",
"def download_file(path, filename, destination):\n import os\n command = \"wget -q -O \"+destination+\"/\"+filename+\" ftp://nomads.ncdc.noaa.gov/\"+path+\"/\"+filename\n os.system(command)",
"def download_model(name: str) -> str:\n model_name, model_type, model_url = ModelInfo.get_model_info(name)\n model_path = _create_dirs(model_name)\n if model_type == \"single\":\n model_path = _download_file(model_url, model_path)\n elif model_type == \"zip\":\n model_path = _download_zip_model(model_url, model_path)\n else:\n print(f\"model type {model_type} not yet implemented\")\n model_path = \"\"\n return model_path",
"def getFile(self, model):\n res = self.hasModel(model)\n if not res['OK']:\n return res\n if not self.models[model]:\n return S_ERROR(\"No file attached to model %s\" % model)\n return S_OK(self.models[model])",
"def get_data_from_URL(url):\n querystring = {\"q\": \"eminem\"}\n headers = {\n 'x-rapidapi-host': \"deezerdevs-deezer.p.rapidapi.com\",\n 'x-rapidapi-key': \"SIGN-UP-FOR-KEY\"\n }\n\n response = requests.request(\n \"GET\", url, headers=headers, params=querystring)\n received_file = json.loads(response.text)\n return received_file",
"def download_file(client, file_id):\n\n file_content = client.file(file_id).content()\n print(file_content)",
"def LoadMMMetaData(filename):\r\n## print \"loading MM Metadata\"\r\n file = open(filename,'r')\r\n data = file.read()\r\n file.close()\r\n data = data.replace(\"false\",\"False\")\r\n data = data.replace(\"true\",\"True\")\r\n data = data.replace(\"null\",\"0\")\r\n f = eval(str(data))\r\n tiles = []\r\n for i in f.keys():\r\n if i != \"Summary\":\r\n tiles.append(i)\r\n xpos = f[tiles[0]][\"XPositionUm\"]\r\n ypos = f[tiles[0]][\"YPositionUm\"]\r\n zpos = f[tiles[0]][\"ZPositionUm\"] \r\n ScaleFactorX= f[\"Summary\"][\"PixelSize_um\"]\r\n ScaleFactorY= ScaleFactorX\r\n Width=f[\"Summary\"][\"Width\"]\r\n Height=f[\"Summary\"][\"Height\"]\r\n extent=[xpos-(Width/2)*ScaleFactorX,xpos+(Width/2)*ScaleFactorX,\\\r\n ypos-(Height/2)*ScaleFactorY,ypos+(Height/2)*ScaleFactorY] #FOR NOW\r\n\r\n #WHY WAS IT + THEN - FOR Y??\r\n return extent,zpos",
"def download_mojo(self, path=\".\", get_genmodel_jar=False, genmodel_name=\"\"):\n return ModelBase.download_mojo(self.leader, path, get_genmodel_jar, genmodel_name)",
"def main(url, localfile):\n ph.download_file(url, localfile)",
"def get_file(url):\n # Make request\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Read fits\n iofile = io.BytesIO(response.content)\n content_type = response.headers['Content-Type']\n if content_type == 'image/fits':\n obj = fits.open(iofile)\n else:\n raise Exception('Unknown content type: {0}.'.format(content_type))\n return obj",
"def __download_pretrained(self, fname: str, fdir: str):\n download_url = self._fastlinks[\"url\"] + fname\n r = requests.get(download_url, stream=True)\n with open(fdir, \"wb\") as downfile:\n total_length = int(r.headers.get('content-length'))\n tt = float(\"{:.2f}\".format(total_length / 1024 ** 2))\n for ch in tqdm.tqdm(iterable=r.iter_content(chunk_size=1024 ** 2), total=tt, unit='MB'):\n if ch:\n downfile.write(ch)",
"def _get_file(self, path: str) -> Tuple[str, bytes]:\n self._trace(\"fetching: %s\" % path)\n meta, resp = self._connection.files_download(path)\n return (meta.rev, resp.content)",
"def download_file(service, file_id, local_fd):\n request = service.files().get_media(fileId=file_id)\n media_request = MediaIoBaseDownload(local_fd, request)\n\n while True:\n try:\n download_progress, done = media_request.next_chunk()\n except errors.HttpError as error:\n print('An error occurred: %s' % error)\n return\n # if download_progress:\n # print('Download Progress: %d%%' % int(download_progress.progress() * 100))\n if done:\n print('Download Complete')\n return",
"def _download_epw_file(url):\n r = requests.get(url)\n if r.ok:\n # py2 and 3 compatible: binary write, encode text first\n log.debug(\" ... OK!\")\n return io.StringIO(r.text)\n else:\n log.error(\" connection error status code: %s\" % r.status_code)\n r.raise_for_status()",
"def download_data():\n url = 'https://www.dropbox.com/s/xk4glpk61q3qrg2/imdb.tgz?dl=1'\n urllib.request.urlretrieve(url, 'imdb.tgz')\n tar = tarfile.open(\"imdb.tgz\")\n tar.extractall()\n tar.close()",
"def download_it(fw, acquisition, file_name, input_path):\n\n safe = make_file_name_safe(file_name, replace_str='_')\n\n full_path = input_path + safe\n\n if acquisition.timestamp:\n if acquisition.timezone:\n created = acquisition.original_timestamp.isoformat()\n else:\n created = acquisition.timestamp.isoformat()\n else:\n created = 'unknown'\n\n rpt = 1\n while full_path in context.gear_dict['niftis']: # then repeated name\n full_path = input_path + str(rpt) + '_' + safe\n rpt += 1\n\n if os.path.isfile(full_path):\n log.info('File exists ' + file_name + ' -> ' +\\\n full_path + ' created ' + created)\n else:\n log.info('Downloading ' + file_name + ' -> ' +\\\n full_path + ' created ' + created)\n acquisition.download_file(file_name, full_path)\n\n full_file = fw.get_acquisition_file_info(acquisition.id, file_name)\n field_strength = full_file.info.get('MagneticFieldStrength')\n\n context.gear_dict['niftis'].append(full_path)\n context.gear_dict['file_names'].append(file_name)\n context.gear_dict['createds'].append(created)\n context.gear_dict['field_strength'].append(field_strength)",
"def load_NMF_model():\n model = pickle.load(open(\"models/nmf_model.sav\", 'rb'))\n Q = model.components_ \n return model, Q",
"def download_data():\n url = 'https://www.dropbox.com/s/8oehplrobcgi9cq/imdb.tgz?dl=1'\n urllib.request.urlretrieve(url, 'imdb.tgz')\n tar = tarfile.open(\"imdb.tgz\")\n tar.extractall()\n tar.close()",
"def retrieveURL(mw, url):\n req = urllib2.Request(url, None, {'User-Agent': 'Mozilla/5.0 (compatible; Anki)'})\n resp = urllib2.urlopen(req)\n # ct = resp.info().getheader(\"content-type\")\n filecontents = resp.read()\n # strip off any query string\n url = re.sub(r\"\\?.*?$\", \"\", url)\n path = unicode(urllib2.unquote(url.encode(\"utf8\")), \"utf8\")\n fname = os.path.basename(path)\n if not fname:\n fname = checksum(filecontents)\n return mw.col.media.writeData(unicode(fname), filecontents)",
"def get_external_struct(self):\n self.download_structure()\n parser = MMCIFParser()\n structure = parser.get_structure('STRUCT_OBJ',\n os.path.join(self.struct_dir, self.struct_name) + '.cif')\n return structure",
"def generate_metadata(self):\n if self.options.mbtiles:\n return\n if not os.path.exists(self.output):\n os.makedirs(self.output)\n\n if self.options.profile == 'mercator':\n\n south, west = self.mercator.MetersToLatLon( self.ominx, self.ominy)\n north, east = self.mercator.MetersToLatLon( self.omaxx, self.omaxy)\n south, west = max(-85.05112878, south), max(-180.0, west)\n north, east = min(85.05112878, north), min(180.0, east)\n self.swne = (south, west, north, east)\n\n # Generate googlemaps.html\n if self.options.webviewer in ('all','google') and self.options.profile == 'mercator':\n if not self.options.resume or not os.path.exists(os.path.join(self.output, 'googlemaps.html')):\n f = open(os.path.join(self.output, 'googlemaps.html'), 'w')\n f.write( self.generate_googlemaps() )\n f.close()\n\n # Generate openlayers.html\n if self.options.webviewer in ('all','openlayers'):\n if not self.options.resume or not os.path.exists(os.path.join(self.output, 'openlayers.html')):\n f = open(os.path.join(self.output, 'openlayers.html'), 'w')\n f.write( self.generate_openlayers() )\n f.close()\n\n elif self.options.profile == 'geodetic':\n\n west, south = self.ominx, self.ominy\n east, north = self.omaxx, self.omaxy\n south, west = max(-90.0, south), max(-180.0, west)\n north, east = min(90.0, north), min(180.0, east)\n self.swne = (south, west, north, east)\n\n # Generate openlayers.html\n if self.options.webviewer in ('all','openlayers'):\n if not self.options.resume or not os.path.exists(os.path.join(self.output, 'openlayers.html')):\n f = open(os.path.join(self.output, 'openlayers.html'), 'w')\n f.write( self.generate_openlayers() )\n f.close()\n\n elif self.options.profile in ['raster','gearth','garmin']:\n\n west, south = self.ominx, self.ominy\n east, north = self.omaxx, self.omaxy\n\n self.swne = (south, west, north, east)\n\n # Generate openlayers.html\n if self.options.webviewer in ('all','openlayers'):\n if not self.options.resume or not os.path.exists(os.path.join(self.output, 'openlayers.html')):\n f = open(os.path.join(self.output, 'openlayers.html'), 'w')\n f.write( self.generate_openlayers() )\n f.close()\n\n\n # Generate tilemapresource.xml.\n if (self.options.tile_format != 'hybrid' and self.options.profile != 'garmin'\n and (not self.options.resume or not os.path.exists(os.path.join(self.output, 'tilemapresource.xml')))):\n f = open(os.path.join(self.output, 'tilemapresource.xml'), 'w')\n f.write( self.generate_tilemapresource())\n f.close()",
"def from_molfile(mfl, print_debug=False):\n\n rdm = _rd_chem.rdmolfiles.MolFromMolBlock(mfl, removeHs=False)\n if rdm is None and print_debug:\n print(f'Warning: rdm fails for {mfl} by returning {rdm}')\n\n return rdm",
"def _read_mtd(self) -> (etree._Element, dict):\n mtd_from_path = \"MUX*.xml\"\n mtd_archived = r\"MUX.*\\.xml\"\n\n return self._read_mtd_xml(mtd_from_path, mtd_archived)",
"def _download(self) -> None:\n download_url(\n self.url,\n self.root,\n filename=self.data_dir,\n md5=self.md5 if self.checksum else None,\n )\n self._extract()"
] | [
"0.5991313",
"0.59626365",
"0.55595934",
"0.5496151",
"0.54330605",
"0.53938437",
"0.53894514",
"0.5371297",
"0.536752",
"0.5346592",
"0.53392506",
"0.52200156",
"0.5186871",
"0.51702356",
"0.5167433",
"0.5166752",
"0.51650244",
"0.51601297",
"0.5158457",
"0.51551956",
"0.5145831",
"0.5131597",
"0.51251066",
"0.51243496",
"0.51166767",
"0.51009214",
"0.5078752",
"0.5074977",
"0.5071544",
"0.50380653",
"0.50313544",
"0.50313544",
"0.5019697",
"0.501465",
"0.5009079",
"0.5000874",
"0.4984733",
"0.49786332",
"0.49786332",
"0.49707845",
"0.4964789",
"0.49631497",
"0.49607724",
"0.4958266",
"0.4957252",
"0.49515894",
"0.4944082",
"0.49361485",
"0.49309438",
"0.49299443",
"0.49295017",
"0.4917932",
"0.49110508",
"0.49094328",
"0.49034917",
"0.4903342",
"0.49025905",
"0.48725867",
"0.48568845",
"0.48564494",
"0.48518872",
"0.48467752",
"0.4825836",
"0.48255917",
"0.48254222",
"0.4824907",
"0.4817695",
"0.48150614",
"0.48105526",
"0.47957042",
"0.47939995",
"0.4789333",
"0.47891247",
"0.4786455",
"0.47837362",
"0.47685075",
"0.47664446",
"0.47639027",
"0.4755624",
"0.47279945",
"0.47258434",
"0.47231278",
"0.47211012",
"0.4717047",
"0.4716913",
"0.47165254",
"0.47143823",
"0.47071517",
"0.47065744",
"0.4703207",
"0.46902582",
"0.46884826",
"0.4685416",
"0.46790704",
"0.46766204",
"0.46749464",
"0.46742007",
"0.46652037",
"0.46642938",
"0.4663335",
"0.46585044"
] | 0.0 | -1 |
Returns the coordination sequences of a topology as a list of lists. | def get_cs(self,name):
return self.mfp.get_cs(name) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def connected_components(self) -> List[list]:\n self.__set_all_nodes_unvisited()\n res = self.__tarjan()\n # res.reverse()\n return res",
"def connected_components(self):\n if self._connected:\n return [self]\n G = Graph()\n G.add_vertices(list(range(self.degree())))\n for p in self._g:\n G.add_edges(enumerate(p.domain()))\n m = G.connected_components()\n if len(m) == 1:\n return [self]\n for mm in m:\n mm.sort()\n m.sort()\n g = [[] for _ in range(len(m))]\n m_inv = [None] * self.degree()\n for t, mt in enumerate(m):\n for i, mti in enumerate(mt):\n m_inv[mti] = i\n for k in range(self.length()):\n tmp = [None] * len(mt)\n for i, mti in enumerate(mt):\n tmp[i] = m_inv[self._g[k](mti)]\n g[t].append(tmp)\n return [Constellation(g=g[i], check=False) for i in range(len(m))]",
"def get_topology(self):\n topology = []\n # Retrieving waypoints to construct a detailed topology\n for segment in self._wmap.get_topology():\n x1 = segment[0].transform.location.x\n y1 = segment[0].transform.location.y\n x2 = segment[1].transform.location.x\n y2 = segment[1].transform.location.y\n seg_dict = dict()\n seg_dict['entry'] = (x1, y1)\n seg_dict['exit'] = (x2, y2)\n seg_dict['path'] = []\n wp1 = segment[0]\n wp2 = segment[1]\n seg_dict['intersection'] = True if wp1.is_intersection else False\n endloc = wp2.transform.location\n w = wp1.next(1)[0]\n while w.transform.location.distance(endloc) > 1:\n x = w.transform.location.x\n y = w.transform.location.y\n seg_dict['path'].append((x, y))\n w = w.next(1)[0]\n\n topology.append(seg_dict)\n return topology",
"def transpositions(self):\n a = self.cyclic_form\n res = []\n for x in a:\n nx = len(x)\n if nx == 2:\n res.append(tuple(x))\n elif nx > 2:\n first = x[0]\n for y in x[nx-1:0:-1]:\n res.append((first,y))\n return res",
"def topology(self) -> List[Topology]:\n return self._topology",
"def convert_coordinations(self, coordinations):\n result = []\n game_rows = len(self.status)\n for i in coordinations:\n result.append((i[1], game_rows - i[0] - 1))\n result = sorted(result)\n return result",
"def get_all_coordinates(self):\n coordinates = []\n\n for relative_coordinate in self.shape:\n co = [self.coordinate[0] + relative_coordinate[0], self.coordinate[1] + relative_coordinate[1]]\n coordinates.append(co)\n return coordinates",
"def destinations(self):\n return sorted([list(x) for x in self if isinstance(x, tuple)])",
"def co_vertexes(self):\n theta = self.orientation + np.pi / 2\n shifts = np.array([np.cos(theta), np.sin(theta)]) * self.b\n return self.coords + (shifts[:, None] * [-1, 1]).T",
"def connected_components(self) -> List[list]:\n self.reset_tags()\n ans = []\n visited = dict() # A dictionary of visited nodes\n\n for key in self._graph.get_all_v():\n if not visited.get(key):\n path = self.connected_component(key)\n for node in path:\n visited.__setitem__(node.key, True)\n ans.append(path)\n return ans",
"def to_list(self):\n t = ([],) * self.size\n for x in range(self.size):\n t[x].extend(self.get_links(x))\n\n return t1",
"def topologies(self):\n return self._topologies",
"def get_vertices(self) -> []:\n return [i for i in self.adj_list]",
"def get_components(graph):\n return [graph.subgraph(c).copy() for c in nx.connected_components(graph)]",
"def get_coordinates(self):\r\n coordinates_list = []\r\n for i in range(self.__length):\r\n if self.__orientation == Direction.VERTICAL:\r\n temp = (self.__location[0] + i, self.__location[1])\r\n if self.__orientation == Direction.HORIZONTAL:\r\n temp = (self.__location[0], self.__location[1] + i)\r\n coordinates_list.append(temp)\r\n return coordinates_list",
"def components_graph(geo, stereo=True):\n return automol.graph.connected_components(graph(geo, stereo=stereo))",
"def connecting(node1, node2):\n comp_list = []\n \"\"\":type : list[components.Component]\"\"\"\n if node1 == node2:\n return []\n for comp in node1.connected_comps:\n if comp.neg == node2:\n comp_list.append(comp)\n elif comp.pos == node2:\n comp_list.append(comp)\n return comp_list",
"def get_all_arcs(self):\n return [ (i, j) for i in self.constraints for j in self.constraints[i] ]",
"def to_list(cls, data):\n\t\tif isinstance(data, Atom) == False:\n\t\t\traise Exception(\"data must be a class object\")\n\t\tx,y,z = (data.atom_loc)[0], (data.atom_loc)[1], (data.atom_loc)[2]\n\t\treturn [x,y,z]",
"def sublistsC (seq):\n if seq:\n sublists = [([seq[0]] + a, b) for a, b in sublistsC(seq[1:])]\n return sublists + [(b, a) for a, b in sublists]\n else:\n return [([], [])]",
"def nodes(topology):\n return topology.nodes()",
"def getArcsTo(self):\n return [ arc for arc in sorted(self._arcsTo) ]",
"def as_list(self):\n nodes = []\n node = self.first_node\n while node:\n nodes.append(node)\n node = node.next\n return nodes",
"def ControlFlowGraphToSequence(graph: reachability_pb2.ControlFlowGraph) -> str:\n s = []\n for node in graph.node:\n successors = ' '.join(sorted(node.child))\n s.append(f'{node.name}: {successors}\\n')\n return ''.join(s)",
"def tranpose(g):\n result = [[]]\n for i in g: # recursively get adjacent nodes\n for j in g[i]:\n result[i].append(j)\n return result",
"def construct_sequence_list(self):\n return list(self.iter_sequence())",
"def get_node_pairs_from_path(path):\n\n path = path[:-1]\n pairs = []\n for i in range(len(path)):\n center_node = path[i]\n for j in range(max(i - config.window_size, 0), min(i + config.window_size + 1, len(path))):\n if i == j:\n continue\n node = path[j]\n pairs.append([center_node, node])\n return pairs",
"def edges(self):\n return [(a, b) for a in self._consequences_of\n for b in self._consequences_of[a]]",
"def connected_components(self) -> List[list]:\n for n in self.dw_graph.get_all_v().values():\n n.distance=0.0\n mega_list = []\n for n in self.dw_graph.get_all_v().values():\n if n.distance!=-10:\n mega_list.append(self.connected_component(n.node_id))\n return mega_list",
"def get_contexts(chords_seq):\n chords , contexts = [], []\n m_before = context_size\n m_after = context_size\n\n copy_chords_seq = copy.deepcopy(chords_seq)\n size = len(chords_seq)\n\n for i in range(size):\n # the neighborhood of chords at the beginning or at the end of a sequence is smaller\n if i < m_before:\n m_before = i\n elif size - i <= m_after:\n m_after = size - i - 1\n\n if (m_before > 0):\n for context in map(list, copy_chords_seq[(i - m_before):i]):\n c_j = copy.deepcopy(list(context))\n #c_j.append(EOS_ID)\n chords.append(list(chords_seq[i]))\n contexts.append(c_j)\n if (m_after > 0):\n\n for context in map(list, chords_seq[(i + 1):(i + m_after + 1)]):\n c_j = copy.deepcopy(list(context))\n #c_j.append(EOS_ID)\n chords.append(list(chords_seq[i]))\n contexts.append(c_j)\n\n m_before = context_size\n m_after = context_size\n\n return (chords,contexts)",
"def get_coordinate_lists(self, crs=None):\n x, y = self.vertices.vectors()[:2]\n if crs is not None and (crs != self.crs):\n x, y = _reproject((x,y), self.crs, crs)\n return x, y",
"def get_conjugate_constructions(self, zero_index=False):\n object_labels, directed_graph = self.get_dependency_graph(zero_index)\n\n sorts = nx.algorithms.dag.all_topological_sorts(directed_graph)\n sorts_list = list(sorts)\n return object_labels, sorts_list",
"def to_list(self):\n path = []\n for point in self.points:\n path.append(point.to_dict())\n\n return path",
"def get_adjacency_list(sequences):\r\n\r\n S = set()\r\n\r\n for i in sequences:\r\n if i not in S:\r\n S.add(i)\r\n\r\n C = get_reverse_complement(i)\r\n if C not in S:\r\n S.add(C)\r\n\r\n #Print adjacency list in format specified\r\n for j in S:\r\n print(\"(\" + j[:-1] + \", \" + j[1:] + \")\")",
"def coord_vecs(self):\n return [np.linspace(x0, x1, nx) for x0, x1, nx in zip(self.mins, self.maxs, self.shape)]",
"def complementary_regions(self):\n g = self._get_puncturefinder_graph()\n # return g.connected_components()\n return list(nx.connected_components(g))",
"def getSortedCyclicDependencies(self):\n res = []\n if self.isCircular():\n res = [self.identifier if self.originalId is None else self.originalId]\n # fill node inputs\n nn = 0\n while nn < len(res):\n _node = res[nn]\n for _inputId in self.model.getNode(_node).ioEngine.inputs:\n input_node = self.model.getNode(_inputId)\n if not _inputId in res and input_node.isCircular():\n # check if node is in circle of _inputId\n if _node in input_node.getFullInputs():\n res.append(_inputId)\n nn += 1\n return res",
"def strongly_connected_components_subgraphs(self):\n return [self.subgraph(_) for _ in self.strongly_connected_components()]",
"def to_list(self):\n return [self.x, self.y, self.z]",
"def get_node_coordinates(nodes) :\r\n\r\n coords = [] #The list of coordinates\r\n\r\n for node in nodes :\r\n coords.append(node.coords)\r\n\r\n return coords",
"def get_node_list(self):\n return [[node] for node in self.graph.nodes]",
"def _node_seq(self):\n return list(self.values())",
"def get_triplet_composition(seq):\n out = [] \n for i in range(len(seq)):\n \n if i+3 > len(seq):\n break\n out.append(seq[i:i+3])\n return out",
"def directions(self):\n return []",
"def get_path_coordinates(path) :\r\n\r\n coords = []\r\n\r\n for node in path :\r\n coords.append(node.coords)\r\n\r\n return coords",
"def connected_component(self, id1: int) -> list:\n self.__set_all_nodes_unvisited()\n res = self.__tarjan(self.get_graph().get_all_v().get(id1))\n\n for scc in res:\n if id1 in scc:\n return scc\n return []",
"def get_vertices(self, crs=None):\n if crs is None:\n return [np.array(v) for v in self.vertices]\n else:\n vertices = []\n for line in self.vertices:\n line_vertices = [_reproject(v[:2], self.crs, crs) for v in line]\n vertices.append(np.array(line_vertices))\n return vertices",
"def get_path_list(path):\n # Build the path from end back to beginning.\n nodes = []\n prevevt = None\n while path is not None:\n nodes.append((path.node, path.cost, prevevt))\n prevevt = path.evt\n path = path.prev\n\n nodes.reverse()\n return nodes",
"def components(self):\n comps = []\n for rx in self.source_field.receiver_list:\n comps += rx.components\n return comps",
"def generate_CA_or_P_trace(trajectoryHandler, backbone_atoms_selection = \"name CA P\"):\n coordsets = numpy.array([])\n try:\n # Only get first frame of the selection\n coordsets = trajectoryHandler.getMergedStructure().select(backbone_atoms_selection).getCoordsets()[0]\n except:\n print \"[ERROR visualizationTools::generate_CA_or_P_trace] Impossible to get coordinates for trace\"\n return coordsets.tolist()",
"def get_path_nodes(self, path):\n nodes = []\n for arc in path:\n start = self.arc_info[arc][\"start\"]\n destin = self.arc_info[arc][\"destin\"]\n nodes.append(start)\n nodes.append(destin)\n nodes = list(set(nodes))\n nodes.sort()\n return nodes",
"def getArcsFrom(self):\n return [ arc for arc in sorted(self._arcsFrom) ]",
"def get_conect_records(self) -> List[str]:\n conect_records = []\n adjacency_matrix = self._structure.adjacency_matrix\n for x_serial, row in enumerate(adjacency_matrix, 1):\n connections = np.flatnonzero(row) + 1\n fmt = \"{: 5d}\" if x_serial < 100000 else \"{: 5X}\"\n first_atom = fmt.format(x_serial)\n connection_list = [\"CONECT\", first_atom]\n for connection in connections:\n fmt = \"{: 5d}\" if connection < 100000 else \"{: 5X}\"\n connection_list.append(fmt.format(connection))\n connection_list.append(\"\\n\")\n conect_records.append(\"\".join(connection_list))\n return conect_records",
"def get_coordinates(self):\n return np.array([(n.x, n.y) for n in self.nodes])",
"def connected_components(G):\n\n # start with empty list of components\n C = []\n visited = set()\n\n for v in G.get_vertices():\n if v not in visited:\n span = spanning_edges(G, v)\n component = set()\n\n for x in span:\n for y in x:\n component.add(y)\n visited.add(y)\n\n C.append(component)\n\n # sort the components list by the min on each element\n C.sort(key=min)\n return C",
"def clts(sequence):\n return [_token2clts(segment)[1] for segment in sequence]",
"def get_paths(self, depth=None):\n if not isinstance(self.ref_cell, Cell):\n return []\n if self.origin is not None:\n trans = numpy.array(self.origin)\n else:\n trans = None\n if self.rotation is not None:\n rot = self.rotation * numpy.pi / 180.0\n else:\n rot = None\n return [\n p.transform(trans, rot, self.magnification, self.x_reflection)\n for p in self.ref_cell.get_paths(depth=depth)\n ]",
"def valence_terms(cls, topology):\n return [tuple(b.atoms) for b in topology.bonds]",
"def casas(self):\n casas = []\n for propietario in self.propietarios:\n for casa in propietario.casas:\n casas.append(casa)\n return casas",
"def coordinates(self):\n ships_coors = []\n if self._dir in (Direction.UP, Direction.DOWN):\n for val in range(self._len):\n new_y = self._y_pos + val\n ships_coors.append((self._x_pos, new_y))\n elif self._dir in (Direction.LEFT, Direction.RIGHT):\n for val in range(self._len):\n new_x = self._x_pos + val\n ships_coors.append((new_x, self._y_pos))\n return ships_coors",
"def get_nodes(self):\n return list(map(lambda x: x[0], self.__nodes))",
"def get_directions():\n return [(1, 0), (0, 1), (-1, 0), (0, -1)]",
"def path(self):\n\t\tnode, path_back = self, []\n\t\twhile node:\n\t\t\tpath_back.append(node)\n\t\t\tnode = node.parent\n\t\treturn list(reversed(path_back))",
"def coordinates(self):\n logging.debug('Get coordinates from text')\n result = []\n blocks = self.del_comm(blocks=True)\n coor = re.compile('[FXYZ][+-]?[0-9]+(\\.[0-9]+)?')\n for line in blocks:\n coord_line = False\n comm = line.split()\n temp = []\n for c in comm:\n if c == 'G1':\n coord_line = True\n if coord_line and coor.match(c):\n temp.append(c)\n if temp:\n result.append(temp)\n return result",
"def calculate_paths(topology):\n nodes = topology['nodes']\n edges = topology['links']\n\n dist = [[len(nodes) + 1 for x in range(len(nodes))] for y in range(len(nodes))]\n paths = [[[] for x in range(len(nodes))] for y in range(len(nodes))]\n\n for e in edges.values():\n s, d = int(e['source']), int(e['target'])\n dist[s][d] = dist[d][s] = 1\n paths[s][d] = [e['id']]\n paths[d][s] = [e['id']]\n\n for k in range(len(nodes)):\n for i in range(len(nodes)):\n for j in range(len(nodes)):\n if dist[i][k] + dist[k][j] < dist[i][j]:\n dist[i][j] = dist[i][k] + dist[k][j]\n paths[i][j] = paths[i][k] + paths[k][j]\n return paths",
"def get_constraint_list(self):\n constraints = []\n for i in xrange(self.num_repeats):\n # Using start_index, start each domain at the correct index when flattening out points in COBYLA.\n constraints.extend(self._domain.get_constraint_list(start_index=self.dim * i))\n return constraints",
"def _vertex_arrays_to_list(x_coords_metres, y_coords_metres):\n\n _check_polyline(\n x_coords_metres=x_coords_metres, y_coords_metres=y_coords_metres)\n\n num_vertices = len(x_coords_metres)\n vertex_list_xy_metres = []\n for i in range(num_vertices):\n vertex_list_xy_metres.append((x_coords_metres[i], y_coords_metres[i]))\n\n return vertex_list_xy_metres",
"def get_conjugated_nodes(self):\n sets = []\n self.get_backbone()\n m = self.mbb\n for bi in m.GetBonds():\n #print ' -- idx = ', bi.GetIdx()\n n = len(sets)\n iconj = bi.GetIsConjugated()\n ins = ( bt2bo[ bi.GetBondType() ] > 1 ) # is non-single bond?\n if iconj or ins:\n ia1, ia2 = bi.GetBeginAtomIdx(), bi.GetEndAtomIdx()\n set_i = set([ia1, ia2])\n if n == 0:\n sets.append( set_i )\n else:\n for j, set_j in enumerate(sets):\n if set_i.intersection( set_j ) > set([]):\n sets[j].update( set_i )\n else:\n if set_i not in sets: sets.append( set_i )\n #print '-- sets = ', sets\n sets_u = cim.merge_sets(sets)\n return sets_u",
"def connected_component(self, id1: int) -> list:\n if self._graph is None or self._graph.get_node(id1) is None:\n return []\n\n self.reset_tags() # This method executes a BFS and tag nodes so reset_tags() must be called.\n\n # Traverse the original graph, from node id1, and tag all reachable nodes\n ans = []\n src = id1 # alias\n original_graph = self.get_graph()\n self.traverse_breadth_first(src, original_graph)\n # Transpose/Reverse graph's edges\n transposed_graph = self.reverse_graph()\n # Traverse the transposed graph, from node id1, and un-tag all reachable nodes\n self.traverse_breadth_first(src, transposed_graph)\n\n # Iterate over nodes in the transposed graph and find the nodes that are tagged twice!\n for key in transposed_graph.get_all_v():\n node = transposed_graph.get_node(key)\n if node.tag == 2:\n ans.append(self._graph.get_node(node.key)) # Append original node\n return ans",
"def array(self):\n return list(self.sequence)",
"def get_cities_of_route(network, route):\n result = []\n if len(route) >= 1:\n road_id = route[0]\n result.append( get_start(network,road_id) )\n result.append( get_end(network,road_id) )\n for i in range(1,len(route)):\n road_id = route[i]\n result.append( get_end(network,road_id) )\n return result",
"def neighbors(node, topology):\n return [n for n in topology[node]]",
"def get_paths(self, depth=None):\n if not isinstance(self.ref_cell, Cell):\n return []\n if self.origin is not None:\n trans = numpy.array(self.origin)\n else:\n trans = None\n if self.rotation is not None:\n rot = self.rotation * numpy.pi / 180.0\n else:\n rot = None\n paths = self.ref_cell.get_paths(depth=depth)\n array = []\n for i in range(self.columns):\n for j in range(self.rows):\n spc = numpy.array([self.spacing[0] * i, self.spacing[1] * j])\n for path in paths:\n array.append(\n libcopy.deepcopy(path).transform(\n trans, rot, self.magnification, self.x_reflection, spc\n )\n )\n return array",
"def get_vertices(self, crs=None):\n if crs is None:\n vertices = []\n for poly_vertices in self.vertices:\n vertices.append([np.array(v) for v in poly_vertices])\n return vertices\n else:\n vertices = []\n for poly_vertices in self.vertices:\n poly = []\n for ring_vertices in poly_vertices:\n poly.append(np.array([_reproject(v[:2], self.crs, crs)\n for v in ring_vertices]))\n vertices.append(poly)\n return vertices",
"def get_vertices(self):\n output = []\n \n for vertex in self.adjacency_list:\n output.append(vertex.value)\n\n return output",
"def get_coordinates_list(self):\n return [tweet['coordinates'][::-1] for tweet in self.tweets_data]",
"def coord_list_t(connected_data, t):\n coord_list = []\n for spot_id in connected_data:\n this_spot_data = connected_data[spot_id]\n row = this_spot_data[this_spot_data[:,0] == t]\n if (len(row) > 0):\n row = list(row[0])\n spot_coords = [spot_id] + row[2:5]\n coord_list.append(spot_coords)\n return coord_list",
"def vertices(self) -> list[Point]:\n a = Point(self.array[..., 0, :], copy=False)\n b = Point(self.array[..., 1, :], copy=False)\n return [a, b]",
"def vertices(self):\n return list(self._graph)",
"def get_adjacency_list(self):\n \n # Get maximum node value\n max_index = max([node.value for node in self.nodes])\n \n # Initialize list long enough to reach max_index\n adjacency_list = [None] * (max_index+1)\n\n # Iterate over all edges\n for edge in self.edges:\n\n from_val = edge.node_from.value # Get node from value\n to_val = edge.node_to.value # Get node to value\n edge_val = edge.value # Get edge value\n\n # Store to and edge values at from value index\n # If there's already content stored at index...\n if adjacency_list[from_val]:\n # Append new content\n adjacency_list[from_val].append((to_val, edge_val))\n else:\n # Else, assign new content as a list\n adjacency_list[from_val] = [(to_val, edge_val)]\n \n # Return list of (to value, edge value) tuples indexed by from value\n return adjacency_list",
"def nodes(self):\n return self.sort_dict(self.trajectory_data)",
"def get_vertices_list(feature):\n return [\n point\n for part in Geometry.get_multipolygon(feature)\n for ring in part\n for point in ring[0:-1]\n ]",
"def to_list(self):\n return [self._position, self._focal_point, self._viewup]",
"def _conv_bbox_to_list(bbox):\n array = list()\n for r in range(bbox[0, 1], bbox[1, 1] + 1):\n for c in range(bbox[0, 0], bbox[1, 0] + 1):\n array.append([c, r])\n return array",
"def getNeighbors(self):\n targets = set()\n for arc in self._arcsFrom:\n targets.add(arc.getFinish())\n return [ node for node in sorted(targets) ]",
"def get_all_connected_nodes(self, where_to=OUTGOING):\n\n list_of_all_nodes = []\n\n if not self._directed or where_to == Vertex.OUTGOING:\n for edge in self._outgoing:\n list_of_all_nodes.append(edge.return_other_side(self))\n elif where_to == Vertex.INCOMING:\n for edge in self._incoming:\n list_of_all_nodes.append(edge.return_other_side(self))\n\n return list_of_all_nodes",
"def pipeline_nodes(self) -> List[Node]:\n return [node for pipeline in self.pipelines for node in pipeline.nodes]",
"def list_conns(self):\n\t\tres = []\n\t\tself.AL.acquire()\n\t\tfor ls in self.ls.keys():\n\t\t\tinfo = self.ls[ls]\n\t\t\tres.append(\n\t\t\t\t(\n\t\t\t\t\t\"Relay\", \"LOCAL\", info[\"local\"], info[\"peer\"],\n\t\t\t\t\tinfo[\"port\"], info[\"got\"], None,\n\t\t\t\t\t)\n\t\t\t\t)\n\t\tfor s in self.s2i.keys():\n\t\t\tinfo = self.s2i[s]\n\t\t\tif info[\"creator\"] == self.cid:\n\t\t\t\tfai = \"LOCAL\"\n\t\t\t\ttai = info[\"peer\"]\n\t\t\telse:\n\t\t\t\tfai = info[\"creator\"]\n\t\t\t\ttai = info[\"peer\"]\n\t\t\tres.append(\n\t\t\t\t(\n\t\t\t\t\t\"Conn\", fai, info[\"local\"], tai, info[\"port\"],\n\t\t\t\t\tinfo[\"recv\"], info[\"send\"]\n\t\t\t\t\t)\n\t\t\t\t)\n\t\tself.AL.release()\n\t\treturn res",
"def get_conn_matrix_vector(self):\n\n vect = []\n for line in sorted(self.connection_matrix):\n for item in self.connection_matrix[line]:\n vect.append(item)\n\n return vect",
"def strongly_connected_component_subgraphs(G):\n cc=strongly_connected_components(G)\n graph_list=[]\n for c in cc:\n graph_list.append(G.subgraph(c))\n return graph_list",
"def to_list(self):\n _return = []\n pointer = self.first\n while pointer is not None:\n _return.append(pointer.data)\n pointer = pointer.next\n return _return",
"def calculate_ctrl_pts(self) -> list[tuple]:\n ctrl_pts = []\n # Skip last point if path is non-cyclic\n point_inds = range(self.num_points) if self.is_cyclic else range(self.num_points - 1)\n for i in point_inds:\n z_i = self.points[i]\n z_j = self.points[(i + 1) % self.num_points]\n rho_coefficient = z_i.alpha * velocity(z_i.theta, z_j.phi)\n sigma_coefficient = z_j.beta * velocity(z_j.phi, z_i.theta)\n ctrl_pt_a = z_i + (1 / 3) * rho_coefficient * cmath.exp(complex(0, z_i.theta)) * (z_j - z_i)\n ctrl_pt_b = z_j - (1 / 3) * sigma_coefficient * cmath.exp(complex(0, -z_j.phi)) * (z_j - z_i)\n ctrl_pts.append((ctrl_pt_a.real, ctrl_pt_a.imag))\n ctrl_pts.append((ctrl_pt_b.real, ctrl_pt_b.imag))\n return ctrl_pts",
"def output_cluster_list(active_sites, clusters):\n atoms = []\n for c in clusters:\n clust = []\n for elem in c:\n clust.append(active_sites[elem])\n atoms.append(clust)\n return atoms",
"def topological_sort_generator(self):\n from sage.graphs.linearextensions import LinearExtensions\n try:\n return LinearExtensions(self).list()\n except TypeError:\n raise TypeError('Digraph is not acyclic; there is no topological sort (or there was an error in sage/graphs/linearextensions.py).')",
"def path(self):\n node, path_back = self, []\n while node:\n path_back.append(node)\n node = node.parent\n return list(reversed(path_back))",
"def path(self):\n node, path_back = self, []\n while node:\n path_back.append(node)\n node = node.parent\n return list(reversed(path_back))",
"def waypoints(self):\n\t\treturn [Star(star_id, galaxy=self.galaxy) for delay, star_id, order, num_ships in self.data.o]",
"def coord_list(self, part_name: str) -> List:\n part = self.parts[part_name]\n if isinstance(part, Polygon):\n # Note that in shapely, the first coord is repeated at the end, which we\n # trim off:\n return list(np.array(part.exterior.coords.xy).T)[:-1]\n elif isinstance(part, LineString):\n return list(np.array(part.coords.xy).T)[:]",
"def get_MultiPolyLists(mpoly,coord_type='x'):\n if coord_type == 'x':\n i=0\n elif coord_type == 'y':\n i=1\n\n # Get the x or y coordinates\n c = []\n if isinstance(mpoly,Polygon):\n mpoly = [mpoly]\n for poly in mpoly: # the polygon objects return arrays, it's important they be lists or Bokeh fails\n exterior_coords = poly.exterior.coords.xy[i].tolist();\n interior_coords = []\n for interior in poly.interiors:\n if isinstance(interior.coords.xy[i],list):\n interior_coords += [interior.coords.xy[i]];\n else:\n interior_coords += [interior.coords.xy[i].tolist()];\n c.append([exterior_coords, *interior_coords])\n return c",
"def angles(self) -> list[npt.NDArray[np.float_]]:\n result = []\n a = cast(Segment, self.edges[-1])\n for b in self.edges:\n b = cast(Segment, b)\n result.append(angle(a.vertices[1], a.vertices[0], b.vertices[1]))\n a = b\n\n return result",
"def to_list(self) -> list:\n return self.A.tolist()"
] | [
"0.6540596",
"0.63142204",
"0.6299031",
"0.62282807",
"0.61203295",
"0.60599744",
"0.58927417",
"0.58664745",
"0.58310354",
"0.5793973",
"0.5747925",
"0.5732554",
"0.57276785",
"0.56795806",
"0.5675118",
"0.5665343",
"0.565245",
"0.5644378",
"0.5638672",
"0.5636335",
"0.5635213",
"0.5629478",
"0.56287163",
"0.56279325",
"0.561738",
"0.56163603",
"0.5603294",
"0.55837274",
"0.55623645",
"0.555877",
"0.55560005",
"0.5555998",
"0.5551129",
"0.55452555",
"0.5510685",
"0.54958385",
"0.54944146",
"0.5491334",
"0.54874724",
"0.54838806",
"0.54786915",
"0.5469671",
"0.5461558",
"0.54612726",
"0.54432374",
"0.54312205",
"0.5423056",
"0.5396468",
"0.5394328",
"0.53887886",
"0.53804964",
"0.5379218",
"0.537797",
"0.5375543",
"0.5375318",
"0.53716147",
"0.5360667",
"0.5359682",
"0.5345035",
"0.53397757",
"0.5335975",
"0.53359175",
"0.5331403",
"0.53288317",
"0.5328635",
"0.53278726",
"0.5326174",
"0.53172314",
"0.53031385",
"0.5291886",
"0.5288048",
"0.5281661",
"0.52777565",
"0.5269698",
"0.5240076",
"0.5236248",
"0.52329767",
"0.5229438",
"0.5221996",
"0.5221939",
"0.5221482",
"0.5210738",
"0.52063364",
"0.52052665",
"0.52029276",
"0.52019286",
"0.5191658",
"0.51906276",
"0.51891005",
"0.5181543",
"0.5173619",
"0.5157561",
"0.5156179",
"0.51505464",
"0.51467097",
"0.51467097",
"0.5137743",
"0.51351297",
"0.5132613",
"0.5131838",
"0.51289093"
] | 0.0 | -1 |
Returns the vertex symbol of a topology as a list of strings. | def get_vs(self, name):
        # Delegate the lookup to the wrapped provider (self.mfp)
        return self.mfp.get_vs(name)
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_vertices(self):\n return str(self.vert_dict.keys())",
"def graph(g):\n return str(g.adjacencyList())",
"def list_vertices(self):\n return list(self.graph_dict.keys())",
"def __str__(self):\n vList = []\n for vertex in self:\n vList.append(vertex.name)\n gStr = \"The DiGraph contains _vertices: {0}\".format(\" \".join(vList))\n return gStr",
"def vertices(self):\n return list(self.graph_dict.keys())",
"def vertices(self):\r\n return list(self.__graph_dict.keys())",
"def get_vertex_keys(self):\n return self.vertList.keys()",
"def getGraphPointsNames(self):\n return [gp.id for gp in self.getGraphPoints()]",
"def vertices(self):\n return list(self.__graph_dict.keys())",
"def vertices(self):\n return list(self.__graph_dict.keys())",
"def vertices(self):\n return list(self.__graph_dict.keys())",
"def get_symbol(self):\n return []",
"def vertices(self):\n return list(self.__graph.values())",
"def __str__(self):\n s = ''\n for vertex in self.vertices:\n s += vertex.__str__()\n s += \"\\n\"\n return s",
"def get_vertices(self):\n return self.graph.keys()",
"def protocol_names(self):\n\n return tuple([k.name for k in self.query(Protocol).order_by(Protocol.name)])",
"def getVertices(self):\n return list(self.adjList.keys())",
"def return_vertexName(self, index):\n return self.__name_list[index]",
"def edge_vertices(edge):\n return [edge.vertex1, edge.vertex2]",
"def edgesFromVertex(u):\r\n edgeRepresentation = lambda v: f\"({u}, {v}, {self.getCapacity((u, v))}, {self.getFlow((u,v))})\"\r\n return \", \".join(map(edgeRepresentation, sorted(self.adjacent[u])))",
"def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval",
"def vertices(self):\n return list(self._graph)",
"def get_vertices(self):\n return self.vertList.keys()",
"def edges_names(self):\n return [(edg[0].name, edg[1].name) for edg in self._edges]",
"def vertices(self):\n return self.keys()",
"def vertices(self):\r\n return self.adjacent.keys()",
"def getEdges(self):\n # for node in graph,\n # return node -> node for j in graph[node]\n\n return [\"->\".join([str(n1), str(n2)]) for n1 in self.graph.keys() for n2 in self.graph[n1]]",
"def get_vertices(self):\n return list(self.vertices.keys())",
"def __repr__(self):\n return repr((self.head_vertex, self.tail_vertex))",
"def obtener_vertices(self):\n return list(self.vertices.keys())",
"def out_vertices(self, vertex):\n return self[vertex].keys()",
"def client_node_edge_point(self) -> List[str]:\n return self._client_node_edge_point",
"def get_vertices(self) -> []:\n return [i for i in self.adj_list]",
"def vertices(self):\n s = set([x for x in self.edges.keys()])\n t = set([y for v in self.edges.values() for (y,d) in v.items()])\n v = s.union(t)\n return list(v)",
"def getGraphPointNamesString(self):\n names = []\n for gp in self.getGraphPoints():\n if hasattr(aq_base(gp), 'isBroken') and gp.isBroken():\n names.append('%s(<span style=\"color: red\">missing</span>)' %\n gp.id)\n else:\n names.append(gp.id)\n return ', '.join(names)",
"def _repr_(self):\n return 'A vertex at ' + repr(self.vector());",
"def edgesFromVertex(u):\r\n edgeRepresentation = lambda v: f\"({u}, {v}, {self.capacity[(u, v)]})\"\r\n return \", \".join(map(edgeRepresentation, self.residualNeighbors(u)))",
"def __repr__(self):\n return 'Vertex(%s)' % repr(self.label)",
"def __repr__(self):\n return 'Vertex(%s)' % repr(self.label)",
"def get_vertices(self):\n if self.vert_list.keys() != None:\n return self.vert_list.keys()\n raise KeyError(\"Vertex not found\")",
"def dump_vertex(self, vertex):\n\n mylist = self.__graph_dict[vertex]\n logging.debug( \"** Mylist : \", mylist)",
"def _repr_(self):\n return 'A vertex at ' + repr(self._representation_vector);",
"def getVertexNumbers(self):\n return self.vertexIndex.keys()",
"def __str__(self):\n return np.array2string(self.graph.toarray())",
"def __str__(self):\n stringRepresentation = []\n for node in self.getNodes():\n stringRepresentation.append(\"->\".join(\n (str(node), str(self.graph[node]))))\n\n return str(stringRepresentation)",
"def edges_key(graph):\n return tuple(graph.edges())",
"def toStl(self):\n return 'vertex {0:.6e} {1:.6e} {2:.6e}'.format(\n self.mV[0], self.mV[1], self.mV[2])",
"def get_node_variable_names(self):\n return [b\"\".join(_i).strip().decode()\n for _i in self._f.variables[\"name_nod_var\"][:]]",
"def vertices(self):\n return self._outgoing.keys()",
"def __repr__(self):\n return repr((self.head_vertex, self.tail_vertex, self.weight))",
"def __repr__(self):\n return repr((self.head_vertex, self.tail_vertex, self.weight))",
"def get_vertices(self):\n output = []\n \n for vertex in self.adjacency_list:\n output.append(vertex.value)\n\n return output",
"def get_graph(self):\n return json.dumps(self.graph.get_edgelist(), separators=(',',':'))",
"def vertex_ids(self):\n return self.get_ids()",
"def psv_name_list(self):\n return list(self._link_reg.psv_names)",
"def node_name_list(self):\n return list(self._node_reg.keys())",
"def str(self) -> List[Tuple[str, str]]:\n kl = self.keys()\n vl = self.values()\n return [str(kl[idx]) + \",\" + str(vl[idx]) for idx in range(len(kl))]",
"def part_one():\n return \"\".join(networkx.lexicographical_topological_sort(G))",
"def __str__(self):\n pList = []\n for vertex in self._parents:\n pList.append(vertex.name)\n vStr = (\"Vertex {0} has value: {1} and is a child of vertices:\"\n \" {2}\".format(self.name, str(self.data), \" \".join(pList)))\n return vStr",
"def __str__(self):\n\t\treturn str(self.graph)",
"def mesh_names(self):\n meshes = []\n for vname in self.nc.variables.keys():\n try:\n if self.nc.variables[vname].cf_role == 'mesh_topology':\n meshes.append(vname)\n except AttributeError:\n pass\n return meshes",
"def __repr__(self):\n s = [\"{} vertices, {} edges\\n\".format(self._V, self._E)]\n for v in range(self._V):\n s.append(\"%d : \" % (v))\n for w in self._adj[v]:\n s.append(\"%d \" % (w))\n s.append(\"\\n\")\n\n return \"\".join(s)",
"def _permutation_to_vertex(self, p):\n return (\n tuple(p._labels[0]),tuple(p._labels[1]),\n tuple(p._twin[0]),tuple(p._twin[1]))",
"def dump_graph(self):\n\n edges = []\n for vertex in self.__graph_dict:\n mylist = list(vertex)\n logging.debug(\"mylist : \", mylist)",
"def V(self) -> list:\n return list(self._graph.values())",
"def _tupleListToStrings(self):\n graphColorStrings = []\n previousSelection = self.colorlist.GetSelection()\n print(repr(self.graphColors))\n if isinstance(self.graphColors, str):\n self.graphColors = eval(self.graphColors)\n for col in self.graphColors:\n col1 = '%.2f' % float(col[0])\n col2 = '%.2f' % float(col[1])\n col3 = '%.2f' % float(col[2])\n graphColorStrings.append(', '.join([col1, col2, col3]))\n self.colorlist.SetItems(graphColorStrings)\n if 0 <= previousSelection < len(graphColorStrings):\n self.colorlist.SetSelection(previousSelection)\n return graphColorStrings",
"def canonical_vertex(self):\n return self.L.zero(), self.K.one()",
"def __str__(self):\n result = [] \n node = self.head\n while node is not None:\n result.append(str(node.value))\n node = node.next_node \n return '[' + ', '.join(result) + ']'",
"def __str__(self):\n result = [] \n node = self.head\n while node is not None:\n result.append(str(node.value))\n node = node.next_node \n return '[' + ', '.join(result) + ']'",
"def get_edges(graph):\n edges = []\n for vertex in graph.keys():\n connected_nodes = graph[vertex]\n for node in connected_nodes:\n edges.append(str(vertex + node))\n\n return edges",
"def __str__(self):\n class_name_str = str(self.__class__.__name__) + \": (\"\n head_str = str(self.head_vertex) + \", \"\n tail_str = str(self.tail_vertex) + \", \"\n weight_str = str(self.weight) + \")\"\n attributes_str = head_str + tail_str + weight_str\n str_rep = class_name_str + attributes_str\n return str_rep",
"def __str__(self):\n class_name_str = str(self.__class__.__name__) + \": (\"\n head_str = str(self.head_vertex) + \", \"\n tail_str = str(self.tail_vertex) + \", \"\n weight_str = str(self.weight) + \")\"\n attributes_str = head_str + tail_str + weight_str\n str_rep = class_name_str + attributes_str\n return str_rep",
"def _permutation_to_vertex(self, p):\n return (tuple(p._labels[0]),tuple(p._labels[1]),\n tuple(p._twin[0]), tuple(p._twin[1]),\n tuple(p._flips[0]), tuple(p._flips[1]))",
"def gpv_name_list(self):\n return list(self._link_reg.gpv_names)",
"def get_vertices(dfg):\n vertices = list()\n for item in dfg.keys():\n if \"&\" in str(item[0]):\n str(item[0]).replace(\"&\", \"&\")\n vertices.append(item[0])\n\n vertices.append(item[1])\n vertices.sort()\n return vertices",
"def __str__(self):\n class_name_str = str(self.__class__.__name__) + \": (\"\n attributes_str = str(self.head_vertex) + \", \" + \\\n str(self.tail_vertex) + \")\"\n str_rep = class_name_str + attributes_str\n return str_rep",
"def vertices(self):\n top_exp = TopologyUtils.TopologyExplorer(self.topods_shape(), ignore_orientation=True)\n return map(Vertex, top_exp.vertices())",
"def _get_rhops(self, vertex: str) -> List[Tuple[str, str]]:\n if isinstance(vertex, rdflib.term.URIRef):\n vertex = Vertex(str(vertex)) # type: ignore\n elif isinstance(vertex, str):\n vertex = Vertex(vertex) # type: ignore\n hops = []\n\n predicates = self._transition_matrix[vertex]\n for pred in predicates:\n assert len(self._transition_matrix[pred]) == 1\n for obj in self._transition_matrix[pred]:\n hops.append((pred, obj))\n return hops",
"def get_vertices_list(feature):\n return [\n point\n for part in Geometry.get_multipolygon(feature)\n for ring in part\n for point in ring[0:-1]\n ]",
"def _symbols_of_output(label: str) -> List[str]:\n if label == common.EPSILON:\n return [label]\n\n # We add a new state transition arc for each character of the output token.\n return list(label)",
"def E(self) -> list:\n res = []\n for v in self.V():\n res.extend([(v.name, i) for i in v.get_connections().keys()])\n return res",
"def _get_shops(self, vertex: str) -> List[Tuple[str, str]]:\n if not vertex.startswith(\"http://\"):\n return []\n self.endpoint.setQuery(\n \"\"\"\n SELECT ?p ?o WHERE {\n <\"\"\"\n + str(vertex)\n + \"\"\"> ?p ?o .\n }\n \"\"\"\n )\n\n self.endpoint.setReturnFormat(JSON)\n results = self.endpoint.query().convert()\n neighbors = []\n for result in results[\"results\"][\"bindings\"]:\n predicate, obj = result[\"p\"][\"value\"], result[\"o\"][\"value\"]\n if predicate not in self.label_predicates:\n neighbors.append((predicate, obj))\n return neighbors",
"def __str__(self):\r\n def edgesFromVertex(u):\r\n \"\"\"\r\n Represents edges incident to the given vertex.\r\n \"\"\"\r\n edgeRepresentation = lambda v: f\"({u}, {v}, {self.capacity[(u, v)]})\"\r\n return \", \".join(map(edgeRepresentation, self.residualNeighbors(u)))\r\n\r\n def adjacencyLists():\r\n \"\"\"\r\n Represents edges incident to relevant vertices\r\n \"\"\"\r\n anyNeighbor = lambda u: any(self.neighbors(u))\r\n verticesWithNeighbors = filter(anyNeighbor, sorted(self.vertices()))\r\n return map(edgesFromVertex, verticesWithNeighbors)\r\n\r\n return \"\\n\".join(adjacencyLists())",
"def __repr__(self):\n return str(self.nodes)",
"def vertices(self):\n return self.pointlist",
"def get_symbols_list(self):\n return self.symbols_list",
"def as_str(self):\n connectivity_str = '_'.join(map(str, self.values))\n return connectivity_str",
"def get_activation_names(model: onnx_pb.ModelProto) -> List[str]:\n activation_names = get_graph_intermediate_activations(model.graph)\n activation_names.extend([node.name for node in model.graph.output])\n return activation_names",
"def __str__(self):\n s = ''\n for node in self.nodes:\n s += '\\n\\n'+str(node)+'\\n\\t'\n edges = node.getChildren()\n keys = edges.keys()\n keys.sort()\n for key in keys:\n bounds = edges[key].getSuffix()\n s += str(edges[key])+' '\n for i in xrange(bounds[0], bounds[1]):\n s += self.target[i]\n s += '\\n\\t'\n return s",
"def path(self):\n path = self.bidirectional_cpp.getPath()\n # format as list on return as SWIG returns \"tuple\"\n if len(path) <= 0:\n return None\n\n _path = []\n # Convert path to its original types and return\n for p in path:\n if p in [\"Source\", \"Sink\"]:\n _path.append(p)\n else:\n if \"int\" in self._original_node_type.__name__:\n _path.append(int(p))\n elif \"str\" in self._original_node_type.__name__:\n _path.append(str(p))\n return _path",
"def return_vertexIndex(self, name):\n return self.__names[name]",
"def get_unique_vertex_name(self, node: Node) -> str:\n\n if node not in self.uniqueVertexMap_:\n self.uniqueVertexMap_[node] = self.uniqueVertexNo_\n self.uniqueVertexNo_ += 1\n\n return f\"v{self.uniqueVertexMap_[node]}\"",
"def list_syms():\n\tSymStringVec=[];\n\tSymStringVec.append(\"CSYM\");\n\tSymStringVec.append(\"DSYM\");\n\tSymStringVec.append(\"TET_SYM\");\n\tSymStringVec.append(\"OCT_SYM\");\n\tSymStringVec.append(\"ICOS_SYM\");\n\tSymStringVec.append(\"ISYM\");\n\treturn SymStringVec",
"def vertexes(self):\n theta = self.orientation\n shifts = np.array([np.cos(theta), np.sin(theta)]) * self.a\n return self.coords + (shifts[:, None] * [-1, 1]).T",
"def toCsvHeadwayStrings(self) -> [str]:\n return [str(self.getId()), str(self.getHeadway())]",
"def symbol_table(self) -> str:\n return self._symbol_table",
"def getVertex(self, key):\n return self.vertList[key]",
"def vertex_adjacencies(self):\n try:\n return self._vertex_adjacencies\n except AttributeError:\n self._vertex_adjacencies = \\\n [ [ v.index(), \n [n.index() for n in v.neighbors()] \n ] for v in self.Vrepresentation() ]\n return self._vertex_adjacencies",
"def get_vsys_fifo_names(backend):\n return (_VSYS_FMT_IN % backend, _VSYS_FMT_OUT % backend)",
"def get_san_gnames(self):\n return self.load_gnames(self.san)",
"def edgify(vertices:list)->list:\n edges = []\n for k in range(0, len(vertices) - 1):\n edges.append([vertices[k], vertices[k + 1]])\n return edges"
] | [
"0.6693307",
"0.64824003",
"0.6428527",
"0.6385017",
"0.61859804",
"0.6184684",
"0.6171001",
"0.61704546",
"0.6142896",
"0.6142896",
"0.6142896",
"0.60300994",
"0.60281855",
"0.6027096",
"0.59981346",
"0.5959071",
"0.5957562",
"0.5946406",
"0.5943226",
"0.5909223",
"0.59061",
"0.5897466",
"0.5867181",
"0.5784396",
"0.57806396",
"0.5763651",
"0.5749707",
"0.572029",
"0.57169646",
"0.5716505",
"0.57118684",
"0.57034975",
"0.56951725",
"0.5671719",
"0.5669737",
"0.564276",
"0.5631885",
"0.56162924",
"0.56162924",
"0.55990547",
"0.557304",
"0.5563493",
"0.5556836",
"0.5549104",
"0.55474997",
"0.5534111",
"0.552087",
"0.5509594",
"0.5509296",
"0.5502266",
"0.5502266",
"0.5490793",
"0.54763865",
"0.5474954",
"0.54674596",
"0.54635036",
"0.5457523",
"0.54350966",
"0.54314804",
"0.5422156",
"0.5420533",
"0.5411247",
"0.5404729",
"0.53978246",
"0.539145",
"0.5388196",
"0.5368792",
"0.5366337",
"0.5366337",
"0.53603566",
"0.5360257",
"0.5360257",
"0.5346013",
"0.5345413",
"0.53434134",
"0.5336734",
"0.532871",
"0.53274333",
"0.5314429",
"0.5306778",
"0.52949506",
"0.52945423",
"0.5288605",
"0.5284203",
"0.52748424",
"0.5267089",
"0.52629215",
"0.52605027",
"0.5250884",
"0.52256036",
"0.52254844",
"0.52077645",
"0.5207143",
"0.5201684",
"0.5194706",
"0.5190312",
"0.51804966",
"0.5176041",
"0.5172609",
"0.5153645",
"0.51535404"
] | 0.0 | -1 |
Searches nets with given coordination sequences and given vertex symbols, and returns the corresponding net names as a list of strings. | def search_cs(self, cs, vs, cfilter=True):
        assert isinstance(cs, list)
        assert isinstance(vs, list)
        nets = self.mfp.search_cs(cs, vs)
        rl = []
        if cfilter:
            # Collect catenated nets (names containing '-c') ...
            for n in nets:
                if '-c' in n: rl.append(n)
            # ... and drop them from the returned names
            for n in rl: nets.remove(n)
        return nets | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_names(self, net):\n net_name = self.known_name[id(net)]\n\n base_lists = ['ensembles', 'nodes', 'connections', 'networks', 'probes']\n\n for k in dir(net):\n # If it's not a private attribute, a built in function \n # and not a Nengo object\n if not k.startswith('_') and k not in base_lists:\n # iterate through the attributes of the network that are a list\n v = getattr(net, k)\n if isinstance(v, list):\n for i, obj in enumerate(v):\n # If this object is not already a known name\n if not self.known_name.has_key(id(obj)):\n # Combine the name, the network, the attribute and \n # the index for the new identifier\n # This happens for things like connections \n # and other objects\n n = '%s.%s[%d]' % (net_name, k, i)\n self.known_name[id(obj)] = n\n else:\n # If it's not a list, no need to iterate\n self.known_name[id(v)] = '%s.%s' % (net_name, k)\n\n for base_type in base_lists:\n for i, obj in enumerate(getattr(net, base_type)):\n name = self.known_name.get(id(obj), None)\n # If there was no name found already in the known names\n if name is None:\n name = '%s.%s[%d]' % (net_name, base_type, i)\n self.known_name[id(obj)] = name\n\n for n in net.networks:\n self.find_names(n)",
"def _get_shops(self, vertex: str) -> List[Tuple[str, str]]:\n if not vertex.startswith(\"http://\"):\n return []\n self.endpoint.setQuery(\n \"\"\"\n SELECT ?p ?o WHERE {\n <\"\"\"\n + str(vertex)\n + \"\"\"> ?p ?o .\n }\n \"\"\"\n )\n\n self.endpoint.setReturnFormat(JSON)\n results = self.endpoint.query().convert()\n neighbors = []\n for result in results[\"results\"][\"bindings\"]:\n predicate, obj = result[\"p\"][\"value\"], result[\"o\"][\"value\"]\n if predicate not in self.label_predicates:\n neighbors.append((predicate, obj))\n return neighbors",
"def path_maker(orbit_dict, vertex):\n path_list = [vertex]\n while path_list[-1] != 'COM':\n # print(path_list)\n target = path_list[-1]\n for key in orbit_dict.keys():\n if target in orbit_dict[key]:\n path_list.append(key)\n return path_list[::-1]",
"def get_component_paths(\n graph_client: GremlinClient, topology_id: str, topology_ref: str\n) -> List[List[str]]:\n\n sources_sinks: Dict[str, List[str]] = get_source_and_sink_comps(\n graph_client, topology_id, topology_ref\n )\n\n sgt: GraphTraversalSource = graph_client.topology_subgraph(\n topology_id, topology_ref\n )\n\n output: List[List[str]] = []\n\n for source in sources_sinks[\"sources\"]:\n # Pick a start vertex for this source\n start: Vertex = sgt.V().has(\"component\", source).next()\n for sink in sources_sinks[\"sinks\"]:\n LOG.debug(\n \"Finding paths from source component: %s to sink component: %s\",\n source,\n sink,\n )\n # Find one path from the source vertex to any sink vertex and emit the\n # components as well as the edges.\n full_path: List[Union[str, Edge]] = (\n sgt.V(start)\n .repeat(out(\"logically_connected\").simplePath())\n .until(has(\"component\", sink))\n .path()\n .by(\"component\")\n .by()\n .limit(1)\n .next()\n )\n\n # Filter out the edges and keep the component strings\n path: List[str] = [\n element for element in full_path if isinstance(element, str)\n ]\n\n output.append(path)\n\n return output",
"def build_sym_geom_adjacency(geoms, max_gnn=100):\n global INTERNAL_PARAMETERS\n min_gnn = INTERNAL_PARAMETERS['min_geom_neighbors']\n assert min_gnn < max_gnn, \"Too high minimum number of neighbors\"\n n_pts = geoms.shape[0]\n for n_neighbors in range(min_gnn, max_gnn + 1):\n # find the lowest number of NN s.t. the graph is not too disconnected\n C = build_geom_neighbor_graph(geoms, n_neighbors)\n neighbs = C.indices.reshape((n_pts, n_neighbors))\n C = C + C.T\n C.data[:] = 1\n n_comp, _ = sparse.cs_graph_components(C)\n if n_comp == 1:\n print \"# use n_neighbors=%d\" % n_neighbors\n break\n elif n_comp < 1:\n raise ValueError('Bug: n_comp=%d' % n_comp)\n if n_comp > 1:\n print \"# use maximum n_neighbors=%d (%d components)\" % (\n n_neighbors, n_comp)\n return n_comp, C, neighbs",
"def _get_nodes_from_symbol(sym):\n if not isinstance(sym, Symbol):\n raise TypeError('sym must be an `mxnet.symbol.Symbol`,'\n ' received type {}'.format(str(type(sym))))\n conf = json.loads(sym.tojson())\n nodes = conf['nodes']\n data2op = {} # key: data id, value: list of ops to whom data is an input\n for i, node in enumerate(nodes):\n if node['op'] != 'null': # node is an operator\n input_list = node['inputs']\n for idx in input_list:\n if idx[0] == 0: # do not include 'data' node in the op scope\n continue\n if idx[0] in data2op:\n # nodes[idx[0]] is a data as an input to op nodes[i]\n data2op[idx[0]].append(i)\n else:\n data2op[idx[0]] = [i]\n\n # In the following, we group data with operators they belong to\n # by attaching them with operator names as scope names.\n # The parameters with the operator name as the prefix will be\n # assigned with the scope name of that operator. For example,\n # a convolution op has name 'conv', while its weight and bias\n # have name 'conv_weight' and 'conv_bias'. In the end, the operator\n # has scope name 'conv' prepended to its name, i.e. 'conv/conv'.\n # The parameters are named 'conv/conv_weight' and 'conv/conv_bias'.\n node_defs = []\n for i, node in enumerate(nodes):\n node_name = node['name']\n op_name = node['op']\n kwargs = {'op': op_name, 'name': node_name}\n if op_name != 'null': # node is an operator\n inputs = []\n input_list = node['inputs']\n for idx in input_list:\n input_node = nodes[idx[0]]\n input_node_name = input_node['name']\n if input_node['op'] != 'null':\n inputs.append(_scoped_name(input_node_name, input_node_name))\n elif idx[0] in data2op and len(data2op[idx[0]]) == 1 and data2op[idx[0]][0] == i:\n # the data is only as an input to nodes[i], no else\n inputs.append(_scoped_name(node_name, input_node_name))\n else: # the data node has no scope name, e.g. 'data' as the input node\n inputs.append(input_node_name)\n kwargs['input'] = inputs\n kwargs['name'] = _scoped_name(node_name, node_name)\n elif i in data2op and len(data2op[i]) == 1:\n # node is a data node belonging to one op, find out which operator this node belongs to\n op_node_name = nodes[data2op[i][0]]['name']\n kwargs['name'] = _scoped_name(op_node_name, node_name)\n\n if 'attrs' in node:\n # TensorBoard would escape quotation marks, replace it with space\n attr = json.dumps(node['attrs'], sort_keys=True).replace(\"\\\"\", ' ')\n attr = {'param': AttrValue(s=attr.encode(encoding='utf-8'))}\n kwargs['attr'] = attr\n node_def = NodeDef(**kwargs)\n node_defs.append(node_def)\n return node_defs",
"def dfs(g: nx.Graph, start_node: Any) -> str:\n\n way = []\n stack = [start_node]\n y = {node: [] for node in g.nodes}\n while stack:\n elem = stack.pop()\n way.append(elem)\n for node in list(g.neighbors(elem)):\n if node not in way:\n stack.append(node)\n y[node].extend((*y[elem], elem))\n print(y)\n return \"\".join(way)",
"def _bayes_net_graph(nodes: List[str], edges: List[Tuple[str, str]]):\n sources_and_target = [[target] for target in range(len(nodes))]\n\n for source_node, target_node in edges:\n source = nodes.index(source_node)\n target = nodes.index(target_node)\n sources_and_target[target].insert(0, source)\n\n return [\n tuple(st for st in sts) if len(sts) > 1 else sts[0]\n for sts in sources_and_target\n ]",
"def graph_node_names_details(model):\n\n node_details = namedtuple('node_details', ['node', 'outputs'])\n node_names_details = {}\n for initializer in model.initializer():\n initializer_name = initializer.name\n each_node = node_details(node=initializer, outputs=[])\n if initializer_name not in node_names_details:\n each_node.outputs.extend(get_initializer_children_names(model, initializer))\n node_names_details[initializer_name] = each_node\n for node in model.nodes():\n node_name = node.name\n output_names = node.output\n # onnx output has different name from node name\n for output_name in output_names:\n if output_name not in node_names_details:\n node_names_details[output_name] = node_name\n each_node = node_details(node=node, outputs=[])\n if node_name not in node_names_details:\n each_node.outputs.extend(get_node_children_names(model, node))\n node_names_details[node_name] = each_node\n for graph_input in model.graph().input:\n outputs = []\n node_name = graph_input.name\n for k, v in node_names_details.items():\n try:\n if node_name in v.node.input:\n outputs.append(k)\n except BaseException:\n continue\n each_node = node_details(node=graph_input, outputs=outputs)\n # if node_name not in node_names_details:\n node_names_details[node_name] = each_node\n\n return node_names_details",
"def get_paths_from(self, symbol):\n to_return = []\n visitation_queue = [self.head]\n while len(visitation_queue) != 0:\n visiting = visitation_queue.pop(0)\n for elem in visiting.children:\n visitation_queue.append(elem)\n if symbol in visiting.inputs:\n v = visiting\n model_trail = []\n while v.parent is not None:\n model_trail.append(v.m)\n v = v.parent\n to_return.append(SymbolPath(visiting.inputs, model_trail))\n return to_return",
"def get_san_gnames(self):\n return self.load_gnames(self.san)",
"def _compile_networks(self):\n\n _header_ = self._header_ + '_compile_networks(): '\n\n if self.verbose:\n print(_header_ + 'Compiling all networks ...')\n\n networks = []\n\n all_nidx = set(self.nidx2lidx.keys())\n\n while all_nidx:\n\n nidx0 = [all_nidx.pop()]\n network = set(nidx0)\n\n while nidx0 and all_nidx:\n\n nidx = set()\n\n for l in nidx0:\n lidx = self.nidx2lidx[l]\n for n in lidx:\n nidx |= self.lidx2nidx[n]\n\n nidx -= network\n network |= nidx\n all_nidx -= nidx\n nidx0 = nidx.copy()\n\n networks.append(network)\n\n if self.verbose:\n print(_header_ + 'Found %d networks' % len(networks))\n for i, network in enumerate(networks):\n print(' Network %d - %s' % (i, ','.join([str(j) for j in network])))\n\n return networks",
"def get_nodes_info(graph):\n nodes = collections.defaultdict(lambda: {\n 'name': None,\n 'sg': None,\n 'up': set(),\n 'down': set(),\n })\n for node in graph.get_nodes():\n name = node.get_name()\n nodes[name]['name'] = name.strip('\"')\n for sg in graph.get_subgraphs():\n sgname = sg.get_name().strip('\"')\n if sgname.startswith('cluster_'):\n sgname = sgname[8:]\n sgname = sgname.replace('__', '.').replace('_dash_', '-')\n for node in sg.get_nodes():\n name = node.get_name()\n nodes[name]['name'] = name.strip('\"')\n nodes[name]['sg'] = sgname\n return dict(nodes)",
"def enumerate_network(arg):\n\n network = ip_network(arg, strict=False)\n data = list(map(str, network.hosts()))\n data.insert(0, str(network.network_address))\n if network.prefixlen != network.max_prefixlen:\n data.append(str(network.broadcast_address))\n return data",
"def stringize_nx_graph(nx_graph):\n # graph attributes\n for key in nx_graph.graph.keys():\n if isinstance(nx_graph.graph[key], (list, set, np.ndarray)):\n nx_graph.graph[key] = \",\".join([\n str(val) for val in list(nx_graph.graph[key])])\n\n # node attributes\n for node_name, node_attrs in nx_graph.nodes(data=True):\n for key in node_attrs.keys():\n if isinstance(nx_graph.nodes[node_name][key], (list, set, np.ndarray)):\n nx_graph.nodes[node_name][key] = \",\".join([\n str(val) for val in nx_graph.nodes[node_name][key]])\n # adjust node name for nice output in cytoscape\n new_node_name = re.sub(r\"HCLUST.\\d+_\", \"\", node_name)\n new_node_name = new_node_name.replace(\".UNK.0.A\", \"\")\n nx_graph.nodes[node_name][\"name\"] = new_node_name\n \n # edge attributes\n for start_node, end_node in nx_graph.edges():\n for edge_idx in xrange(len(nx_graph[start_node][end_node])):\n edge_attrs = nx_graph[start_node][end_node][edge_idx]\n for key in edge_attrs.keys():\n if isinstance(edge_attrs[key], (list, set, np.ndarray)):\n nx_graph[start_node][end_node][edge_idx][key] = \",\".join([\n str(val) for val in nx_graph[start_node][end_node][edge_idx][key]])\n \n return nx_graph",
"def connecting(node1, node2):\n comp_list = []\n \"\"\":type : list[components.Component]\"\"\"\n if node1 == node2:\n return []\n for comp in node1.connected_comps:\n if comp.neg == node2:\n comp_list.append(comp)\n elif comp.pos == node2:\n comp_list.append(comp)\n return comp_list",
"def find_and_print_network_communities(G, code_dict=None):\n\n comm_dict = partition(G)\n\n comm_members = {}\n for comm in set(comm_dict.values()):\n countries = [node for node in comm_dict if comm_dict[node] == comm]\n if code_dict is not None:\n countries = [code_dict[code] for code in countries]\n\n comm_members[comm] = countries\n\n return comm_members, get_modularity(G, comm_dict)",
"def projection_to_graph(dfs_codes, mapper):\n\tg = build_graph(dfs_codes)\n\tlocal_dict = dict()\n\tedge_list = []\n\tfor n in g.nodes:\n\t\tlocal_dict[n.id] = mapper[n.label]\n\n\tfor n in g.nodes:\n\t\tfor e in n.edges:\n\t\t\t# print(local_dict[e.fromn], local_dict[e.to], mapper[e.label])\n\t\t\tedge_list.append((local_dict[e.fromn], local_dict[e.to], mapper[e.label]))\n\treturn edge_list",
"def bridge_search(start, connection, components):\n connecting = [c for c in components if connection in c]\n if not connecting:\n return [start]\n bridges = []\n for comp in connecting:\n remaining = components[:]\n remaining.remove(comp)\n new_connection = comp[0] if comp[0] != connection else comp[1]\n bridges += bridge_search(\n start + [comp], new_connection, remaining)\n return bridges",
"def bridges(species1_names, species2_names):\n k12 = filter(lambda s: re.search('K-12',s)!=None, species1_names)[0]\n return [(k12, species2_names[0]), (k12, species2_names[1]), (k12, species2_names[2])]",
"def get_all_sghop_info (nffg, return_paths=False):\n sg_map = {}\n for i in nffg.infras:\n for p in i.ports:\n for fr in p.flowrules:\n # if fr.external:\n # continue\n if fr.id not in sg_map:\n # The path is unordered!!\n path_of_shop = []\n flowclass = NFFGToolBox._extract_flowclass(fr.match.split(\";\"))\n sg_map[fr.id] = [None, None, flowclass, fr.bandwidth, fr.delay]\n # We have to find the BEGINNING of this flowrule sequence.\n inbound_link = NFFGToolBox._find_infra_link(nffg, p, outbound=False,\n accept_dyn=True)\n while inbound_link.type != 'DYNAMIC':\n path_of_shop.append(inbound_link)\n if inbound_link.src.node.type == 'SAP':\n break\n # The link is STATIC, and its src is not SAP so it is an Infra.\n prev_fr, prev_p = \\\n NFFGToolBox._get_flowrule_and_its_starting_port(\n inbound_link.src.node, fr.id)\n NFFGToolBox._check_flow_consistencity(sg_map, prev_fr)\n inbound_link = NFFGToolBox._find_infra_link(nffg, prev_p,\n outbound=False,\n accept_dyn=True)\n # 'inbound_link' is DYNAMIC here or it is STATIC and starts from\n # a SAP,\n # so the sequence starts here\n sg_map[fr.id][0] = inbound_link.src\n\n # We have to find the ENDING of this flowrule sequence.\n output_port = NFFGToolBox._get_output_port_of_flowrule(i, fr)\n if output_port is None:\n continue\n outbound_link = NFFGToolBox._find_infra_link(nffg, output_port,\n outbound=True,\n accept_dyn=True)\n while outbound_link.type != 'DYNAMIC':\n path_of_shop.append(outbound_link)\n if outbound_link.dst.node.type == 'SAP':\n break\n # The link is STATIC and its dst is not a SAP so it is an Infra.\n next_fr, _ = NFFGToolBox._get_flowrule_and_its_starting_port(\n outbound_link.dst.node, fr.id)\n # '_' is 'outbound_link.dst'\n next_output_port = NFFGToolBox._get_output_port_of_flowrule(\n outbound_link.dst.node, next_fr)\n NFFGToolBox._check_flow_consistencity(sg_map, next_fr)\n outbound_link = NFFGToolBox._find_infra_link(nffg,\n next_output_port,\n outbound=True,\n accept_dyn=True)\n # the 'outbound_link' is DYNAMIC here or finishes in a SAP, so the\n # flowrule sequence finished here.\n sg_map[fr.id][1] = outbound_link.dst\n\n if return_paths:\n sg_map[fr.id].append(path_of_shop)\n\n return sg_map",
"def _mapped_graph_list(self,G1, sname, POWER=None):\n logger.debug(f\"Matching circuit Graph nodes: {G1.nodes} edges:{G1.edges(data=True)}\")\n mapped_graph_list = {}\n for lib_ele in self.lib:\n block_name = lib_ele['name']\n if block_name==sname:\n continue\n G2 = lib_ele['graph']\n\n # Digital instances only transistors:\n if self._is_digital(G2,sname):\n continue\n if not self._is_small(G1, G2):\n continue\n\n if len(G2.nodes)<=len(G1.nodes):\n logger.debug(f\"Matching: {block_name} : {G2.nodes} {G2.edges(data=True)}\")\n GM = isomorphism.GraphMatcher(\n G1, G2,\n node_match = isomorphism.categorical_node_match(['inst_type'],\n ['nmos']),\n edge_match = isomorphism.categorical_edge_match(['weight'], [1]))\n if GM.subgraph_is_isomorphic():\n logger.debug(f\"ISOMORPHIC : {block_name}\")\n map_list = []\n\n for Gsub in GM.subgraph_isomorphisms_iter():\n\n all_nd = [key for key in Gsub.keys() if 'net' not in G1.nodes[key][\"inst_type\"]]\n logger.debug(f\"matched inst: {all_nd}\")\n if len(all_nd)>1 and self._is_clk(Gsub) :\n logger.debug(f\"Discarding match due to clock {Gsub}\")\n continue\n elif len(all_nd)>1 and self._is_do_not_identify(Gsub,sname):\n logger.debug(f\"Discarding match due to user constraint {Gsub}\")\n continue\n \n if block_name.startswith('DP') or block_name.startswith('CMC'):\n if G1.nodes[all_nd[0]]['values'] == G1.nodes[all_nd[1]]['values'] and \\\n compare_balanced_tree(G1,get_key(Gsub,'DA'),get_key(Gsub,'DB'),[all_nd[0]],[all_nd[1]]) :\n if 'SA' in Gsub.values() and \\\n compare_balanced_tree(G1,get_key(Gsub,'SA'),get_key(Gsub,'SB'),[all_nd[0]],[all_nd[1]]):\n map_list.append(Gsub)\n logger.debug(f\"Matched Lib: {' '.join(Gsub.values())}\")\n logger.debug(f\"Matched Circuit: {' '.join(Gsub)}\")\n # remove pseudo diff pair\n elif block_name.startswith('DP') and POWER is not None and get_key(Gsub,'S') in POWER:\n logger.debug(f\"skipping pseudo DP {POWER}: {' '.join(Gsub)}\")\n else:\n map_list.append(Gsub)\n logger.debug(f\"Matched Lib: {' '.join(Gsub.values())}\")\n logger.debug(f\"Matched Circuit: {' '.join(Gsub)} power:{POWER}\")\n else:\n logger.debug(f\"Discarding match {block_name} due to non matching branches\")\n elif block_name.startswith('SCM') and G1.nodes[all_nd[0]]['values'] != G1.nodes[all_nd[1]]['values']:\n logger.debug(f\"Discarding match {block_name} due to value mismatch\")\n\n else:\n map_list.append(Gsub)\n logger.debug(f\"Matched Lib: {' '.join(Gsub.values())}\")\n logger.debug(f\"Matched Circuit: {' '.join(Gsub)}\")\n if len(map_list)>1:\n fix_order_for_multimatch(G1,map_list,map_list[-1])\n mapped_graph_list[block_name] = map_list\n\n return mapped_graph_list",
"def protocol_names(self):\n\n return tuple([k.name for k in self.query(Protocol).order_by(Protocol.name)])",
"def get_netns_paths():\n paths = []\n if len(sys.argv) > 1:\n if '-pathsinline' in sys.argv: \n print('Please provide net_ns_path separated by comma')\n paths=sys.stdin.readline()\n paths=paths.split(',')\n else:\n for line in sys.stdin:\n paths.append(line)\n\n paths = [x.strip(' ').rstrip('\\n') for x in paths]\n paths = [x for x in paths if x]\n paths_id = [get_id_netnspath(x) for x in paths]\n return paths, paths_id",
"def get_annotation_names(viewer):\n\n layer_nodes_name = None\n layer_edges_name = None\n for layer in viewer.layers:\n if isinstance(layer, napari.layers.points.points.Points):\n layer_nodes_name = layer.name\n elif isinstance(layer, napari.layers.shapes.shapes.Shapes):\n layer_edges_name = layer.name\n if layer_nodes_name is not None and layer_edges_name is not None:\n break\n return layer_nodes_name, layer_edges_name",
"def get_coordinates_net(net_file, net_name):\r\n pl_file = net_file.replace('.nets', '.pl')\r\n net = {}\r\n net_name_number = int(net_name.replace('n', ''))\r\n nodes_in_net_num = 0\r\n node_names = []\r\n data = []\r\n pos = 0\r\n counter = -1\r\n with open(net_file) as nf:\r\n for num, line in enumerate(nf, 0):\r\n if \"NetDegree\" in line:\r\n counter += 1\r\n if counter == net_name_number:\r\n pos = num\r\n data = line.split()\r\n nodes_in_net_num = data[2]\r\n\r\n with open(net_file) as nf:\r\n for num, line in enumerate(nf, 0):\r\n if pos < num <= pos + int(nodes_in_net_num):\r\n data = line.split()\r\n node_names.append(data[0])\r\n\r\n data.clear()\r\n with open(pl_file) as p:\r\n for num, line in enumerate(p):\r\n if num == 0 or '#' in line or line == '\\n':\r\n continue\r\n else:\r\n data.append(line.split())\r\n\r\n for i in node_names:\r\n for j in data:\r\n if i == j[0]:\r\n net[i] = [j[1]]\r\n net[i].append(j[2])\r\n\r\n return net",
"def getOutputsNames(net):\r\n # Get the names of all the layers in the network\r\n layersNames = net.getLayerNames()\r\n # Get the names of the output layers, i.e. the layers with unconnected outputs\r\n return [layersNames[i[0] - 1] for i in net.getUnconnectedOutLayers()]",
"def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval",
"def return_layer_names(self):\n\n existing_layernames = []\n if self.node_data is not []:\n for lyrname, lyr in self.layer_lookup.items():\n if self.node_data[lyr].any():\n existing_layernames.append(lyrname)\n return existing_layernames",
"def _get_ring_nodes(m, namin=3, namax=9, remove_redudant=T):\n # first search for rings\n sets = []\n for i in range(namin, namax+1):\n #if i in [3,4,5]:\n pat_i = '*~1' + '~*'*(i-2) + '~*1'\n #else:\n # pat_i = '*:1' + ':*'*(i-2) + ':*1'\n Qi = Chem.MolFromSmarts( pat_i )\n for tsi in m.GetSubstructMatches(Qi):\n set_i = set(tsi)\n if set_i not in sets:\n sets.append( set(tsi) )\n if remove_redudant:\n # now remove those rings that are union of smaller rings\n n = len(sets)\n sets_remove = []\n ijs = itl.combinations( list(range(n)), 2 )\n sets_u = []\n for i,j in ijs:\n set_ij = sets[i].union( sets[j] )\n if (set_ij in sets) and (set_ij not in sets_remove):\n sets_remove.append( set_ij )\n sets_u = cim.get_compl(sets, sets_remove)\n else:\n sets_u = sets\n return sets_u",
"def extract_linear_strings(self): \n # there are at least three choices of how greedy to be.\n # min: each edge is its own feature\n # max: extract features as long as possible, and allow for 'T' junctions.\n # mid: break features at nodes with degree>2.\n # go with mid\n strings=[]\n edge_marks=np.zeros( self.Nedges(),'b1')\n\n for j in self.valid_edge_iter():\n if edge_marks[j]:\n continue\n edge_marks[j]=True\n\n trav=tuple(self.edges['nodes'][j])\n node_fwd=self.edges['nodes'][j,1]\n node_rev=self.edges['nodes'][j,0]\n\n node_string=[node_fwd,node_rev]\n\n for trav in [ (node_fwd,node_rev),\n (node_rev,node_fwd) ]:\n while 1:\n js = self.node_to_edges(trav[1])\n\n if len(js)!=2:\n break\n\n for j in js:\n jnodes=self.edges['nodes'][j]\n if trav[0] in jnodes:\n continue\n if edge_marks[j]:\n # possible if we go all the way around a ring.\n break\n edge_marks[j]=True\n nxt=[n for n in jnodes if n!=trav[1]][0]\n node_string.append(nxt)\n trav=(trav[1],nxt)\n node_string=node_string[::-1]\n\n feat_nodes=np.array( node_string )\n strings.append( feat_nodes )\n return strings",
"def getOutputsNames(net):\n # Get the names of all the layers in the network\n layersNames = net.getLayerNames()\n # Get the names of the output layers, i.e. the layers with unconnected outputs\n return [layersNames[i[0] - 1] for i in net.getUnconnectedOutLayers()]",
"def get_slot_names(self, *args, **kwargs):\n return self._optimizer.get_slot_names(*args, **kwargs)",
"def get_common_sources(\n self, targets: List[Tuple[str, str]], relation: str\n ) -> List[Node]:\n rel_str = \":%s\" % relation if relation else \"\"\n parts = [\n \"(s)-[%s]->({id: '%s'})\" % (rel_str, norm_id(*target)) for target in targets\n ]\n query = \"\"\"\n MATCH %s\n RETURN DISTINCT s\n \"\"\" % \",\".join(\n parts\n )\n nodes = [self.neo4j_to_node(res[0]) for res in self.query_tx(query)]\n return nodes",
"def find_all_connectors(rows):\r\n lines = []\r\n short_lines=[]\r\n counter=0\r\n road_list=['FF']\r\n for row in rows:\r\n strings=string.split(row,'\\n')[1:]\r\n for stringI in strings:\r\n tokens=string.split(stringI,'\\t')\r\n if len(tokens)>1:\r\n if tokens[11] in road_list:\r\n description=tokens[13]\r\n clues=string.split(description, ' ')\r\n for c in clues:\r\n if c=='to':\r\n pieces=string.split(description, 'to')\r\n elif c=='TO':\r\n pieces=string.split(description, 'TO')\r\n else:\r\n continue\r\n i=string.split(pieces[0], ' ')\r\n j=string.split(pieces[1], ' ')\r\n road_i=0\r\n road_j=0\r\n dir_i=None\r\n dir_j=None\r\n dir_i, road_i=handle_special_cases(i)\r\n dir_j, road_j=handle_special_cases(j)\r\n if dir_i==None or road_i==None:\r\n for s in i:\r\n try:\r\n road_i=int(s)\r\n except ValueError:\r\n dir_i=parse_direction(s, dir_i)\r\n if dir_j==None or road_j==None:\r\n for t in j:\r\n try:\r\n road_j=int(t)\r\n except ValueError:\r\n dir_j=parse_direction(t, dir_j)\r\n if dir_i==None or dir_j==None or road_i==0 or road_j==0:\r\n if dir_i==None:\r\n print 'bad i in : ', description\r\n if dir_j==None:\r\n print 'bad j in : ', description\r\n if (road_i==0 or road_j==0):\r\n print 'unhandled road case: ', description \r\n else:\r\n \r\n# print stringI\r\n if tokens[2]==dir_j:\r\n counter=counter+1\r\n lines.append([int(tokens[0]), float(tokens[7]), tokens[2], [float(tokens[8]), float(tokens[9])], int(tokens[1])])\r\n short_lines.append([road_i, dir_i, [float(tokens[8]), float(tokens[9])], road_j, dir_j])\r\n# print 'connecting from road '+str(road_i)+' in direction '+dir_i+' to '+str(road_j)+' in direction '+dir_j+' near postmile '+tokens[7] \r\n elif tokens[2]==dir_i:\r\n counter=counter+1\r\n lines.append([int(tokens[0]), float(tokens[7]), tokens[2], [float(tokens[8]), float(tokens[9])], int(tokens[1])])\r\n short_lines.append([road_i, dir_i, [float(tokens[8]), float(tokens[9])], road_j, dir_j])\r\n# print 'connecting from road '+str(road_i)+' in direction '+dir_i+' near postmile '+tokens[7]+' to '+str(road_j)+' in direction '+dir_j \r\n else:\r\n print 'WEIRD DIRECTION: ', description\r\n continue \r\n #print 'dir_i: ', dir_i\r\n# print 'dir_j: ', dir_j\r\n\r\n print 'on august 10th, there were 110 total connectors and 98 could be parsed'\r\n print counter\r\n return lines, short_lines",
"def getGraphPointsNames(self):\n return [gp.id for gp in self.getGraphPoints()]",
"def parse_graph_nodes(graph_def):\n name_to_node = {}\n for node_def in graph_def.node:\n name_to_node[node_def.name] = node_def\n return name_to_node",
"def xspace_to_tool_names(xspace_paths):\n raw_data, success = _pywrap_profiler.xspace_to_tools_data(\n xspace_paths, 'tool_names')\n if success:\n return [tool + '^' for tool in raw_data.decode().split(',')]\n return []",
"def _find_all_paths(self, start_vertex: str, end_vertex: str, path=[]):\n path = path + [start_vertex]\n if start_vertex == end_vertex:\n return [path]\n\n paths = []\n for vertex in self.graph[start_vertex]:\n if vertex not in path:\n extended_paths = self._find_all_paths(vertex,\n end_vertex,\n path)\n for p in extended_paths:\n paths.append(p)\n return paths",
"def coordinates(self):\n logging.debug('Get coordinates from text')\n result = []\n blocks = self.del_comm(blocks=True)\n coor = re.compile('[FXYZ][+-]?[0-9]+(\\.[0-9]+)?')\n for line in blocks:\n coord_line = False\n comm = line.split()\n temp = []\n for c in comm:\n if c == 'G1':\n coord_line = True\n if coord_line and coor.match(c):\n temp.append(c)\n if temp:\n result.append(temp)\n return result",
"def _get_input_output_node_names(nodes):\n input_names, output_names = set(), set()\n extension_output_names = set()\n for node in nodes:\n tf_node = node if isinstance(node,\n TensorflowNode) else TensorflowNode(node)\n output_names.add(node.name)\n # Add outputs for Split, Switch TensorArrayV3\n if tf_node.op_type == \"Split\":\n for i in range(1, tf_node.attr[\"num_split\"]):\n output_names.add(tf_node.name + \":{}\".format(i))\n if tf_node.op_type == \"Switch\":\n output_names.add(tf_node.name + \":1\")\n extension_output_names.add((tf_node.name, tf_node.name + \":1\"))\n if tf_node.op_type == \"TensorArrayV3\":\n output_names.add(tf_node.name + \":1\")\n extension_output_names.add((tf_node.name, tf_node.name + \":1\"))\n input_names.update(\n set([inp if inp[0] != \"^\" else inp[1:] for inp in tf_node.inputs]))\n inputs = input_names - output_names\n outputs = output_names - input_names\n while extension_output_names:\n ext_names = extension_output_names.pop()\n for name in ext_names:\n if name in outputs:\n outputs -= set(ext_names)\n break\n inputs.discard(None)\n return list(inputs), list(outputs)",
"def instances_of(self, name):\n stmt = \"MATCH (n:Concept { name: '%s' })<-[:is*1..2]-(neighbors) RETURN neighbors.name as name\" % name\n return map(lambda x: x.name.encode('utf-8'), self.graph.cypher.execute(stmt))",
"def get_networks_from_multiplex_pajek(pajek):\n\n def _extract_intra_links(pajek):\n \"\"\"Return dictionary of intralayer `nx.Graph`s from input formatted pajek\"\"\"\n string_links = re.findall(r\"\\d+ \\d+ \\d+.*\", pajek.split(\"*Intra\")[1].split(\"*Inter\")[0])\n intra_links = [list(map(eval, link.split())) for link in string_links]\n G_arr_intra = defaultdict(lambda: nx.Graph)\n for l in intra_links:\n G_arr_intra[l[0]].add_edge(l[1], l[2])\n return G_arr_intra\n \n def _extract_intra_links_from_multiplex(pajek):\n \"\"\"Return dictionary of intralayer `nx.Graph`s from output formatted pajek\"\"\"\n string_links = re.findall(r\"\\d+ \\d+ \\d+ \\d+.*\", pajek.split(\"*multiplex\")[1])\n value_links = [list(map(eval, link.split())) for link in string_links]\n G_arr_intra = defaultdict(lambda: nx.Graph)\n for l in value_links:\n if l[0] == l[2]:\n G_arr_intra[l[0]].add_edge(l[1], l[3])\n return G_arr_intra\n \n if \"*Intra\" in pajek:\n return _extract_intra_links(pajek)\n if \"*multiplex\" in pajek:\n return _extract_intra_links_from_multiplex(pajek)\n \n raise ValueError(\"No intra links inside pajek\")",
"def getSymbols(self, name: unicode, namespace: ghidra.program.model.symbol.Namespace) -> List[ghidra.program.model.symbol.Symbol]:\n ...",
"def dfs_names(self, start_node_num):\n return [self.graph.node_names[num] for num in self.dfs(start_node_num)]",
"def gprog():\n return [('ip','ip','u')]+[k for x in ['c','1','2','3'] for k in gcopy(\"op\"+x, 'ip')]",
"def get_common_targets(\n self,\n sources: List[Tuple[str, str]],\n relation: str,\n ) -> List[Node]:\n rel_str = \":%s\" % relation if relation else \"\"\n parts = [\n \"({id: '%s'})-[%s]->(t)\" % (norm_id(*source), rel_str) for source in sources\n ]\n query = \"\"\"\n MATCH %s\n RETURN DISTINCT t\n \"\"\" % \",\".join(\n parts\n )\n nodes = [self.neo4j_to_node(res[0]) for res in self.query_tx(query)]\n return nodes",
"def find_inlet_nodes(nodes, inlets_shp, gdobj):\r\n\r\n # Check that CRSs match; reproject inlet points if not\r\n inlets_gpd = gpd.read_file(inlets_shp)\r\n mask_crs = CRS(gdobj.GetProjection())\r\n if inlets_gpd.crs != mask_crs:\r\n inlets_gpd = inlets_gpd.to_crs(mask_crs)\r\n logger.info('Provided inlet points file does not have the same CRS as provided mask. Reprojecting.')\r\n\r\n # Convert all nodes to xy coordinates for distance search\r\n nodes_xy = gu.idx_to_coords(nodes['idx'], gdobj)\r\n\r\n # Map provided inlet nodes to actual network nodes\r\n inlets = []\r\n for inlet_geom in inlets_gpd.geometry.values:\r\n # Distances between inlet node and all nodes in network\r\n xy = inlet_geom.xy\r\n dists = np.sqrt((xy[0][0]-nodes_xy[0])**2 + (xy[1][0]-nodes_xy[1])**2)\r\n inlets.append(nodes['id'][np.argmin(dists)])\r\n\r\n # Append inlets to nodes dict\r\n nodes['inlets'] = inlets\r\n\r\n return nodes",
"def find_elements(channel_names):\n\n elements = []\n for i in range(1, 110, 1): \n elements.append(str(ELEMENTS[i].symbol))\n\n elements = sorted(set(channel_names) & set(elements), key = channel_names.index)\n\n return elements",
"def infoboxes_of_graph(self):\n infoboxes = []\n for nodeName in super(SynonymNetwork, self).nodes():\n infoboxes = infoboxes + self.infoboxes_of_graph_node(nodeName)\n return list(set(infoboxes))",
"def neighbors(self):\n return [e.name for e in self.edges()]",
"def getLocations(nodes, urls):\n\ttheurls = dict((u, urls[u]) for u in nodes)\n\tloclist = [urllib.parse.urlparse(url).netloc for url in theurls]",
"def getEdges(self):\n # for node in graph,\n # return node -> node for j in graph[node]\n\n return [\"->\".join([str(n1), str(n2)]) for n1 in self.graph.keys() for n2 in self.graph[n1]]",
"def _create_nodes_from_vertices(self, vertices: List[np.ndarray]) -> List[str]:\n nodes = []\n for vertice in vertices:\n lon, lat = self.proj(vertice[0], vertice[1], inverse=True)\n node = Node(self.id_count, lat, lon)\n nodes.append(node.id_)\n self.osm.add_node(node)\n return nodes",
"def decode_nodes(nodes):\n nrnodes = len(nodes) / 26\n nodes = struct.unpack(\"!\" + \"20sIH\" * nrnodes, nodes)\n for i in xrange(nrnodes):\n node_id, ip, port = nodes[i * 3], numToDottedQuad(nodes[i * 3 + 1]), nodes[i * 3 + 2]\n yield node_id, ip, port",
"def find_symbols(self, **kw):\n return list(self.ifind_symbols(**kw))",
"def list_connections(self, show_passthrough=True):\n excludes = set([name for name, data in self._exprgraph.nodes(data=True)\n if data['expr'].refs_parent()])\n if show_passthrough:\n return [(u, v) for u, v in self._exprgraph.edges() if not (u in excludes or v in excludes)]\n else:\n return [(u, v) for u, v in self._exprgraph.edges()\n if '.' in u and '.' in v and not (u in excludes or v in excludes)]",
"def nonterminals(symbols):\n if ',' in symbols: symbol_list = symbols.split(',')\n else: symbol_list = symbols.split()\n return [Nonterminal(s.strip()) for s in symbol_list]",
"def out(self, from_, *edge_classes):\n records = self.client.command('SELECT out({0}) FROM {1}'\n .format(','.join(self.coerce_class_names(edge_classes))\n , self.coerce_class_names(from_)))\n return [self.get_vertex(v) for v in records[0].oRecordData['out']] \\\n if records else []",
"def getSearchSpaceCoords(self):\r\n needed = {}\r\n coords = self.graph.getNodesCoords()\r\n for vertex, neighbours in self.search_space[1:]:\r\n needed[vertex] = coords[vertex]\r\n for arc in neighbours:\r\n needed[arc] = coords[arc]\r\n return needed",
"def get_uniprot_gene_info(uniprot_result):\n gene_lines = [l for l in uniprot_result.split('\\n') if l.startswith('GN')]\n\n gene_names = []\n\n for gn_line in gene_lines:\n parts = gn_line[2:].split(';')\n for p in parts:\n p = p.strip()\n if p.startswith('Name='):\n gene_names.append(p[5:])\n elif p.startswith('Synonyms='):\n gene_names += [s.strip() for s in p[9:].split(',')]\n\n return gene_names",
"def extract_nodes(graph):\n return graph.get_nodes()",
"def FindSymmetry(graph, ports:list, ports_weight:dict, stop_points:list):\n all_match_pairs={}\n non_power_ports = sorted(set(sorted(ports)) - set(stop_points))\n logger.debug(f\"sorted ports: {non_power_ports}\")\n for port1,port2 in combinations_with_replacement(non_power_ports,2):\n traversed =stop_points.copy()\n if sorted(ports_weight[port1]) == sorted(ports_weight[port2]) !=[0]:\n traversed+=[port1,port2]\n recursive_start_points(graph,all_match_pairs,traversed,port1,port2, ports_weight)\n all_match_pairs = {k:v for k,v in all_match_pairs.items() if len(v)>0}\n logger.debug(f\"all matches found starting from {port1} and {port2} pair: {pprint.pformat(all_match_pairs, indent=4)}\")\n\n return all_match_pairs",
"def enclosing_nodestring(self,x,max_nodes=None):\n if max_nodes<0:\n max_nodes=self.Nnodes()\n elif max_nodes is None:\n max_nodes=self.max_sides\n\n # lame stand-in for a true bounding polygon test\n edges_near=self.select_edges_nearest(x,count=6)\n potential_cells=self.find_cycles(max_cycle_len=max_nodes,\n starting_edges=edges_near)\n pnt=geometry.Point(x)\n for pc in potential_cells:\n poly=geometry.Polygon( self.nodes['x'][pc] )\n if poly.contains(pnt):\n return pc",
"def parse_street_waynodes(input, use_highway):\r\n way_key = use_highway and name_highway_key or name_key\r\n rels, ways, nodes = ParserOSM().parse(input, way_key=way_key)\r\n \r\n return ways, nodes",
"def get_all_roads_starting_from(network, city):\n return network[1][city][0]",
"def list_networks():\n return __sets.keys()",
"def getVisitableNodesNamed(self):\n\n return ((\"source\", self.subnode_source),)",
"def getVisitableNodesNamed(self):\n\n return ((\"source\", self.subnode_source),)",
"def getVisitableNodesNamed(self):\n\n return ((\"source\", self.subnode_source),)",
"def get_component_instance_lists(\n graph_client: GremlinClient, topology_id: str, topology_ref: str\n) -> Dict[str, List[Vertex]]:\n\n sgt: GraphTraversalSource = graph_client.topology_subgraph(\n topology_id, topology_ref\n )\n\n component_names: List[str] = sgt.V().values(\"component\").dedup().toList()\n\n output: Dict[str, List[Vertex]] = {}\n\n for component_name in component_names:\n\n output[component_name] = sgt.V().has(\"component\", component_name).toList()\n\n return output",
"def get_edges(graph):\n edges = []\n for vertex in graph.keys():\n connected_nodes = graph[vertex]\n for node in connected_nodes:\n edges.append(str(vertex + node))\n\n return edges",
"def get_activation_names(model: onnx_pb.ModelProto) -> List[str]:\n activation_names = get_graph_intermediate_activations(model.graph)\n activation_names.extend([node.name for node in model.graph.output])\n return activation_names",
"def _get_inbound_layer_names(self, layer):\n inbound_layer_names = []\n for inbound_node in self._inbound_node_generator(layer):\n # TODO(b/197935452): temporary fix when the input is a dictionary of\n # tensors. A comprehensive solution may be needed.\n if isinstance(inbound_node, dict):\n inbound_node = inbound_node.values()\n for connection_info in inbound_node:\n # input argument case.\n inbound_layer_names.append(connection_info[0])\n # **kwarg argument case.\n inbound_layer_names += [\n value[0] for value in connection_info[3].values() if isinstance(\n value, list)\n ]\n\n return inbound_layer_names",
"def generate_networkx_graphs(raw_graphs):\n\n source_graphs = [source_from_raw(raw) for raw in raw_graphs]\n target_graphs = [target_from_raw(raw) for raw in raw_graphs]\n\n return source_graphs, target_graphs",
"def _get_neighbours(kmer):\n assert (is_dna(kmer))\n bases = 'ACTG'\n result = set()\n for i in range(len(kmer)):\n for base in bases:\n result.add(kmer[:i] + base + kmer[(i + 1):])\n return result",
"def find_groups_from_ctypes(self, mesh, gtypes):\n ctypes = [self._ctypes[gtype] for gtype in gtypes]\n grp_names = []\n for geom in mesh.give_geom().get_children():\n if geom.get_shape_type() in ctypes:\n grp_names.append(geom.read_name())\n return grp_names",
"def get_nerspos(tokens, ners):\n pos_list = list()\n for ner in ners:\n pos = get_nerpos(tokens, ner)\n pos_list.append(pos)\n\n return pos_list",
"def get_subgraphs(nodes):\n subs = collections.defaultdict(dict)\n for name, node in nodes.items():\n subs[node['sg']][name] = node\n return subs",
"def getSourcesFromVehicle(vehicleName):\n pass",
"def paths_list(ctx):\n for path in ctx.obj['CLIENT'].paths.list():\n if not path.source.name:\n cidr_blocks = [subnetwork.cidr_block for subnetwork in path.source.subnetworks]\n source_name = \",\".join(cidr_blocks)\n network_name = \"external\"\n else:\n source_name = path.source.name\n network_name = path.source.network.name\n click.echo(\"%s:%s -(%s)-> %s:%s\" % (network_name, source_name, path.port,\n path.network.name, path.destination.name))",
"def construct_network_from_neighbours_list(related_characters: list):\n graph = nx.Graph()\n for edge in related_characters:\n sentiment = edge[1]\n color = ''\n if sentiment == 'Positive':\n color = 'g'\n elif sentiment == 'Negative':\n color = 'r'\n elif sentiment == 'Neutral':\n color = 'k'\n # graph.add_node(edge[0][0], popularity=\n graph.add_edge(edge[0][0], edge[0][1], color=color, weight=edge[2])\n\n return graph",
"def allReachable(g, n):\n nodeList = []\n \n keysList = []\n\n for key in g.keys():\n keysList.append(key)\n \n for i in keysList:\n if findPath(g, n, i) != None:\n nodeList.append(i)\n \n nodeList.sort()\n nodeList.remove(n)\n \n return nodeList",
"def get_source_and_sink_comps(\n graph_client: GremlinClient, topology_id: str, topology_ref: str\n) -> Dict[str, List[str]]:\n\n sgt: GraphTraversalSource = graph_client.topology_subgraph(\n topology_id, topology_ref\n )\n\n sources: List[str] = sgt.V().where(\n in_(\"logically_connected\").count().is_(0)\n ).values(\"component\").dedup().toList()\n\n sinks: List[str] = sgt.V().where(out(\"logically_connected\").count().is_(0)).values(\n \"component\"\n ).dedup().toList()\n\n return {\"sources\": sources, \"sinks\": sinks}",
"def verbrogentwogs(prot, twogs, prot_in_twogs):\n vt_lijst = []\n for x in prot_in_twogs:\n for y in prot_in_twogs:\n if indexfind(\"%s %s\" % (x, y), twogs):\n vt_lijst.append(\"%s %s\" % (x, y))\n vt_lijst.append(\"%s %s\" % (x, prot))\n vt_lijst.append(\"%s %s\" % (y, prot))\n return vt_lijst",
"def nodes_names_map(self):\n return {nd.name: nd for nd in self.nodes}",
"def findRelationships(RelationShipList):\r\n for i in RelationShipList:\r\n getPos = cmds.xform(i[1], q=True, t=True, ws=True)\r\n cmds.xform(i[0], t=getPos, ws=True)",
"def print_nodes(graph):\n print([n.name for n in graph.node])",
"def _get_gpu_names() -> Sequence[str]:\n result = []\n for device in device_lib.list_local_devices():\n if device.device_type != \"GPU\":\n continue\n desc = device.physical_device_desc\n\n fields = desc.split(\",\")\n for field in fields:\n name, value = field.split(\":\", maxsplit=1)\n name = name.strip()\n value = value.strip()\n if name == \"name\":\n result.append(value)\n return result",
"def extract_symbol_names_from_target(subast) -> list:\n # initialise an empty list of the symbol names\n symbol_names = []\n # walk the target object to look for ast.Name instances\n for walked_ast in ast.walk(subast):\n if type(walked_ast) is ast.Name:\n symbol_names.append(walked_ast.id)\n return symbol_names",
"def get_constellation(startswith):\n constellation = []\n for sat in satellites:\n if sat.name.startswith(startswith):\n serialized = sat.serialize()\n if serialized:\n constellation.append(serialized)\n return constellation",
"def graph(g):\n return str(g.adjacencyList())",
"def neighbors(node, topology):\n return [n for n in topology[node]]",
"def getStationsName(self) :\n names = []\n for sts in self._stations :\n names.append(sts.getName())\n\n return names",
"def network_nodes_species(self):\n G, mapping = self.network()\n waste, resources, intmed_products = self.amenities()\n\n node_dict = {}\n\n for nd in G:\n # print(nd)\n if isinstance(nd, int):\n node_dict[nd] = \"r\"\n elif nd in self.commodity:\n node_dict[nd] = \"Xc\"\n elif nd in waste:\n node_dict[nd] = \"w\"\n elif nd in resources:\n node_dict[nd] = \"Xr\"\n elif nd in intmed_products:\n node_dict[nd] = \"InPr\"\n\n return node_dict",
"def _form_computation_graph(self, idx):\n _list, _set = list, set\n if type(idx) is int:\n node_layers = [np.array([idx], dtype=np.int64)]\n elif type(idx) is list:\n node_layers = [np.array(idx, dtype=np.int64)]\n\n for _ in range(self.n_layers):\n prev = node_layers[-1]\n arr = [node for node in prev]\n arr.extend([e[0] for node in arr for e in self.nbrs_t[node]])\n arr = np.array(_list(_set(arr)), dtype=np.int64)\n node_layers.append(arr)\n node_layers.reverse()\n\n mappings = [{j: i for (i, j) in enumerate(arr)} for arr in node_layers]\n\n return node_layers, mappings",
"def get_components(graph):\n return [graph.subgraph(c).copy() for c in nx.connected_components(graph)]",
"def get_st_graph_info(self, pos_dict):\n\n all_nodes = set()\n all_edge_types = defaultdict(set)\n all_node_edges_and_neighbors = dict()\n \n for bagname in pos_dict:\n node_type_pos_dict = pos_dict[bagname]\n N = len(node_type_pos_dict)\n \n nodes = list()\n for (node_name, node_type) in node_type_pos_dict.keys():\n nodes.append(STGNode(node_name, node_type))\n assert len(nodes) == N\n \n pos_matrix = np.array([list(node_type_pos_dict[node_type_pair]) for node_type_pair in node_type_pos_dict])\n assert pos_matrix.shape == (N, 2)\n\n adj_matrix = self.get_adj_matrix(pos_matrix)\n assert adj_matrix.shape == (N, N)\n \n node_edges_and_neighbors = {node: defaultdict(set) for node in nodes}\n edge_types = defaultdict(list)\n for i in xrange(N):\n curr_node = nodes[i]\n for j in xrange(N):\n curr_neighbor = nodes[j]\n if adj_matrix[i, j] == 1:\n sorted_edge_type = sorted([curr_node.type, curr_neighbor.type])\n edge_type = '-'.join(sorted_edge_type)\n edge_types[curr_node].append(edge_type)\n\n node_edges_and_neighbors[curr_node][edge_type].add(curr_neighbor)\n\n all_nodes.update(nodes)\n \n for node in edge_types:\n all_edge_types[node].update(edge_types[node])\n \n for node in node_edges_and_neighbors:\n if node in all_node_edges_and_neighbors:\n for edge_type in node_edges_and_neighbors[node]:\n if edge_type in all_node_edges_and_neighbors[node]:\n all_node_edges_and_neighbors[node][edge_type].update(node_edges_and_neighbors[node][edge_type])\n else:\n all_node_edges_and_neighbors[node][edge_type] = node_edges_and_neighbors[node][edge_type]\n else:\n all_node_edges_and_neighbors[node] = node_edges_and_neighbors[node] \n \n # List-ifying these so looping over them later on is faster.\n for node in all_edge_types:\n all_edge_types[node] = list(all_edge_types[node])\n \n for node in all_node_edges_and_neighbors:\n for edge_type in all_node_edges_and_neighbors[node]:\n all_node_edges_and_neighbors[node][edge_type] = list(all_node_edges_and_neighbors[node][edge_type])\n \n return list(all_nodes), all_edge_types, all_node_edges_and_neighbors",
"def construct_graph_connection(coord_list, radie):\n\n connection_distance = []\n connection = []\n for j, data in enumerate(coord_list):\n '''Calculate the relative distance of the nodes'''\n distance = np.hypot(coord_list[:,0]-data[0], coord_list[:,1]-data[1])\n '''save nodes which are in range'''\n #for i, data in enumerate(distance):\n for i in range(j+1, len(distance)):\n data = distance[i]\n if data < radie:\n connection.append([j, i])\n connection_distance.append(data)\n\n\n connection_distance = np.array(connection_distance)\n connection = np.array(connection)\n return connection, connection_distance",
"def encode_nodes(nodes):\n n = []\n for node in nodes:\n n.extend([node[0], dottedQuadToNum(node[1].host), node[1].port])\n return struct.pack(\"!\" + \"20sIH\" * len(nodes), *n)"
] | [
"0.5345469",
"0.49848792",
"0.4949889",
"0.49467316",
"0.4918402",
"0.4912904",
"0.49020374",
"0.48935178",
"0.48827243",
"0.48259458",
"0.47976613",
"0.47864097",
"0.47622603",
"0.47563088",
"0.4737711",
"0.4736165",
"0.46989784",
"0.46940482",
"0.46887234",
"0.4671127",
"0.46568128",
"0.46462807",
"0.46405697",
"0.46336645",
"0.46333134",
"0.46217793",
"0.46207842",
"0.46110335",
"0.45953333",
"0.45841807",
"0.45820048",
"0.4581608",
"0.4570312",
"0.4561496",
"0.4560385",
"0.4560372",
"0.45599627",
"0.45580012",
"0.45552474",
"0.4545657",
"0.45291615",
"0.45281938",
"0.45263848",
"0.45203888",
"0.45186156",
"0.45177168",
"0.45114046",
"0.4504256",
"0.4498036",
"0.44901893",
"0.44884807",
"0.44867247",
"0.44854966",
"0.4479947",
"0.44785428",
"0.44779462",
"0.4467461",
"0.44668072",
"0.4454636",
"0.44524178",
"0.44459036",
"0.444503",
"0.44431478",
"0.4442316",
"0.44406295",
"0.44375074",
"0.44343048",
"0.44330198",
"0.44330198",
"0.44330198",
"0.44326892",
"0.4427406",
"0.44272226",
"0.4424819",
"0.442276",
"0.4422385",
"0.4419738",
"0.44098097",
"0.44080028",
"0.44079772",
"0.43925932",
"0.4391887",
"0.43916",
"0.43907753",
"0.43840533",
"0.43806934",
"0.43780887",
"0.43779048",
"0.43708196",
"0.43671167",
"0.43603033",
"0.43600613",
"0.435252",
"0.43495652",
"0.43426958",
"0.43393072",
"0.43390334",
"0.43381688",
"0.43362975",
"0.43299568"
] | 0.44458896 | 61 |
Gets the scaled topo file for a given supercell id. | def get_scaledtopo(self, id):
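        # self.mfp is assumed to be the underlying MOF+ API client; the call is
        # simply forwarded and the raw response (presumably the file as a list
        # of lines) is returned unmodified.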
lines = self.mfp.get_scaledtopo(id)
return lines | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def id_to_base_id(self, id):\n if self.xy_tiling is None and self.pc_tiling is None:\n return id\n return self.get_tile_from_path(id)[1]",
"def _get_feat_geo_from_file(self, id):\n path_feature, path_mask, path_geo = self._get_name_save(id)\n feature_filt_padded = torch.load(path_feature, map_location=torch.device('cpu')).long()\n mask = torch.load(path_mask, map_location=torch.device('cpu'))\n geo_filt_padded = torch.load(path_geo, map_location=torch.device('cpu'))\n return feature_filt_padded, mask, geo_filt_padded",
"def sersic_2d_image(data_dir):\n path = \"sersic_2d_image.fits.gz\"\n sersic_2d_path = os.path.join(data_dir, path)\n return fits.getdata(sersic_2d_path)",
"def get_sf (pdb_id):\n pdb_ftp_link = \"https://ftp.rcsb.org/pub/pdb/data/structures/all/structure_factors/\"\n url = pdb_ftp_link + \"r{}sf.ent.gz\".format(pdb_id)\n r = requests.get(url)\n with open(sf_path + \"r{}sf.cif.gz\".format(pdb_id), 'wb') as f:\n f.write(r.content)\n #unzips the downloaded file\n os.system(\"gunzip \"+sf_path + \"r{}sf.cif.gz\".format(pdb_id))\n return \"r{}sf.cif\".format(pdb_id)",
"def getbyid(self, id):\n\n return esd.retrieve(id)",
"def get_imc_topo(topo_file):\n topo_graph = nx.Graph()\n with open(topo_file, 'r') as f:\n for line in f.readlines():\n if (len(line) > 10) and (line[0] != '#'):\n split_data = line.split()\n source = split_data[0]\n dest = split_data[2]\n #capacity = 1000 # We are fixing this to one.\n capacity = get_imc_capacity(split_data[1], split_data[3])\n if not topo_graph.has_edge(source, dest):\n topo_graph.add_edge(source, dest, capacity = capacity)\n # Checks graph for any componnets and returns the largest one.\n topo_graph = validate_graph(topo_graph)\n f.close()\n return topo_graph",
"def cell_for_id(self, id):\n\t\tcell_id = (id & self.id2cell_mask) | u0xFFFFFFFF\n\t\tassert np.all(self.is_cell_id(cell_id))\n\n\t\t# TODO: Debugging (remove when happy)\n\t\tx, y, t, _ = self._xyti_from_id(id)\n\t\tcell_id2 = self._cell_id_for_xyt(x, y, t)\n\t\tassert np.all(cell_id2 == cell_id), 'cell_id2=%s cell_id=%s %s x=%s y=%s t=%s' % (cell_id2, cell_id, bin(cell_id), x, y, t)\n\n\t\treturn cell_id",
"def get_topogram(self, _id):\n return self.make_request(\"GET\", \"topograms/\"+_id, {})",
"def _get_disk_by_id(worker):\n cmd = (\n f\"oc debug nodes/{worker} --to-namespace={config.ENV_DATA['cluster_namespace']} \"\n f\"-- chroot /host ls -la /dev/disk/by-id/\"\n )\n return run_cmd(cmd)",
"def get_size_by_id(self, size_id):\n sizes = self._driver.list_sizes()\n size = [i for i in sizes if i.id == size_id][0]\n return size",
"def map_id_to_device(dev_map, osd_id):\n for elem in dev_map:\n if elem['id'] == osd_id:\n return elem['path']",
"def get_geometry(id):\n geom = read_kml()\n result = geom[\"geometry\"][id]\n # print(f\"get_geometry(id={id.__repr__()}) --> {result}\")\n # result.plot()\n return result",
"def get_path(self, path_id):\n\t\tpass",
"def _get_ss_proposal(self, img_id):\n\n if not os.path.isdir(os.path.join(self.root_dir, 'SSProposals')):\n print ('First time run. Refomatting selective search files ...')\n self._reformat_ss_data()\n\n cache_file = os.path.join(self.root_dir, 'SSProposals',\n img_id + '.pkl')\n\n with open(cache_file, 'rb') as fid:\n ss_proposals = cPickle.load(fid)\n return torch.from_numpy(ss_proposals['boxes'].astype(int)).float()",
"def get(cls, id):\n response = get_by_endpoint(\"computed_files/\" + str(id)).json()\n return ComputedFile(**response)",
"def get_from_gridfs(d, f):\n fs = gridfs.GridFS(d)\n b = fs.get(f).read()\n return b",
"def getDim(scale,supercell):\n \n # Check for standard scaling\n motiif_dict = {1:'molecular',supercell:'chains',\\\n supercell**2:'layered', supercell**3:'conventional'}\n if scale in motiif_dict:\n return(motiif_dict[scale])\n \n # If the structure is some intermediate, determine\n # which intermediate\n \n else:\n if scale < 1:\n motiif = 'shrunk molecular'\n elif scale < supercell:\n motiif = \"mol-chain\"\n elif scale < supercell**2:\n motiif = \"chain-2D\"\n elif scale < supercell**3:\n motiif = \"2D-conv\"\n else:\n motiif = 'Network size increased'\n return(motiif)",
"def get_sgd(self, id, name):\n # check if id exists in group definition\n if id in self.mstats.keys() and 'df' in self.mstats[id].keys():\n # print \"id %s in mstats\" % id\n type = 'group' if id.endswith('/') else 'dataset'\n sgd = {'id': id, 'type': type, 'ns':self.sdef['ns'], 'df': self.mstats[id]['df'],}\n # print \"found definition for %s in mstats, mstats=\" % id\n # pp.pprint(self.mstats)\n return sgd\n else:\n # see if parent group is specified in locations; if so, check for id in \n # locations list of members of parent group. Example for nwb format is are\n # \"UnitTimes/\" inside <module>/. <module> is parent group\n pid = self.sdef['id'] # parent id, e.g. \"<module>\"\n ns = self.sdef['ns']\n if pid in self.file.ddef[ns]['locations']:\n if id in self.file.ddef[ns]['locations'][pid]:\n type = 'group' if id.endswith('/') else 'dataset'\n # add id to mstats so can register creation of group\n self.mstats[id] = {'ns':ns, 'created': [], 'qty': '+', \n 'type': type} # todo: jeff, need to check df\n sgd = self.file.get_sdef(id, ns, \"referenced in make_subgroup\")\n # print \"id %s in %s location ns %s structures\" % (id, pid, ns)\n # example output: id UnitTimes/ in <module>/ location ns core structures\n # traceback.print_stack()\n return sgd\n else:\n print \"found parent %s in locations, but %s not inside\" % (pid, id)\n print \"locations contains:\"\n pp.pprint(self.file.ddef[ns]['locations'][pid])\n else:\n print \"did not find parent %s in locations for namespace %s\" % (pid, ns)\n print \"** Error, attempting to create '%s' (name='%s') inside group:\" % (id, name)\n print self.full_path\n print \"But '%s' is not a member of the structure for the group\" % id\n print \"Valid options are:\", self.mstats.keys()\n # print \"Extra information (for debugging): Unable to find definition for node %s\" % id\n # print \"mstats=\"\n # pp.pprint(self.mstats)\n traceback.print_stack()\n sys.exit(1)",
"def from_id(self, id_):\n return self._id_to_loadout.get(id_)",
"def get_climatology(gt_id, mask_df=None, shift=None):\n # Load global climatology if US climatology requested\n gt_id = gt_id.replace(\"us_\", \"global_\")\n climatology_file = os.path.join(\"data\", \"dataframes\",\n \"official_climatology-\"+gt_id+\"-1981-2010.h5\")\n return load_measurement(climatology_file, mask_df, shift)",
"def from_id(self, id_):\n return self._name_to_operator.get(id_)",
"def stich_from_id(id, title):\n response = requests.get('https://vangoghmuseum-assetserver.appspot.com/tiles?id=%s' % id)\n data = json.loads(response.text)\n stich(data, title)",
"def get(self, id):\n file = (\n self.drive.files()\n .get(\n fileId=id,\n fields=\"id, name\",\n supportsAllDrives=self.shared_drive[0],\n )\n .execute()\n )\n return file",
"def _get_depth_map_scale_subfolder(self):\n if self.im_scale <= 0.25:\n if self.im_scale <= 0.125:\n return \"Depth/0.125/\"\n else:\n return \"Depth/0.25/\"\n else: \n return \"Depth/\"",
"def get_by_id(self, id: str) -> \"Dataset\":\n raise NotImplementedError",
"def get_specific_tile(idx, tiles_gdf):\n tile_poly = tiles_gdf.iloc[idx]['geometry']\n # print(tile_poly.bounds)\n return tile_poly",
"def get_tile(self, tile, as_png=False, overwrite=True):\n zoom, row, col = tile\n output_path = self.config[\"output_name\"]\n zoomdir = os.path.join(output_path, str(zoom))\n rowdir = os.path.join(zoomdir, str(row))\n image_path = os.path.join(rowdir, str(col)+\".png\")\n if os.path.isfile(image_path):\n return send_file(image_path, mimetype='image/png')\n else:\n try:\n self.save_tile(tile)\n except:\n print \"tile not available\", tile\n size = self.tile_pyramid.tile_size\n empty_image = Image.new('RGBA', (size, size))\n return empty_image.tobytes()\n return send_file(image_path, mimetype='image/png')",
"def get_tile(cls, tile_id):\n\n return Tile.tile_listing.get(tile_id, None)",
"def get_by_id(self, id):\n return self._mzml_parser.get_by_id(id)",
"def get_file(self, sys_id):\n url = \"{}/file\".format(self._target(sys_id))\n r = self._client.session.get(url, stream=True)\n return r",
"def _get_pubchem_template_path(self, het_id):\n path = os.path.join(self.pubchem_templates, f\"{het_id}.sdf\")\n\n return path if os.path.isfile(path) else \"\"",
"def get_tile(tilefile,level,x,y):\n\t\n\ttf=file(tilefile,\"r\")\n\t\n\ttd=pickle.load(tf)\n\ta=td[(level,x,y)]\n\t\n\ttf.seek(a[0],1)\n\tret=tf.read(a[1])\n\t\n\ttf.close()\n\treturn ret",
"def _get_image_scale_subfolder(self):\n if self.im_scale <= 0.25:\n if self.im_scale <= 0.125:\n return \"Rectified_rescaled/0.125/\"\n else:\n return \"Rectified_rescaled/0.25/\"\n else:\n return \"Rectified/\"",
"def get_network_by_id(self, id):\n return self.network.get_network(id)",
"def get_image_by_id(id):\n return ImageModel.query.filter(ImageModel.id == id) \\\n .first()",
"def getGrid(dataKey, fileID):\r\n\r\n fileMeta = rmn.fstprm(dataKey)\r\n fileMeta['iunit'] = fileID\r\n gridID = rmn.ezqkdef(fileMeta)\r\n return gridID",
"def get_scalingip(context, id, fields=None):\n LOG.info('get_scalingip %s for tenant %s' % (id, context.tenant_id))\n filters = {'address_type': ip_types.SCALING, '_deallocated': False}\n scaling_ip = db_api.floating_ip_find(context, id=id, scope=db_api.ONE,\n **filters)\n if not scaling_ip:\n raise q_exc.ScalingIpNotFound(id=id)\n return v._make_scaling_ip_dict(scaling_ip)",
"def get_geolevel(self, idattr):\n return self.get_node(\n '/DistrictBuilder/GeoLevels/GeoLevel[@id=\"%s\"]' % idattr)",
"def get_server(self, id):\n\t\treturn self.__servers.get_server(id)",
"def _load_camera_geometry_from_fits_file(cam_id, geomfile='chercam.fits.gz'):\n camtable = Table.read(geomfile, hdu=\"CHERCAM\")\n geom = camtable[camtable['CAM_ID'] == cam_id]\n return geom",
"def fromId(id):\n if id == 'COPERNICUS/S2':\n return Sentinel2()\n elif id == 'COPERNICUS/S2_SR':\n return Sentinel2('SR')\n else:\n msg = '{} not recognized as a Sentinel 2 ID'\n raise ValueError(msg.format(id))",
"def get_collated_file(run):\n d=get_collated_dir(run)\n fname=\"%s-collated.fits\" % run\n return os.path.join(d, fname)",
"def _get_file_by_id(id):\n query = \"\"\"SELECT * FROM files WHERE id = (:id) LIMIT 1\"\"\"\n param_obj = {'id': id}\n return _execute(query, param_obj)",
"def import_gds(filename, cellname = None, flatten = False):\n gdsii_lib = gdspy.GdsLibrary()\n gdsii_lib.read_gds(filename)\n top_level_cells = gdsii_lib.top_level()\n if cellname is not None:\n if cellname not in gdsii_lib.cells:\n raise ValueError('[PHIDL] import_gds() The requested cell '\n '(named %s) is not present in file %s' \\\n % (cellname,filename))\n topcell = gdsii_lib.cells[cellname]\n elif cellname is None and len(top_level_cells) == 1:\n topcell = top_level_cells[0]\n elif cellname is None and len(top_level_cells) > 1:\n raise ValueError('[PHIDL] import_gds() There are multiple top-level '\n 'cells, you must specify `cellname` to select of '\n 'one of them')\n\n if flatten == False:\n D_list = []\n c2dmap = {}\n for cell in gdsii_lib.cells.values():\n D = Device(name = cell.name)\n D.polygons = cell.polygons\n D.references = cell.references\n D.name = cell.name\n for label in cell.labels:\n rotation = label.rotation\n if rotation is None:\n rotation = 0\n l = D.add_label(text = label.text,\n position = np.asfarray(label.position),\n magnification = label.magnification,\n rotation = rotation*180/np.pi,\n layer = (label.layer, label.texttype))\n l.anchor = label.anchor\n c2dmap.update({cell:D})\n D_list += [D]\n\n for D in D_list:\n # First convert each reference so it points to the right Device\n converted_references = []\n for e in D.references:\n ref_device = c2dmap[e.ref_cell]\n if isinstance(e, gdspy.CellReference):\n dr = DeviceReference(\n device = ref_device,\n origin = e.origin,\n rotation = e.rotation,\n magnification = e.magnification,\n x_reflection = e.x_reflection\n )\n dr.owner = D\n converted_references.append(dr)\n elif isinstance(e, gdspy.CellArray):\n dr = CellArray(\n device = ref_device,\n columns = e.columns,\n rows = e.rows,\n spacing = e.spacing,\n origin = e.origin,\n rotation = e.rotation,\n magnification = e.magnification,\n x_reflection = e.x_reflection,\n )\n dr.owner = D\n converted_references.append(dr)\n D.references = converted_references\n # Next convert each Polygon\n temp_polygons = list(D.polygons)\n D.polygons = []\n for p in temp_polygons:\n D.add_polygon(p)\n\n topdevice = c2dmap[topcell]\n return topdevice\n\n elif flatten == True:\n D = Device('import_gds')\n polygons = topcell.get_polygons(by_spec = True)\n\n for layer_in_gds, polys in polygons.items():\n D.add_polygon(polys, layer = layer_in_gds)\n return D",
"def get_t1_logical_router_path_by_id(self, router_id=None):\n t1_info = self.get_t1_logical_router(router_id=router_id)\n t1_path = t1_info.get(\"path\")\n return t1_path",
"def get_mof_structure_by_id(self,strucid, mol = False):\n lines,name = self.mfp.get_mof_structure_by_id(strucid)\n return lines",
"def showId(self):\n #Here I'm supposing that the name of the table, and the extent polygon gives a unique mapping.\n try:\n extent = self.geometry.extent\n name = self.grid_name\n res = self.dArea\n string = \"%s:%s:%s:%s\" %(self.parent_id,name,extent,res)\n return string\n except:\n logger.error(\"[biospatial.gbif.taxonomy.GriddedTaxonomy] \\n The total geometry area has not been defined. Try running mergeGeometries first\")\n raise Exception(\"Geometry Extent has not been instantiated\")\n return None",
"def get(self, resource_id, file_id):\n d = Deposition.get(resource_id, user=current_user)\n df = d.get_file(file_id)\n if df is None:\n abort(404, message=\"File does not exist\", status=404)\n return d.type.marshal_file(df)",
"def get_info(file_path):\n \n with h5py.File(file_path, 'r') as f:\n max_zoom = f.attrs.get('max-zoom')\n \n if max_zoom is None:\n logger.info('no zoom found')\n raise ValueError(\n 'The `max_zoom` attribute is missing.'\n )\n \n c = cooler.Cooler(f[\"0\"])\n \n (chroms, chrom_sizes, chrom_cum_lengths) = get_chromosome_names_cumul_lengths(c)\n \n total_length = int(chrom_cum_lengths[-1])\n max_zoom = f.attrs['max-zoom']\n bin_size = int(f[str(max_zoom)].attrs['bin-size'])\n \n max_width = bin_size * TILE_SIZE * 2**max_zoom\n \n info = {\n 'min_pos': [0.0, 0.0],\n 'max_pos': [total_length, total_length],\n 'max_zoom': max_zoom,\n 'max_width': max_width,\n 'bins_per_dimension': TILE_SIZE,\n }\n \n return info",
"def get_data(self):\n return self.topo_data_flattened",
"def get_location_from_id(id):\n tree = ET.parse('./devset_topics.xml')\n root = tree.getroot()\n for item in root.findall('./topic'):\n if id == item[0].text:\n return item[1].text",
"def find_partition(self, id: str) -> Optional[\"GraphInfo\"]:\n if id == self.id:\n return self\n current_length = len(self.id)\n if len(id) > current_length:\n if id[current_length] == \"0\" and self.upper_graph_info is not None:\n return self.upper_graph_info.find_partition(id)\n elif id[current_length] == \"1\" and self.lower_graph_info is not None:\n return self.lower_graph_info.find_partition(id)\n return None",
"def get_contest_template_file(gt_id, horizon):\n return os.path.join(\"data\", \"fcstrodeo_nctemplates\",\n get_contest_id(gt_id, horizon)+\"_template.nc\")",
"def get_subnet_by_id(self, id):\n return self.network.get_subnet(id)",
"def get_orig_scat_file(scat_name, tilename):\n if '-dg' in scat_name:\n fname=get_dg_scat_file(scat_name, tilename)\n else:\n d=get_cat_dir(scat_name)\n fname='{scat_name}-{tilename}.fits'.format(scat_name=scat_name,\n tilename=tilename)\n fname=os.path.join(d, fname)\n\n return fname",
"def get_hypervisor_std_file(uuid):\n return DIR + uuid + \"-hypervisor-usage-std.csv\"",
"def get_file(self, file_id):\n LOG.debug(\"Getting a file from mattermost\")\n url = '%s/api/v4/files/%s' % (self.server_url, file_id)\n LOG.debug(\"Sending: %s\", url)\n response = self._request(self._session.get, url)\n\n if response.status_code != 200:\n raise RuntimeError(\"Server unhappy. (%s)\", response)\n\n return response.content",
"def get_UFS_grid_area(dir, tile, i, j):\n #this information is in the supergrid files\n \n filename_pattern = '*grid.tile{0}.nc'.format(tile)\n \n for f_name in os.listdir(dir):\n if fnmatch.fnmatch(f_name, filename_pattern):\n filename = f_name\n if not filename:\n message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir)\n logging.critical(message)\n raise Exception(message)\n \n nc_file = Dataset('{0}/{1}'.format(dir,filename))\n \n # extract out area of grid cell\n \n #calculate supergrid indices from regular grid indices\n jpt2 = j*2+1\n ipt2 = i*2+1\n \n #from Phil Pegion: the area is calculated by adding up the 4 components of the contained supergrid cells\n area_in=nc_file['area'][jpt2-1:jpt2+1,ipt2-1:ipt2+1]\n \n return area_in.sum()",
"def get_canonical(self, id):\n canonical = None\n if id in self.canonicals:\n canonical = self.canonicals[id]\n return canonical",
"def id_to_relative_raw_path(self, id):\n return osp.join(self.id_to_base_id(id) + '.ply')",
"def get_cab_route_by_id(self, id):\n cab_route = self.admin_repository.get_cab_route_by_id(id)\n if cab_route:\n print(\"Cab Number : {}\".format(cab_route[1]))\n print(\"Route Id : {}\".format(cab_route[2]))\n print(\"Stop Name : {}\".format(cab_route[3]))\n print(\"Stop stage : {}\".format(cab_route[4]))\n print(\"Timings : {}\".format(cab_route[5]))\n return cab_route\n else:\n print(\"Invalid Input\")\n return False",
"def get(self, problem_id=None):\n problem = self.sess.query(\n DetailedProblem,\n func.ST_AsGeoJSON(DetailedProblem.location)).filter(\n DetailedProblem.id == problem_id)\n data = generate_data(problem)[0]\n self.write(data)",
"def get(id: str) -> DataSet:\n pass",
"def get(self, cls, id):\n\n return FileStorage.__objects[key(cls, id)]",
"def getFileSpace(fs_id):\n result = None\n session = Queries.createSession()\n try:\n result = session.execute(sqlalchemy.select([FileSpace])\n .where(FileSpace.id == fs_id)\n ).fetchone()\n except sqlalchemy.exc.ArgumentError:\n print 'SQLAlchemy ERROR: Invalid or conflicting function argument is supplied'\n except sqlalchemy.exc.CompileError:\n print 'SQLAlchemy ERROR: Error occurs during SQL compilation'\n finally:\n session.close()\n return result",
"def get(id=None):\n \n d = os.path.abspath(__file__)\n stn = os.path.join(os.path.split(d)[0], 'stations.json')\n with open(stn, 'r') as f:\n data = json.loads(f.read())\n \n if id in data.keys():\n return data[id]\n \n return data",
"def GetSubBasin(self,xsid):\n loc = np.where(self.crosssections['xsid'] == xsid)[0][0]\n return self.crosssections.loc[loc,'swimid']",
"def _get_geometry_protein(self, protein_id: int):\n path_protein, _ = self._get_path(protein_id)\n mol_protein = Molecule(path_protein)\n mol_protein.filter(\"protein\")\n if (self.type_feature == \"bio_properties\" or self.type_feature == \"bio_all_properties\"):\n mol_protein = prepareProteinForAtomtyping(mol_protein, verbose = False)\n coords_protein = mol_protein.coords\n coords_protein = np.asarray(coords_protein)\n return coords_protein",
"def get_image(id_num):\n return sqldb.get_image(id_num)",
"def get_scat_file(scat_vers, tilename):\n d=get_scat_dir(scat_vers)\n fname='{scat_vers}-{tilename}.dat'\n fname=fname.format(scat_vers=scat_vers,\n tilename=tilename)\n return os.path.join(d,fname)",
"def getInfo(self, id):\n facade = self._getFacade()\n monitor = facade.get(id)\n data = Zuul.marshal(ITreeNode(monitor))\n return DirectResponse.succeed(data=data)",
"def borealis_site_to_dmap_files(filename, borealis_filetype, slice_id, dmap_filename):\n borealis_converter = BorealisConvert(filename, borealis_filetype,\n dmap_filename, slice_id, borealis_file_structure='site')\n\n dmap_filename = borealis_converter.sdarn_filename # overwrite to as generated\n\n bz2_filename = compress_bz2(dmap_filename) # compress (and adds .bz2 to filename)\n os.remove(dmap_filename) # remove uncompressed\n\n return bz2_filename",
"def borealis_site_to_dmap_files(filename, borealis_filetype, slice_id, dmap_filename):\n borealis_converter = BorealisConvert(filename, borealis_filetype,\n dmap_filename, slice_id, borealis_file_structure='site')\n\n dmap_filename = borealis_converter.sdarn_filename # overwrite to as generated\n\n bz2_filename = compress_bz2(dmap_filename) # compress (and adds .bz2 to filename)\n os.remove(dmap_filename) # remove uncompressed\n\n return bz2_filename",
"def get_dimension(self, dim_id):\n for dim in self.dimensions:\n if dim_id == dim.id:\n return dim",
"def get_grid_shape(self, grid_id):\n return self._grid_shape[grid_id].copy()",
"def get_batch(self, batch_id):\n #fmt = lambda x: join(self.path, self.simulation_paths[x])\n fmt = lambda x: self.simulation_paths[x]\n simulation_paths = [fmt(i) for i in self.batch_indices[batch_id]]\n return Batch(simulation_paths, root=self.path)",
"def geo2cell(geofile, posfile):",
"def get_tile(self):\n return Tile.get_tile(self.get_number())",
"def getSubductionTypeByID(self, eventid):\n if self.verbose:\n self.logger.info(\"Inside getSubductionTypeByID...\")\n lat, lon, depth, tensor_params = self.getOnlineTensor(eventid)\n if self.verbose:\n self.logger.info(\"Tensor Parameters: %s\" % str(tensor_params))\n if lat is None:\n raise AttributeError(\"Event %s is not found in ComCat.\" % eventid)\n\n lat = float(lat)\n lon = float(lon)\n results = self.getSubductionType(lat, lon, depth, tensor_params=tensor_params)\n return results",
"def get(id=None):\n return requests.get(\"/{}\".format(id))",
"def load_from_poscar(self, filename):\n with open( filename, 'r' ) as F:\n F = open( filename, 'r' )\n self.fileID = filename\n self.name = F.readline()\n scale = float(F.readline())\n self.unit_cell = mat( fromfile( F, dtype('float'), 9, ' ' ).reshape((3,3)) )\n \n # Scale < 0 means that it gives the volume we want to have\n if scale < 0.0:\n scale=(-scale/self.volume)**(1./3.)\n self.unit_cell *= scale\n\n # If the next line does not contain just numbers, then it is treated as a list of species\n line = F.readline()\n self.species = None\n try:\n self.num_per_type = [int(n) for n in line.split()]\n except:\n species = line.split()\n line = F.readline()\n self.num_per_type = [int(n) for n in line.split()]\n self.species = []\n for n in self.num_per_type:\n self.species.extend(n*[species[0]])\n species = species[1:]\n \n self.num_atoms = 0\n self.num_atoms=sum(self.num_per_type)\n \n mode = F.readline()\n \n self.atoms = mat(fromfile( F, dtype('float'), self.num_atoms*3, ' ' ).reshape(( self.num_atoms,3)))\n \n if re.search('^[cCkK]',mode):\n pass\n else:\n self.atoms = self.atoms*self.unit_cell\n \n if self.name.split()[0] == \"SUPERCELL\":\n self.is_supercell = True\n self.supercell_repetitions = self.name.split()[1].split('x')\n self.supercell_repetitions = [int(i) for i in self.supercell_repetitions]",
"def load_spitzer_image(): # pragma: no cover\n\n path = get_path('spitzer_example_image.fits', location='remote')\n hdu = fits.open(path)[0]\n\n return hdu",
"def get_orig_scat_file_full(scat_name):\n d=get_cat_dir(scat_name)\n fname='{scat_name}.fits'.format(scat_name=scat_name)\n return os.path.join(d, fname)",
"def convert_tile(fname, out_fname, compression, filter_opts):\n with h5py.File(out_fname, 'w') as fid:\n with rasterio.open(fname) as ds:\n # global attributes\n attach_attributes(fid, ds.tags())\n\n # find and convert every subsdataset (sds)\n for sds_name in ds.subdatasets:\n with rasterio.open(sds_name) as sds:\n ds_name = Path(sds_name.replace(':', '/')).name\n\n # create empty or copy the user supplied filter options\n if not filter_opts:\n f_opts = dict()\n else:\n f_opts = filter_opts.copy()\n\n # use sds native chunks if none are provided\n if 'chunks' not in f_opts:\n f_opts['chunks'] = list(sds.block_shapes[0])\n\n # modify to have 3D chunks if we have a multiband sds\n if sds.count == 3:\n # something could go wrong if a user supplies\n # a 3D chunk eg (2, 256, 340)\n f_opts['chunks'].insert(0, 1)\n f_opts['chunks'] = tuple(f_opts['chunks'])\n else:\n f_opts['chunks'] = tuple(f_opts['chunks'])\n\n # subdataset attributes and spatial attributes\n attrs = sds.tags()\n attrs['geotransform'] = sds.transform.to_gdal()\n attrs['crs_wkt'] = sds.crs.wkt\n\n # ensure single band sds is read a 2D not 3D\n data = sds.read() if sds.count == 3 else sds.read(1)\n\n # write to disk as an IMAGE Class Dataset\n write_h5_image(data, ds_name, fid, attrs=attrs,\n compression=compression,\n filter_opts=f_opts)",
"def get_by_id(dataobj_id):\n results = list(get_data_dir().rglob(f\"{dataobj_id}-*.md\"))\n return results[0] if results else None",
"async def get(id):\n cluster = clusters.get_by_id(id)\n\n if cluster is None:\n raise HTTPException(status_code=404, detail=\"Cluster not found for ID: {0}\".format(id))\n\n return cluster.export()",
"def find_cluster(self, id):\n raise NotImplementedError",
"def get_shp_file(self):\n files = os.listdir(self.targetpath)\n file = files[0].split('.')[0]\n return self.targetpath + '/' + file",
"def load_fits_primary(filename, transpose=True):\n output = pyfits.open(filename)\n output = np.array(output[0].data)\n if transpose:\n output = output.transpose()\n\n return output",
"def find_tile(loc, dir):\n #returns the integer tile number\n \n # should be looking in the directory with supergrid data (probably \"fix\" directory)\n filename_pattern = '*grid.tile*.nc'\n \n #find all supergrid files in the directory\n grid_fnames = []\n for f_name in os.listdir(dir):\n if fnmatch.fnmatch(f_name, filename_pattern):\n grid_fnames.append(f_name)\n if not grid_fnames:\n message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir)\n logging.critical(message)\n raise Exception(message)\n \n #non-polar tiles can use traditional 2D point-in-polygon methods; if a point is not in a non-polar tile,\n #it is in one of the polar tiles, and the tile can be distinguished by the sign of latitude of the point\n polar_tile_filenames = []\n found_tile = False\n for f_name in grid_fnames:\n if not found_tile:\n nc_file = Dataset('{0}/{1}'.format(dir,f_name))\n longitude = np.array(nc_file['x']).swapaxes(0,1)\n latitude = np.array(nc_file['y']).swapaxes(0,1)\n nc_file.close()\n \n adj_long = False \n #look for reversal of longitude; if found, adjust longitude so that 0-360 transition doesn't exist\n for row in longitude:\n if not (np.all(np.diff(row) >= 0) or np.all(np.diff(row) <= 0)):\n adj_long = True\n if adj_long:\n longitude[longitude < 180] += 360\n \n #get lon/lat pairs for all edges of the tiles\n \n edge_1_lon = longitude[0,:]\n edge_1_lat = latitude[0,:]\n edge_1 = list(zip(edge_1_lon, edge_1_lat))\n \n edge_2_lon = longitude[:,-1]\n edge_2_lat = latitude[:,-1]\n edge_2 = list(zip(edge_2_lon, edge_2_lat))\n \n edge_3_lon = longitude[-1,:]\n edge_3_lat = latitude[-1,:]\n edge_3 = list(zip(edge_3_lon, edge_3_lat))\n edge_3.reverse() #need to reverse the direction of this edge to form a regular polygon\n \n edge_4_lon = longitude[:,0]\n edge_4_lat = latitude[:,0]\n edge_4 = list(zip(edge_4_lon, edge_4_lat))\n edge_4.reverse() #need to reverse the direction of this edge to form a regular polygon\n \n polygon_points = edge_1 + edge_2 + edge_3 + edge_4\n \n tile_polygon = Polygon(polygon_points)\n tile_polygon = tile_polygon.simplify(0)\n \n if tile_polygon.is_valid: #this will be True unless the tile is a polar tile, which will not form a regular polygon in Cartesian space using lon/lat data\n temp_loc = copy.deepcopy(loc)\n if adj_long:\n if loc[0] < 180:\n temp_loc[0] += 360\n loc_point = Point(temp_loc)\n if tile_polygon.contains(loc_point):\n found_tile = True\n return f_name.split('tile')[1].split('.nc')[0] \n else:\n polar_tile_filenames.append(f_name)\n \n #if the tile hasn't been found by this point, it must be contained within a polar tile\n for f_name in polar_tile_filenames:\n nc_file = Dataset('{0}/{1}'.format(dir,f_name))\n latitude = np.array(nc_file['y']).swapaxes(0,1)\n nc_file.close()\n \n #if the sign of the mean latitude of the tile is the same as that of the point, the tile has been found\n if np.sign(np.mean(latitude)) == np.sign(loc[1]):\n found_tile = True\n return f_name.split('tile')[1].split('.nc')[0] \n return -1",
"def getById(self, id_cog_score:int):\n\n self.function += str(id_genus) + '/'\n\n result_get = GetRest(function = self.function).performRequest()\n return result_get",
"def get_scinv_file(pz_vers, pz_type, cosmo_vers, chunk=None):\n dir=get_scinv_dir(pz_vers,pz_type,cosmo_vers)\n name='DES_scinv_%s_%s_%s' % (pz_vers, pz_type,cosmo_vers)\n\n if chunk is not None:\n dir=os.path.join(dir,'chunks')\n name='%s_%06d' % (name,chunk)\n name='%s.fits' % name\n return os.path.join(dir, name)",
"def getTree(self, id):\n facade = self._getFacade()\n monitors = facade.query()\n nodes = map(ITreeNode, monitors)\n data = Zuul.marshal(nodes)\n return data",
"def getTile(self):\n return self.tile",
"def getnode(self, id: int) -> node_data:\n return self.Nodes[id]",
"def open_tile(filename):\n geoimg = gippy.GeoImage(filename, True)\n z, x, y = map(int, geoimg.basename().split('-')[0:4])\n tile = Tile.from_google(google_x=x, google_y=y, zoom=z)\n geoimg.set_srs('EPSG:3857')\n minpt = tile.bounds[0].meters\n maxpt = tile.bounds[1].meters\n affine = np.array(\n [\n minpt[0], (maxpt[0]-minpt[0])/geoimg.xsize(), 0.0,\n maxpt[1], 0.0, -(maxpt[1]-minpt[1])/geoimg.ysize()\n ])\n geoimg.set_affine(affine)\n geoimg.set_nodata(-1)\n return geoimg",
"def getSO(self, pixel_id):\n pixel_ids = [so.id for so in self]\n try:\n index = pixel_ids.index(pixel_id)\n return self[index]\n # Pixel ID was not found\n except ValueError:\n return None",
"def subimage(self, *args, **kwargs):\n return _coordsys.coordsys_subimage(self, *args, **kwargs)",
"def _get_normal_map_scale_subfolder(self):\n if self.im_scale <= 0.25:\n if self.im_scale <= 0.125:\n return \"Normals/0.125/\"\n else:\n return \"Normals/0.25/\"\n else: \n return \"Normals/\"",
"def static_cell_for_cell(self, cell_id):\n\t\tassert np.all(self.is_cell_id(cell_id))\n\t\t# Mask out the time\n\t\treturn cell_id & self.t2static_mask"
] | [
"0.5464149",
"0.50709033",
"0.50375205",
"0.5031053",
"0.48634726",
"0.48348683",
"0.4817579",
"0.4804395",
"0.47939923",
"0.47609693",
"0.46538952",
"0.4650901",
"0.46247876",
"0.46213096",
"0.45644408",
"0.45580676",
"0.4558013",
"0.4542269",
"0.45237747",
"0.45166185",
"0.45030698",
"0.4500567",
"0.44944832",
"0.44865754",
"0.4479755",
"0.44778517",
"0.44692793",
"0.4466645",
"0.44433647",
"0.44399855",
"0.44384825",
"0.43869472",
"0.43796796",
"0.43763834",
"0.43761784",
"0.43724456",
"0.43679154",
"0.43582213",
"0.43396124",
"0.43365687",
"0.4335907",
"0.43304506",
"0.4325799",
"0.43242818",
"0.4318042",
"0.43155402",
"0.43049437",
"0.4299618",
"0.4283094",
"0.42798385",
"0.4270349",
"0.4266201",
"0.42647043",
"0.42642492",
"0.42587584",
"0.4245182",
"0.42392316",
"0.423578",
"0.4234108",
"0.42324352",
"0.42311037",
"0.4228879",
"0.4224466",
"0.42229766",
"0.4218949",
"0.42168936",
"0.4216482",
"0.41973075",
"0.41921574",
"0.41890264",
"0.41810274",
"0.41781193",
"0.41781193",
"0.41766647",
"0.41756648",
"0.4172985",
"0.41700384",
"0.41676113",
"0.41672242",
"0.4163128",
"0.4163079",
"0.41629425",
"0.4159858",
"0.41580117",
"0.41573596",
"0.41555303",
"0.41519538",
"0.41513172",
"0.4138747",
"0.41358617",
"0.41345373",
"0.41339436",
"0.4132353",
"0.41282737",
"0.4127583",
"0.4127525",
"0.4122876",
"0.41214365",
"0.4116723",
"0.41146073"
] | 0.7465723 | 0 |
Gets the orients file for a given supercell id. | def get_orients(self, id):
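        # same delegation pattern as get_scaledtopo: self.mfp is assumed to be
        # the API client, and the raw file contents are returned as-is.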
lines = self.mfp.get_orients(id)
return lines | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_orientations(self):\n for atom in self.invarioms:\n atom.get_orientation()",
"def orient(self):\n self._read(False)\n return self._readings.orient",
"def get_orientations(self, int32 dim, codim=None):\n if codim is not None:\n dim = self.tdim - codim\n\n if dim == 1:\n return self.edge_oris\n\n elif dim == 2:\n return self.face_oris\n\n else:\n raise ValueError('only edges or faces have orientations! (%d)'\n % dim)",
"def get(self, id):\n file = (\n self.drive.files()\n .get(\n fileId=id,\n fields=\"id, name\",\n supportsAllDrives=self.shared_drive[0],\n )\n .execute()\n )\n return file",
"def __data_file_for_row_id(self, row_id):\n file_row_id = int(row_id) % int(self.rows_per_page)\n if file_row_id == 0:\n second_number = (int(row_id) // int(self.rows_per_page)) * int(self.rows_per_page)\n first_number = second_number - int(self.rows_per_page) + 1\n else:\n first_number = (int(row_id) // int(self.rows_per_page)) * int(self.rows_per_page) + 1\n second_number = first_number + int(self.rows_per_page) - 1\n path = self.path + '/data' + str(first_number) + '_' + str(second_number) + '.dat'\n return path",
"def orient(self):\n return self.__ph.get('orient', PH_ORIENT_HORZ)",
"def get_mof_structure_by_id(self,strucid, mol = False):\n lines,name = self.mfp.get_mof_structure_by_id(strucid)\n return lines",
"def get_path(self, path_id):\n\t\tpass",
"def get_orientation(self):\r\n return self.__orientation",
"def getsameIDList(id, file):\n glineList = []\n newread = []\n \n for line in open(file):\n itemList = line[:-1].split('\\t')\n line_id = getsubString(itemList[0],'|')\n \n if id == line_id:\n glineList.append(line)\n else:\n newread.append(line)\n return glineList",
"def id_to_index(self, id):\n raise NotImplementedError",
"def _get_file_by_id(id):\n query = \"\"\"SELECT * FROM files WHERE id = (:id) LIMIT 1\"\"\"\n param_obj = {'id': id}\n return _execute(query, param_obj)",
"def escribir_indir(self, FILESYS, id,name_file=\"Xx.xXx.xXx.xXx.\",\n size_file=\"\",inicluster=\"\",cdate=\"\",mdate=\"\",no_use=\"\"):\n byte = 512\n tamanno_indir = 64\n id = int(id)\n try:\n FILESYS[byte+(tamanno_indir*id):byte+(tamanno_indir*id)+15] =\\\n ((\" \"*(15-len(str(name_file))))+str(name_file)).encode('ascii')\n except:\n print(\"Nombre no valido\")\n return False\n FILESYS[byte+(tamanno_indir*id)+16:byte+(tamanno_indir*id)+24] =\\\n (\"0\"*(8-len(str(size_file)))+str(size_file)).encode('ascii')\n FILESYS[byte+(tamanno_indir*id)+25:byte+(tamanno_indir*id)+30] =\\\n (\"0\"*(5-len(str(inicluster)))+str(inicluster)).encode('ascii')\n FILESYS[byte+(tamanno_indir*id)+31:byte+(tamanno_indir*id)+45] =\\\n (\"0\"*(14 - len(str(cdate)))+str(cdate)).encode('ascii')\n FILESYS[byte+(tamanno_indir*id)+46:byte+(tamanno_indir*id)+60] =\\\n (\"0\"*(14 - len(str(mdate)))+str(mdate)).encode('ascii')\n FILESYS[byte+(tamanno_indir*id)+61:byte+(tamanno_indir*id)+64] =\\\n (\"\\x00\"*(3 - len(str(no_use)))+str(no_use)).encode('ascii')\n return True",
"def get_orientation(self):\n return self._orientation",
"def _levelFromIfd(self, ifd, baseifd):\n sizeX = ifd['tags'][tifftools.Tag.ImageWidth.value]['data'][0]\n sizeY = ifd['tags'][tifftools.Tag.ImageLength.value]['data'][0]\n tileWidth = baseifd['tags'][tifftools.Tag.TileWidth.value]['data'][0]\n tileHeight = baseifd['tags'][tifftools.Tag.TileLength.value]['data'][0]\n for tag in {\n tifftools.Tag.SamplesPerPixel.value,\n tifftools.Tag.BitsPerSample.value,\n tifftools.Tag.PlanarConfig.value,\n tifftools.Tag.Photometric.value,\n tifftools.Tag.Orientation.value,\n tifftools.Tag.Compression.value,\n tifftools.Tag.TileWidth.value,\n tifftools.Tag.TileLength.value,\n }:\n if ((tag in ifd['tags'] and tag not in baseifd['tags']) or\n (tag not in ifd['tags'] and tag in baseifd['tags']) or\n (tag in ifd['tags'] and\n ifd['tags'][tag]['data'] != baseifd['tags'][tag]['data'])):\n msg = 'IFD does not match first IFD.'\n raise TileSourceError(msg)\n sizes = [(self.sizeX, self.sizeY)]\n for level in range(self.levels - 1, -1, -1):\n if (sizeX, sizeY) in sizes:\n return level\n altsizes = []\n for w, h in sizes:\n w2f = int(math.floor(w / 2))\n h2f = int(math.floor(h / 2))\n w2c = int(math.ceil(w / 2))\n h2c = int(math.ceil(h / 2))\n w2t = int(math.floor((w / 2 + tileWidth - 1) / tileWidth)) * tileWidth\n h2t = int(math.floor((h / 2 + tileHeight - 1) / tileHeight)) * tileHeight\n for w2, h2 in [(w2f, h2f), (w2f, h2c), (w2c, h2f), (w2c, h2c), (w2t, h2t)]:\n if (w2, h2) not in altsizes:\n altsizes.append((w2, h2))\n sizes = altsizes\n msg = 'IFD size is not a power of two smaller than first IFD.'\n raise TileSourceError(msg)",
"def get_deposition(self, id: uplink.Path):\n pass",
"def get_org_spec_dir(self, org_id):\n return self._get_org_base_dir(org_id)",
"def _get_organisms_file_path(self, gene_name, gene_id):\n return os.path.join(os.getcwd(), \"src\", \"data\", \"organisms\", \"{}_{}.txt\".format(gene_name, gene_id))",
"def read(self):\n if self.getiddname() is None:\n errortxt = (\n \"IDD file needed to read the idf file. \"\n \"Set it using IDF.setiddname(iddfile)\"\n )\n raise IDDNotSetError(errortxt)\n readout = idfreader1(\n self.idfname, self.iddname, self, commdct=self.idd_info, block=self.block\n )\n (self.idfobjects, block, self.model, idd_info, idd_index, idd_version) = readout\n self.setidd(idd_info, idd_index, block, idd_version)",
"def getRoiInfo(self, fh):\n fn = fh.name()\n rf = open(fn[:-4]+'.roi', 'r')\n rois = np.loadtxt(rf)\n return rois",
"def orientation(self):\n agents = self.board[self.agent_locs_idx]\n out = (agents & CellTypes.orientation_mask) >> CellTypes.orientation_bit\n return out.astype(np.int64)",
"def read_hierarchy(self, fid):\r\n\r\n lin = self.read_line(fid)\r\n \r\n while lin != 'end':\r\n parts = lin.split()\r\n if lin != 'begin':\r\n ind = self.get_index_by_name(parts[0])\r\n for i in range(1, len(parts)):\r\n self.vertices[ind].children.append(self.get_index_by_name(parts[i]))\r\n lin = self.read_line(fid)\r\n lin = self.read_line(fid)\r\n return lin",
"def __row_id_in_file(self, row_id):\n #if our table doesn't have any rows yet\n if row_id == 0:\n return 0\n else:\n file_row_id = int(row_id) % int(self.rows_per_page)\n if file_row_id == 0:\n file_row_id = int(file_row_id) + int(self.rows_per_page)\n return file_row_id",
"def get_read_orientation_outward(self, ctx, params):\n # ctx is the context object\n # return variables are: returnVal\n #BEGIN get_read_orientation_outward\n\n if 'workspace_name' not in params:\n raise ValueError('Parameter workspace_name is not set in input arguments')\n workspace_name = params['workspace_name']\n if 'id' not in params:\n raise ValueError('Parameter id is not set in input arguments')\n objid = params['id']\n\n token = ctx['token']\n wsClient = workspaceService(self.workspaceURL, token=token)\n try:\n\n objref = workspace_name + '/' + str(objid)\n\n # Note that results from the workspace are returned in a list\n returnVal = wsClient.get_objects([{'ref': objref}])[0]\n\n if returnVal is not None:\n if returnVal['data']['single_genome'] is not None:\n returnVal = returnVal['data']['single_genome']\n\n print \"is_single_genome issingle \" + str(returnVal)\n except:\n exc_type, exc_value, exc_traceback = sys.exc_info()\n lines = traceback.format_exception(exc_type, exc_value, exc_traceback)\n orig_error = ''.join(' ' + line for line in lines)\n raise ValueError('Error from workspace:\\n' + orig_error)\n\n #END get_read_orientation_outward\n\n # At some point might do deeper type checking...\n if not isinstance(returnVal, int):\n raise ValueError('Method get_read_orientation_outward return value ' +\n 'returnVal is not type int as required.')\n # return the results\n return [returnVal]",
"def galaxy2_orbital_orientation(self):\n return self._galaxy2_orbital_orientation",
"def getID(filePath):\r\n\r\n fileID = rmn.fstopenall(filePath,rmn.FST_RO)\r\n return fileID",
"def get_entrance_junction(self, id):\n return self.sections[id][0]",
"def calculateOrientation(self):\n orientation = [\n [Orientation.LANDSCAPE, Orientation.PORTRAIT],\n [Orientation.CW_LANDSCAPE, Orientation.CW_PORTRAIT],\n [Orientation.CCW_LANDSCAPE, Orientation.CCW_PORTRAIT],\n [Orientation.FLIPPED_LANDSCAPE, Orientation.FLIPPED_PORTRAIT],\n ]\n exif = self.getExif()\n if exif and exif['Orientation'] == 3: #180\n rotation = 3\n elif exif and exif['Orientation'] == 6: #90 CCW\n rotation = 2\n elif exif and exif['Orientation'] == 8: #90 CW\n rotation = 1\n else:\n rotation = 0\n\n if self.isLandscape():\n return orientation[rotation][0]\n else:\n return orientation[rotation][1]",
"def interacs_maker(path, filenames, users_ids, output):\n\tids_dict = {}\n\twith open(users_ids, 'r', encoding='utf-8') as f:\n\t\tfor line in f:\n\t\t\tline = line.strip().split(',',1)\n\t\t\tid, screen_name = line[0], line[1]\n\t\t\tids_dict[screen_name] = id\n\n\t# with open(users_ids, 'r', encoding='utf-8') as f:\n\t# \tids = f.read().split(\"\\n\")\n\t# enum_ids = enumerate(ids)\n\n\tpaths = complete_paths(path, filenames)\n\t\n\tinteractions = []\n\n\tfor i in range(0, len(paths)):\n\t\tscreen_name = paths[i][36:-4] # 乁〳 ❛ д ❛ 〵ㄏ\n\t\tid = ids_dict[screen_name]\n\t\t# index = [i for i, s in enum_ids if screen_name in s][0]\n\t\t# id = re.search(r'\\d+', ids[index]).group()\n\t\t\n\t\twith open(paths[i], 'r', encoding='utf-8') as f:\n\t\t\tprint(paths[i])\n\t\t\tfor line in f:\n\t\t\t\tprint(line)\n\n\t\t\t\tif line=='\\n' or line=='':\n\t\t\t\t\tcontinue\n\n\t\t\t\tline = line.strip().split(',') # Ya no hay temor: números enteros. Coma sólo separa rating de item_id\n\t\t\t\tinteractions.append( [id, line[1], line[0]] ) # [user_id, item_id, rating]\n\n\twith open(output, 'w+') as f:\n\t\tfor triple in interactions:\n\t\t\tf.write(\"{0},{1},{2}\\n\".format( triple[0], triple[1], triple[2] ) )\n\n\treturn 0",
"def get_files(self, sid):\n try:\n return self.datas.get(sid)\n except Exception as ex:\n raise ex",
"def find_rent(self, id):\n allR=self.__loadFromFile()\n for bk in allR:\n if bk.getId()==id:\n return bk",
"def iter_sector(self, id):\n # hard coded because I suck at math.\n # this is a map of sector id to tuples of points: \n # (starting row, starting column), (ending row, ending column)\n # where the end values are inclusive.\n sector_map = {\n 0: ((0, 0), (2, 2)),\n 1: ((3, 0), (5, 2)),\n 2: ((6, 0), (8, 2)),\n 3: ((0, 3), (2, 5)),\n 4: ((3, 3), (5, 5)),\n 5: ((6, 3), (8, 5)),\n 6: ((0, 6), (6, 8)),\n 7: ((3, 6), (5, 8)),\n 8: ((6, 6), (8, 8)),\n }\n\n sector = sector_map.get(id)\n if not sector:\n raise Exception('Invalid sector ID: {}'.format(id))\n (start_col, start_row), (end_col, end_row) = sector\n for row in range(start_row, end_row + 1):\n for col in range(start_col, end_col + 1):\n yield self.table[row][col], row, col",
"def get_data_folder(self, mode='absolute'):\n\n path = Path(f'sub-{self.sub_id}', f'ses-{self.ses_id}', self.modality)\n\n if mode == 'absolute':\n if self.basedir is None:\n raise ValueError('No base directory set.')\n path = self.basedir / path\n\n return path",
"def _get_path(self, protein_id: int):\n protein_name = self.files_refined[protein_id]\n path_protein = os.path.join(\n self.init_refined, protein_name, protein_name + \"_protein.pdb\"\n )\n path_ligand = os.path.join(\n self.init_refined, protein_name, protein_name + \"_ligand.mol2\"\n )\n return path_protein, path_ligand",
"def get(self, cls, id):\n\n return FileStorage.__objects[key(cls, id)]",
"def getbyid(self, id):\n\n return esd.retrieve(id)",
"def erai(self, name='daily-deg1'):\n if os.path.exists('./data/erai-raw'):\n return os.path.join('./data/erai-raw/', f'{name}.zarr')\n return self.gcs.get_mapper(os.path.join(self.bucket, 'erai-raw', f'{name}.zarr'))",
"def open_book_file(book_id):\n # book exists in the root folder\n for f in FORMATS:\n filename=os.path.join(books_folder,book_id+f)\n if os.path.exists(filename):\n # it is not a directory\n if os.path.isfile(filename):\n return open(filename,encoding='utf-8',errors='ignore').read()\n\n # book is in the folder root/bookid\n for f in FORMATS:\n filename=os.path.join(books_folder,book_id,book_id+f)\n if os.path.exists(filename):\n # it is not a directory\n if os.path.isfile(filename):\n return open(filename,encoding='utf-8',errors='ignore').read()",
"def _get_resource_loc(model_id):\n \"\"\" and live photos for version <= Photos 4.0 \"\"\"\n # determine folder where Photos stores edited version\n # edited images are stored in:\n # Photos Library.photoslibrary/resources/media/version/XX/00/fullsizeoutput_Y.jpeg\n # where XX and Y are computed based on RKModelResources.modelId\n\n # file_id (Y in above example) is hex representation of model_id without leading 0x\n file_id = hex_id = hex(model_id)[2:]\n\n # folder_id (XX) in above example if first two chars of model_id converted to hex\n # and left padded with zeros if < 4 digits\n folder_id = hex_id.zfill(4)[0:2]\n\n return folder_id, file_id",
"def get_ROIs(self, base):\n locs3d = self.locs3d\n #print loc3d\n base_locs = locs3d[base]\n ROI_dic = dict((i, [Id]) for i,Id in enumerate(base))\n for i, loc in enumerate(locs3d):\n if i not in base:\n dist = np.sqrt(np.sum((base_locs - loc)**2, 1))\n min_i = np.argmin(dist)\n ROI_dic[min_i].append(i)\n out = ROI_dic.values()\n return out",
"def GetOrientation(self):\n return self._orient",
"def get_row2id( self, ratios_standardized, db ):\n\t\trow_info_collection = db.row_info\n\t\trow_name = []\n\t\trow_id = []\n\t\tfor i in row_info_collection.find():\n\t\t\trow_name.append( i[\"egrin2_row_name\"] )\n\t\t\trow_id.append( i[\"row_id\"] )\n\t\tfor i in ratios_standardized.index.values:\n\t\t\tif i not in row_name:\n\t\t\t\trow_name.append( i )\n\t\t\tif len(row_id) > 0:\n\t\t\t\trow_id.append( max(row_id)+1 )\n\t\t\telse:\n\t\t\t\trow_id.append(0)\n\t\trow_info = pd.DataFrame( zip(row_id, row_name), index = row_name, columns = [ \"row_id\", \"egrin2_row_name\"] )\n\t\treturn row_info",
"def fileid(self):\n if self._fileid is None:\n rv = M.mexec('''set s1=$order(^DIC(\"B\",s0,0))''', str(self.filename[:30]), M.INOUT(\"\"))[0]\n if rv != '':\n self._fileid = rv\n return self._fileid",
"def get_index_by_id(self, id):\r\n for i in range(len(self.vertices)):\r\n if self.vertices[i].id == id:\r\n return i\r\n raise ValueError('Reverse look up of id failed.')",
"def getIRODSdir(self, inirodsdir, subname):\n tokens = inirodsdir.split('/')\n irodsdir=inirodsdir if tokens[-1]==subname else '/'.join([inirodsdir,subname])\n return irodsdir",
"def compute_orientation(x,y,lx,ly,nfil):\n # number of molecules\n natoms = len(x)\n nmol = natoms/nfil\n # allocate aray for results\n phi = np.zeros((natoms), dtype = np.float64)\n tx = np.zeros((natoms), dtype = np.float64)\n ty = np.zeros((natoms), dtype = np.float64)\n # loop over all polymers\n k = 0\n for i in range(nmol):\n for j in range(nfil):\n if j == 0:\n x1 = x[k]\n y1 = y[k]\n x2 = x[k+1]\n y2 = y[k+1]\n elif j == nfil-1:\n x1 = x[k-1]\n y1 = y[k-1]\n x2 = x[k]\n y2 = y[k]\n else:\n x1 = x[k-1]\n y1 = y[k-1]\n x2 = x[k+1]\n y2 = y[k+1]\n # compute nearest neighbor\n dx = neigh_min(x2-x1,lx)\n dy = neigh_min(y2-y1,ly)\n # compute angle using atan2\n pi = math.atan2(dy,dx)\n phi[k] = pi\n tx[k] = dx / np.sqrt(dx**2 + dy**2)\n ty[k] = dy / np.sqrt(dx**2 + dy**2)\n # increment k\n k = k + 1\n return phi, tx, ty",
"def file(self):\n\n dlos_filename = super(DlosPhotoz, self).file()\n\n photoz_str = 'DLOS_photoz_'\n \n file_name = photoz_str.join( \n dlos_filename.split('DLOS_')\n ) \n\n return file_name",
"def mode(self):\n st = os.stat(self.path)\n return stat.S_IMODE(st.st_mode)",
"def getOrientation(self):\r\n return self.orientation",
"def getFileSpace(fs_id):\n result = None\n session = Queries.createSession()\n try:\n result = session.execute(sqlalchemy.select([FileSpace])\n .where(FileSpace.id == fs_id)\n ).fetchone()\n except sqlalchemy.exc.ArgumentError:\n print 'SQLAlchemy ERROR: Invalid or conflicting function argument is supplied'\n except sqlalchemy.exc.CompileError:\n print 'SQLAlchemy ERROR: Error occurs during SQL compilation'\n finally:\n session.close()\n return result",
"def get_user_sector_altitude_range(db_id):\n found, sector = user_sector_finder(\"id\", int(db_id))\n if found:\n return (sector[0]['min_altitude'], sector[0]['max_altitude'])\n else:\n raise NotFoundException(db_id, \"User sector not found.\")",
"def get_id_path():\n\n root = os.path.dirname(os.path.abspath(__file__))[:-5]\n id_path = os.path.join(root, \"client\\\\files\\\\id.txt\")\n\n return id_path",
"def get_by_id(dataobj_id):\n results = list(get_data_dir().rglob(f\"{dataobj_id}-*.md\"))\n return results[0] if results else None",
"def get_vib_modes(id : int\n ,stordir : str\n ) -> typ.List[typ.Tuple[int,float,int]]:\n raise NotImplementedError\n return [] # type: ignore",
"def get_file_id(self, lfn):\n\n raise NotImplementedError('get_file_id')",
"def getId(self):\n if getattr(self,'id',None):\n return self.id\n name = self.name\n #--Singleton records \n if name in frozenset(('FMAP','GAME','JOUR','KLST','PCDT','REFR','SPLM','TES3')):\n return None\n #--Special records.\n elif name == 'CELL':\n reader = self.getReader()\n srName = reader.findSubRecord('NAME',name)\n srData = reader.findSubRecord('DATA',name)\n (flags,gridX,gridY) = struct.unpack('3i',record.data)\n if flags & 1:\n self.id = cstrip(srName)\n else:\n self.id = '[%d,%d]' % (gridX,gridY)\n elif name == 'INFO':\n srData = self.getReader().findSubRecord('INAM',name)\n self.id = cstrip(srData)\n elif name == 'LAND':\n srData = self.getReader().findSubRecord('INTV',name)\n self.id = '[%d,%d]' % struct.unpack('2i',srData)\n elif name == 'PGRD':\n reader = self.getReader()\n srData = reader.findSubRecord('DATA',name)\n srName = reader.findSubRecord('NAME',name)\n gridXY = struct.unpack('2i',srData[:8])\n if srData != (0,0) or not srName:\n self.id = '[%d,%d]' % gridXY\n else:\n self.id = cstrip(srName)\n elif name == 'SCPT':\n srData = self.getReader().findSubRecord('SCHD',name)\n self.id = cstrip(srData[:32])\n #--Most records: id in NAME record.\n else:\n srData = self.getReader().findSubRecord('NAME',name)\n self.id = srData and cstrip(srData)\n #--Done\n return self.id",
"def get_political_orientation(newspaper):\n return political_orientations[newspaper]",
"def _get_org_base_dir(self, org_id):\n return self._get_persistent_mpe_dir().joinpath(org_id)",
"def getId(self):\n if self.id: return self.id\n reader = self.getReader()\n subData = reader.findSubRecord('INTV','LAND')\n (self.gridX,self.gridY) = struct.unpack('ii',subData)\n self.id = '[%d,%d]' % (self.gridX,self.gridY)\n return self.id",
"def open_idf(self):\n\n self.save()\n\n filepath = self.idfname\n\n import os\n import platform\n import subprocess\n\n if platform.system() == \"Darwin\": # macOS\n subprocess.call((\"open\", filepath))\n elif platform.system() == \"Windows\": # Windows\n os.startfile(filepath)\n else: # linux variants\n subprocess.call((\"xdg-open\", filepath))",
"def get_dicom_file_content(self) -> bytes:\n return self.client.get_instances_id_file(self.id_)",
"def get_file(self, sys_id):\n url = \"{}/file\".format(self._target(sys_id))\n r = self._client.session.get(url, stream=True)\n return r",
"def get_files_info(self, sid):\n try:\n return self.datas.get_file_info(sid)\n except Exception as ex:\n raise ex",
"def _create_img_id_to_idx(self):\n with h5py.File(self.image_features_path, 'r') as features_file:\n coco_ids = features_file['ids'][()]\n coco_id_to_index = {id: i for i, id in enumerate(coco_ids)}\n return coco_id_to_index",
"def get_associated_files(self, observation_id, *, verbose=False):\n query = (f\"select art.artifact_id as filename, p.calibration_level, art.archive_class as type, \"\n f\"pg_size_pretty(art.size_uncompr) as size_uncompressed from ehst.artifact art \"\n f\"join ehst.plane p on p.plane_id = art.plane_id where \"\n f\"art.observation_id = '{observation_id}'\")\n return self.query_tap(query=query)",
"def get_index(self):\n return self.disk.partitions.index(self)",
"def get_file_view(self, bucket_id, file_id):\n\n \n path = '/storage/buckets/{bucketId}/files/{fileId}/view'\n params = {}\n if bucket_id is None:\n raise AppwriteException('Missing required parameter: \"bucket_id\"')\n\n if file_id is None:\n raise AppwriteException('Missing required parameter: \"file_id\"')\n\n path = path.replace('{bucketId}', bucket_id)\n path = path.replace('{fileId}', file_id)\n\n\n return self.client.call('get', path, {\n 'content-type': 'application/json',\n }, params)",
"def get_for_id(id,table):\n # Implement this function\n for row in range(1, len(table)):\n for col in range(len(table[0])):\n if id in table[row][col]:\n return table[row]",
"def get_ids():\n # Filename for SALAMI IA metadata\n metadata_file = os.path.join(\n dpath.SALAMI, 'metadata', 'id_index_internetarchive.csv')\n\n ids = []\n\n with open(metadata_file, \"r\") as rwc_file:\n reader = csv.reader(rwc_file)\n next(reader) #skip header\n for row in reader:\n ids.append(int(row[0]))\n\n return ids",
"def GetSubBasin(self,xsid):\n loc = np.where(self.crosssections['xsid'] == xsid)[0][0]\n return self.crosssections.loc[loc,'swimid']",
"def list_files(excel_file, data_folder):\n\n wb = xlrd.open_workbook(excel_file)\n sheet = wb.sheet_by_index(0)\n sheet.cell_value(0, 0)\n\n # Extracting number of rows\n nsampes = sheet.nrows\n vol_paths, seg_paths =[],[]\n\n for i in range(1, nsampes):\n row = sheet.row_values(i)\n row = row[0]\n\n filename= row.split('.')[0]\n folder = row.split('C')[0]\n folder = folder[:-1]\n vol_paths.append(os.path.join(data_folder,folder,filename +'.nii.gz'))\n segname= filename+'_seg.nii.gz'\n seg_paths.append(os.path.join(data_folder, folder, segname))\n\n return vol_paths, seg_paths",
"def get_office(party_id):\n return [vars(office) for office in OFFICES if office.id == party_id]",
"def get_rep_mol_indexes():\n f = open(FILE_WITH_REP_MOL_IDXS, \"r\")\n rd = csv.reader(f)\n mols = rd.next()\n f.close()\n mol_idxs = [int(i) - 1 for i in mols]\n os.unlink(FILE_WITH_REP_MOL_IDXS)\n return mol_idxs",
"def get_echelle_angle_files(self):\n angle_fits_file = 'keck_hires_angle_fits.fits'\n composite_arc_file = 'keck_hires_composite_arc.fits'\n\n return [angle_fits_file, composite_arc_file]",
"def _get_file_positions(self,filename):\n if os.path.exists(self._ahfBasename + 'fpos'):\n f = util.open_(self._ahfBasename + 'fpos')\n for i in range(self._nhalos):\n self._halos[i+1].properties['fstart'] = int(f.readline())\n f.close()\n else:\n f = util.open_(filename)\n for h in xrange(self._nhalos):\n if len((f.readline().split())) == 1:\n f.readline()\n self._halos[h+1].properties['fstart'] = f.tell()\n for i in xrange(self._halos[h+1].properties['npart']):\n f.readline()\n f.close()",
"def ifaces_file(self):\n return self.system_path(self._ifaces_file)",
"def _getCadastroCursosSuperiores(self, id_cadastro):\n return self.execSql(\"select_cadastro_cursos_superiores\",\n id_cadastro=int(id_cadastro))",
"def id_to_base_id(self, id):\n if self.xy_tiling is None and self.pc_tiling is None:\n return id\n return self.get_tile_from_path(id)[1]",
"def _get_filename(self, id):\n if re.findall('[^a-zA-Z0-9]', id):\n raise Exception\n\n return os.path.join(self._datadir, str(id))",
"def orientation(self):\n return self._orientation",
"def orientation(self):\n return self._orientation",
"def read_root(self, fid):\r\n lin = self.read_line(fid) \r\n while lin[0] != ':':\r\n parts = lin.split()\r\n if parts[0]=='order':\r\n order = []\r\n for i in range(1, len(parts)):\r\n if parts[i].lower()=='rx':\r\n chan = 'Xrotation'\r\n order.append('x')\r\n elif parts[i].lower()=='ry':\r\n chan = 'Yrotation'\r\n order.append('y')\r\n elif parts[i].lower()=='rz':\r\n chan = 'Zrotation'\r\n order.append('z')\r\n elif parts[i].lower()=='tx':\r\n chan = 'Xposition'\r\n elif parts[i].lower()=='ty':\r\n chan = 'Yposition'\r\n elif parts[i].lower()=='tz':\r\n chan = 'Zposition'\r\n elif parts[i].lower()=='l':\r\n chan = 'length'\r\n self.vertices[0].meta['channels'].append(chan)\r\n # order is reversed compared to bvh\r\n self.vertices[0].meta['order'] = order[::-1]\r\n\r\n elif parts[0]=='axis':\r\n # order is reversed compared to bvh\r\n self.vertices[0].meta['axis_order'] = parts[1][::-1].lower()\r\n elif parts[0]=='position':\r\n self.vertices[0].meta['offset'] = [float(parts[1]),\r\n float(parts[2]),\r\n float(parts[3])]\r\n elif parts[0]=='orientation':\r\n self.vertices[0].meta['orientation'] = [float(parts[1]),\r\n float(parts[2]),\r\n float(parts[3])]\r\n lin = self.read_line(fid)\r\n return lin",
"def lmode(self):\n if self.islink():\n st = os.lstat(self.path)\n else:\n st = os.stat(self.path)\n return stat.S_IMODE(st.st_mode)",
"def get_exoid(runid):\n cpu_ws = np.array([0])\n io_ws = np.array([0])\n exoname = runid + \".exo\"\n exoid, ierr = exolib.py_excre(exoname, EX_CLOBBER, cpu_ws, io_ws)\n if ierr:\n raise ExodusIIWriterError(\"Error creating exodus output\")\n return exoname, exoid",
"def getUserCatalogOnFilespace(fs_id):\n result = None\n session = Queries.createSession()\n try:\n result = session.execute(sqlalchemy.select([Catalog])\n .where(Catalog.fs_id == fs_id)\n .order_by(asc(Catalog.id))\n ).fetchone()\n except sqlalchemy.exc.ArgumentError:\n print 'SQLAlchemy ERROR: Invalid or conflicting function argument is supplied'\n except sqlalchemy.exc.CompileError:\n print 'SQLAlchemy ERROR: Error occurs during SQL compilation'\n finally:\n session.close()\n return result",
"def getArmy(self, id):\n return self.__armies[id];",
"def fetch_eic(bin_lid,suffix='',tmp=None):\n if tmp is None:\n tmp = mkdirs(os.path.join(scratch(bin_lid),'tmp'))\n bin_url = DATA_NAMESPACE + bin_lid + '.json'\n eic = os.path.join(tmp,bin_lid + '.eic')\n with open(eic,'w') as fout:\n for tup in metadata2eic(bin_url):\n tup[0] = remove_extension(tup[0]) + suffix\n print >> fout, ' '.join(tup)\n return eic",
"def get_fpath(self, sid):\n\n\t\treturn os.path.join(self.cm.get_booknlp_dirpath(sid), 'characters.json')",
"def ui_getrow(self):\n return [self.locked*'L',self.idx,self.guid,printsz(self.size),\n printsz(self.cachesize),self.vendor,self.model,\n len(self.paths),len(self.partitions),len(self.usedinluns)\n ]",
"def GetOrientation(self):\r\n\r\n return self.orientation",
"def get_col2id( self, ratios_standardized, db ):\n\t\tcol_info_collection = db.col_info\n\t\tcol_name = []\n\t\tcol_id = []\n\t\tfor i in col_info_collection.find():\n\t\t\tcol_name.append(i[\"egrin2_col_name\"])\n\t\t\tcol_id.append(i[\"col_id\"])\n\t\tfor i in ratios_standardized.columns.values:\n\t \t\tif i not in col_name:\n\t \t\t\tcol_name.append( i )\n\t \t\t\tif len(col_id) > 0:\n\t\t\t\t\tcol_id.append(max(col_id)+1)\n\t\t\t\telse:\n\t\t\t\t\tcol_id.append(0)\n\t \tcol_info = pd.DataFrame( zip( col_id, col_name ), index = col_name, columns = [ \"col_id\", \"egrin2_col_name\"] )\n\t \treturn col_info",
"def landscape_info(self):\n return self._landscape_info",
"def get_office(office_id):\n\n office = OfficesModel().get_office_by_id(office_id)\n office = json.loads(office)\n if office:\n return make_response(jsonify({\n \"status\": \"200\",\n \"message\": \"success\",\n \"office\": office\n }), 200)\n return make_response(jsonify({\n \"status\": \"404\",\n \"message\": \"office not found\"\n }), 404)",
"def topods_face(self):\n return self.topods_shape()",
"def _get_sub_folder_id(self, base_folder_id):\n find_sub_folder = find_my_folder_by_name_by_searching_files(self.sub_folder_name)\n if not find_sub_folder:\n folder_id = create_folder_in_drive(self.sub_folder_name, base_folder_id)\n else:\n folder_id = find_my_folder_by_name_by_searching_files(self.sub_folder_name)['id']\n\n return folder_id",
"def get_room_by_id(self, id):\n if not isinstance(id, int):\n id = int(id)\n if self.rooms.has_key(id):\n return self.rooms[id]\n raise RuntimeError, \"Room not known\"",
"def get(isamAppliance, id, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieving the details for a particular federated directory\",\n \"/isam/runtime_components/federated_directories/{0}/v1\".format(id))",
"def get_curve_path(curves_dir, star_id):\n curve_file = \"%s.csv\" % star_id\n curve_path = path.join(curves_dir, curve_file)\n\n return curve_path",
"def _GetPath(self, kind, id):\n\n return { Database.RESOURCE : self.GetResourcePath,\n Database.TEST : self.GetTestPath,\n Database.SUITE : self.GetSuitePath } [kind] (id)",
"def enumerate_files(self, table):\n for i in range(self.nrofrecords()):\n data = self.bank.readrec(i + 1)\n if data and data[0] == table.tableid:\n yield i + 1, data[1:]"
] | [
"0.5392666",
"0.5238473",
"0.50075924",
"0.47091204",
"0.46686122",
"0.46168557",
"0.4579395",
"0.45001397",
"0.44533232",
"0.44359028",
"0.44122255",
"0.44016522",
"0.439454",
"0.4392099",
"0.43870813",
"0.43759003",
"0.4370313",
"0.43686095",
"0.43557945",
"0.43431535",
"0.43205103",
"0.43056014",
"0.42863637",
"0.42746025",
"0.4250367",
"0.4248899",
"0.42487305",
"0.4245979",
"0.42392868",
"0.42298105",
"0.4214074",
"0.4212386",
"0.42120156",
"0.42119342",
"0.42073113",
"0.42005956",
"0.41889936",
"0.4188251",
"0.41723165",
"0.41694856",
"0.41667947",
"0.4164088",
"0.41575468",
"0.41562164",
"0.41495648",
"0.41456017",
"0.41418898",
"0.41366568",
"0.41320843",
"0.41277435",
"0.41231176",
"0.41158292",
"0.41126123",
"0.40991908",
"0.4097771",
"0.40955725",
"0.40936783",
"0.40849495",
"0.4081236",
"0.40804368",
"0.40718383",
"0.40655258",
"0.40646",
"0.4063617",
"0.40606838",
"0.40589282",
"0.40423286",
"0.40385062",
"0.40301302",
"0.40296748",
"0.40286422",
"0.40257847",
"0.4024954",
"0.402481",
"0.40237996",
"0.40228766",
"0.40177095",
"0.40166622",
"0.40161842",
"0.40145105",
"0.40145105",
"0.40039957",
"0.40039197",
"0.39972782",
"0.3987539",
"0.39824167",
"0.3980265",
"0.39778712",
"0.39759997",
"0.39732352",
"0.39637613",
"0.39632276",
"0.39586884",
"0.39538777",
"0.39538196",
"0.39531246",
"0.39503223",
"0.39500585",
"0.39452192",
"0.39446673"
] | 0.65023 | 0 |
Attach a text label above each bar in rects, displaying its height. | def autolabel(rects):
for rect in rects:
height = rect.get_height()
ax.annotate('{}'.format(height),
xy=(rect.get_x() + rect.get_width() / 2, height),
xytext=(0, 3), # 3 points vertical offset
textcoords="offset points",
ha='center', va='bottom') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def autolabel(rects, ax):\n global BAR_NUMBER_SIZE\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2.,\n height,\n str(round(height, 1)),\n ha ='center',\n va ='bottom',\n size = 8)",
"def autolabel(rects): #source: [.........]\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., 1.05*height,\n '%d' % height.round(1),\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., height,\n \"{:.3f}\".format(height),\n ha='center', va='bottom')",
"def autolabel_bar(ax, rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2, height,\n '%.2f' % rect.get_height(),\n ha='center', va='bottom', weight='bold', size='xx-small')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n # ax.text(rect.get_x() + rect.get_width() / 2., 1.05 * height,\n # '%d' % int(height),\n # ha='center', va='bottom')",
"def attach_text_labels(rects, axes):\n\n for rect in rects:\n height = rect.get_height()\n label = \"{}\".format(height)\n axes.annotate(label,\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3),\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects, text, extra_height=0):\n for index, rect in enumerate(rects):\n\n height = rect.get_height()\n if extra_height != 0 and index == 2:\n extra_height = 0.5\n if extra_height != 0 and index == 0:\n extra_height = 2.5\n\n plt.text(rect.get_x() + rect.get_width() / 2., height + 4 + extra_height,\n text,\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n # ax.text(rect.get_x() + rect.get_width() / 2., 1.22 * height,\n # '%d' % int(height),\n # ha='center', va='bottom')",
"def autolabel(rects):\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.text(rect.get_x() + rect.get_width()/2., 1.0*height, '%d' % int(height), ha='center', va='bottom')",
"def autolabel(rects, ax):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()*0.5, 0.25*height,\n '%.3g' % height,\n ha='center', va='bottom')",
"def AutoLabel(rects, ax):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., 1.05*height,\n \"%d \" % int(height),\n ha=\"center\", va=\"bottom\")",
"def __autolabel(ax, rects):\n for rect in rects:\n height = rect.get_height()\n if math.isnan(height):\n continue\n w = rect.get_x() + rect.get_width()/2.\n ax.text(w, 1.05*height,\n '%d' % int(height),\n ha='center', va='bottom', fontsize='x-large')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., 1.05*height,\n '%.2f' % height,\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., height + 5,\n '%d' % int(height),\n ha='center', va='bottom',\n rotation=\"vertical\", fontsize=6)",
"def autolabel(rects, ax):\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.text(rect.get_x() + rect.get_width()/2, (height+0.2),\r\n '%.1f' % height,\r\n ha='center', va='bottom', fontsize=12)",
"def autolabel(rects):\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.text(rect.get_x() + rect.get_width()/2., 1.05*height,\r\n '%d' % int(height),\r\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., height+30,\n '%d%s' % (int(height),'%'),ha='center', va='bottom',size='smaller',color='k')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., 1.05*height,\n '%d' % int(height),\n ha='center', va='bottom')",
"def autolabel(ax, rects):\n for rect in rects:\n height = rect.get_height()\n if height > 90:\n factor_text = 0.8\n else:\n factor_text = 1.05\n ax.text(\n rect.get_x() + rect.get_width() / 2.0,\n (factor_text * height),\n f\"{height}\",\n ha=\"center\",\n va=\"bottom\",\n fontsize=32,\n )",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., 1.05*height,\n '%d' % int(height),\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n plt.text(rect.get_x() + rect.get_width()/2., 1*height,\n '%d' % int(height),\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(\n rect.get_x() + rect.get_width() / 2.,\n 1.005 * height,\n '%.1f' % height,\n ha='center',\n va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n plt.text(rect.get_x() + rect.get_width() / 2., 1.0 * height,\n '%d' % int(height),\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n height = np.round(height, 3)\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom',\n fontsize=20)",
"def autolabel(rects):\n\t for rect in rects:\n\t\theight = rect.get_height()\n\t\tax.text(rect.get_x() + rect.get_width()/2., 1.01*height,\n\t\t '%d' % int(height),\n\t\t ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n pyplot.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, -75), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom', rotation=90)",
"def autolabel_heights(self, ax, rects, rotation: int = 0):\n y_offset = 3 if rotation == 0 else 10\n for rect in rects:\n height = rect.get_height()\n if height == 0:\n continue\n\n ax.annotate(\n '{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, y_offset), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center',\n va='bottom',\n rotation=rotation)",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate(\"%.2f\"%(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom', fontsize=7)",
"def autolabel(ax, rects):\n # attach some text labels\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., 1.05*height,\n '%.3f' % height,\n ha='center', va='bottom')",
"def autolabel(ax, bars):\n for bar in bars:\n height = bar.get_height()\n ax.annotate('{:.1f}'.format(height),\n xy=(bar.get_x() + bar.get_width() / 3, height),\n xytext=(5, 5), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom',\n fontsize=20)",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(np.round(height, 2)),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = float(\"%.3f\" % (rect.get_height()))\n ax.annotate(\n \"{}\".format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha=\"center\",\n va=\"bottom\",\n )",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{0:.2f}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects, ylim):\n for rect in rects:\n height = rect.get_height()\n label_y = 1.05 * height if 1.10 * height < ylim else 0.75 * ylim\n ax.text(rect.get_x() + rect.get_width()/2., label_y,\n '%d' % int(height),\n ha='center', va='bottom',\n rotation='vertical')",
"def autolabel(rects):\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.annotate('{}'.format(height),\r\n xy=(rect.get_x() + rect.get_width() / 2, height),\r\n xytext=(0, 3), # 3 points vertical offset\r\n textcoords=\"offset points\",\r\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax3.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 2), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects, ax, c):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., height,\n \"%.2f\" % (height),\n ha='center', va='top', color = c)",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax5.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 2), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(np.around(height,2)),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.annotate('{}'.format(height),\r\n xy=(rect.get_x() + rect.get_width() / 2, height),\r\n xytext=(0, 3), # 3 points vertical offset\r\n textcoords=\"offset points\",\r\n ha='center', va='bottom')",
"def autolabel(rects):\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.annotate('{}'.format(height),\r\n xy=(rect.get_x() + rect.get_width() / 2, height),\r\n xytext=(0, 3), # 3 points vertical offset\r\n textcoords=\"offset points\",\r\n ha='center', va='bottom')",
"def autolabel(rects):\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.annotate('{}'.format(height),\r\n xy=(rect.get_x() + rect.get_width() / 2, height),\r\n xytext=(0, 3), # 3 points vertical offset\r\n textcoords=\"offset points\",\r\n ha='center', va='bottom')",
"def autolabel(rects,ax):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate(\n \"{}\".format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha=\"center\",\n va=\"bottom\",\n )",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate(\n \"{}\".format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha=\"center\",\n va=\"bottom\",\n )",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{:.1f}'.format(height/1e9),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 3, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects, ax):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords='offset points',\n ha='center', va='bottom', color='white', size=10)",
"def autolabel(rects, ax):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects, ax):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(ax, bars):\n for bar in bars:\n height = bar.get_height()\n ax.annotate(\n \"{:.1f}\".format(height),\n xy=(bar.get_x() + bar.get_width() / 2, height),\n # 3 points vertical offset\n xytext=(0, 3),\n textcoords=\"offset points\",\n ha=\"center\",\n va=\"bottom\",\n )",
"def autolabel(ax, rects, thresh):\n for rect in rects:\n height = rect.get_height()\n width = rect.get_width()\n if height > thresh:\n color = \"green\"\n else:\n color = \"black\"\n\n if height != 0:\n ax.text(\n rect.get_x() + width / 2.,\n width + 1. * height,\n \"%d\" % int(height),\n ha=\"center\",\n va=\"bottom\",\n color=color,\n size=14,\n )\n return ax",
"def autolabel(rects):\n for rect in rects:\n\n height = rect.get_height()\n cof=0\n if(height<0):\n cof=(min_y/100.0)*5\n ax.annotate(\"%.2f\"%(height),\n xy=(rect.get_x() + rect.get_width() / 2, height+cof),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom', fontsize=7)",
"def autolabel(rects):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3),\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(ax, rects, vals, fsize):\n for i in range(len(rects)):\n rect = rects[i]\n val = vals[i]\n# for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., 1.0*height,\n '%d' % int(val), fontsize=fsize,\n ha='center', va='bottom')",
"def autolabel(rects,ax):\r\n for rect in rects:\r\n height = rect.get_height()\r\n ax.annotate('{}'.format(height),\r\n xy=(rect.get_x() + rect.get_width() / 2, height),\r\n xytext=(0, 2), \r\n textcoords=\"offset points\",\r\n ha='center', va='bottom', rotation=0)",
"def autolabel(self, rects, counts):\n for x, rect in zip(counts, rects):\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()/2., height,\n 'n = ' + str(x),\n ha='center', va='bottom')",
"def autolabel(ps):\n for rect in ps:\n height = np.round(rect.get_height(), 2)\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects,ax,total_count=None,step=1,):\n for index in np.arange(len(rects),step=step):\n rect = rects[index]\n height = rect.get_height()\n # print height\n if not total_count is None:\n ax.text(rect.get_x() + rect.get_width()/2., 1.005*height,\n '{:}\\n({:.6f})'.format(int(height),height/float(total_count)),\n ha='center', va='bottom')\n else:\n ax.text(rect.get_x() + rect.get_width()/2., 1.005*height,\n '{:}'.format(int(height)),\n ha='center', va='bottom')",
"def autolabel(fig, rects):\n for rect in rects:\n height = rect.get_height()\n fig.annotate('%.2f' % height,\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects):",
"def autolabel(rects, ax, fprop=None):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{:.2}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom', fontproperties=fprop)",
"def autolabel(rects, ax, fmt='{}'):\n for rect in rects:\n height = rect.get_height()\n ax.annotate(fmt.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, abs(height)),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects, ax):\n # Get y-axis height to calculate label position from.\n (y_bottom, y_top) = ax.get_ylim()\n y_height = y_top - y_bottom\n\n for rect in rects:\n height = rect.get_height()\n label_position = height + (y_height * 0.01)\n\n ax.text(rect.get_x() + rect.get_width() / 2., label_position,\n '%d' % int(height),\n ha='center', va='bottom')",
"def autolabel(rects, scale):\n\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(round(height * scale, 0)/scale),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects, ax, fontsize=12):\n #for times new roman fonts, see: https://stackoverflow.com/questions/33955900/matplotlib-times-new-roman-appears-bold\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{:.2f}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n zorder=100,\n ha='center', va='bottom', fontname=\"Times New Roman\", fontsize=fontsize)",
"def autolabel(rects, ax, offset=0):\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2+offset, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel_horizontal(rects,ax):\n for rect in rects:\n width = rect.get_width()\n ax.text(rect.get_x() + rect.get_width()+3, rect.get_y() + rect.get_height()/2.,\n '%.2f' % width,\n ha='center', va='center', color='black', fontsize=15)",
"def autolabel(rects, r, p):\n for j in range(len(rects)):\n rect = rects[j]\n height = rect.get_width()\n # print(\"height: \", height)\n ax.annotate( \"F1: \" + '{}'.format(height) + \" (P: \" + str(p[j]) + \"; R: \" + str(r[j]) + \")\",\n xy=(height, rect.get_y() + rect.get_height() / 2),\n xytext=(90, -9), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom', fontsize=15)",
"def autolabel(rects):\n #for rect in rects:\n for i in range(len(rects)):\n rect = rects[i]\n height = rect.get_height()\n ax.annotate('{}'.format(('%.2f' % (height)) + '% of\\n' + ('%d' % range_data[i].shape[0]) + ' people' ),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom')",
"def autolabel(rects, rotation=0):\n ax = plt.gca()\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, 0),\n xytext=(0, 3), # 3 points vertical offset\n textcoords=\"offset points\",\n ha='center', va='bottom', color=(1,1,1),\n rotation=rotation)",
"def autolabel(X_pos,values,height_lift):\r\n\theight= np.round(np.nan_to_num(values),2);y_pos = height_lift*height\r\n\tfor i in range(len(height)):\r\n\t\tax.text(X_pos[i],y_pos[i],'%4.2f' % height[i], ha='center', va='bottom',size=4)",
"def autolabel(rects,array,axis,dist):\n ctr = 0\n label_array = [EM.truncate(v*100,1) for v in array]\n for entry in range(len(label_array)):\n if(label_array[entry]>=0) and (label_array[entry]<=1):\n label_array[entry] = EM.truncate(array[entry]*100,2)\n\n\n for rect in rects:\n height = rect.get_height()\n if(axis=='1'):\n ax1.text(rect.get_x() + rect.get_width()/2., height+dist,\n label_array[ctr],fontsize=fonts[3],\n #'%d' % int(height),\n ha='center', va='bottom',rotation=90)\n elif(axis=='2'):\n ax2.text(rect.get_x() + rect.get_width()/2., height+dist,\n label_array[ctr],fontsize=fonts[3],\n #'%d' % int(height),\n ha='center', va='bottom',rotation=90)\n elif(axis=='3'):\n ax3.text(rect.get_x() + rect.get_width()/2., height+dist,\n label_array[ctr],fontsize=fonts[3],\n #'%d' % int(height),\n ha='center', va='bottom',rotation=90)\n elif(axis=='4'):\n ax4.text(rect.get_x() + rect.get_width()/2., height+dist,\n label_array[ctr],fontsize=fonts[3],\n #'%d' % int(height),\n ha='center', va='bottom',rotation=90)\n ctr = ctr + 1",
"def autolabel(rects, n, add_value=[]):\n if rects.__len__() == add_value.__len__() and abs_val_legend:\n for rect, val in zip(rects, add_value):\n height = rect.get_height()\n if not (np.isnan(height) or height == 0):\n ax.text(rect.get_x() + rect.get_width()/2., 1.03 * height,\n ('%1.' + str(n) + 'f') % height + '\\n' + val + '',\n ha='center', va='bottom')\n else:\n for rect in rects:\n height = rect.get_height()\n if not (np.isnan(height) or height == 0):\n ax.text(rect.get_x() + rect.get_width()/2., 1.07* height,\n ('%1.' + str(n) + 'f') % height,\n ha='center', va='bottom')",
"def add_value_labels(ax, spacing=5):\n\n # For each bar: Place a label\n for rect in ax.patches:\n # Get X and Y placement of label from rect.\n y_value = rect.get_height()\n x_value = rect.get_x() + rect.get_width() / 2\n\n # Number of points between bar and label. Change to your liking.\n space = spacing\n # Vertical alignment for positive values\n va = 'bottom'\n\n # If value of bar is negative: Place label below bar\n if y_value < 0:\n # Invert space to place label below\n space *= -1\n # Vertically align label at top\n va = 'top'\n\n # Use Y value as label and format number with one decimal place\n label = \"{:.1f}\".format(y_value)\n\n # Create annotation\n ax.annotate(\n label, # Use `label` as label\n (x_value, y_value), # Place label at end of the bar\n xytext=(0, space), # Vertically shift label by `space`\n textcoords=\"offset points\", # Interpret `xytext` as offset in points\n ha='center', # Horizontally center label\n va=va,fontsize=10, weight='bold') # Vertically align label differently for\n # positive and negative values.",
"def autolabel(ax, rects, xpos='center'):\n\n xpos = xpos.lower() # normalize the case of the parameter\n ha = {'center': 'center', 'right': 'left', 'left': 'right'}\n offset = {'center': 0.5, 'right': 0.57, 'left': 0.43} # x_txt = x + w*off\n\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()*offset[xpos], 1.01*height,\n '{}'.format(height), ha=ha[xpos], va='bottom')",
"def autolabel(rects, xpos='center'):\n\n xpos = xpos.lower() # normalize the case of the parameter\n ha = {'center': 'center', 'right': 'left', 'left': 'right'}\n offset = {'center': 0.5, 'right': 0.57, 'left': 0.43} # x_txt = x + w*off\n\n for rect in rects:\n \n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()*offset[xpos], 1.01*height,\n '{}'.format(height), ha=ha[xpos], va='bottom')",
"def autolabel(rects, xpos='center'):\n\n ha = {'center': 'center', 'right': 'left', 'left': 'right'}\n offset = {'center': 0, 'right': 1, 'left': -1}\n\n for rect in rects:\n height = rect.get_height().round(2)\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(offset[xpos]*3, 3), \n textcoords=\"offset points\", \n ha=ha[xpos], va='bottom', fontsize=14)",
"def autolabel(rects, ax, format='{}', xpos='center'):\n\n xpos = xpos.lower() # normalize the case of the parameter\n ha = {'center': 'center', 'right': 'left', 'left': 'right'}\n offset = {'center': 0.5, 'right': 0.57, 'left': 0.43} # x_txt = x + w*off\n\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()*offset[xpos], 1.01*height,\n format.format(height), ha=ha[xpos], va='bottom')",
"def autolabel(rects, xpos='center'):\n\n xpos = xpos.lower() # normalize the case of the parameter\n ha = {'center': 'center', 'right': 'left', 'left': 'right'}\n offset = {'center': 0.5, 'right': 0.57, 'left': 0.43} # x_txt = x + w*off\n\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width()*offset[xpos], 1.01*height,\n '{}'.format(height), ha=ha[xpos], va='bottom')",
"def add_annotations(self):\n for i in range(8):\n self.text.append(self.canvas.create_text(-self.width / 2,\n (self.width / 2) + (i * self.width),\n font=(\"Purisa\", 12), anchor=\"nw\"))\n self.canvas.itemconfig(self.text[i], text=str((i - 8) * -1))\n for i in range(8):\n self.text.append(self.canvas.create_text((self.width / 2) + (i * self.width),\n self.width * 8 + 10, font=(\"Purisa\", 12), anchor=\"nw\"))\n self.canvas.itemconfig(self.text[i + 8], text=string.ascii_lowercase[i])",
"def autolabel(rects, xpos='center'):\n\n ha = {'center': 'center', 'right': 'left', 'left': 'right'}\n offset = {'center': 0, 'right': 1, 'left': -1}\n\n for rect in rects:\n height = rect.get_height()\n ax.annotate('{}'.format(height),\n xy=(rect.get_x() + rect.get_width() / 2, height),\n xytext=(offset[xpos]*2, 2), # use 3 points offset\n textcoords=\"offset points\", # in both directions\n ha=ha[xpos], va='bottom')",
"def getVerticalLabels(labels, font, textGap):\n\n maxWidth = 0\n height = 0\n textHeight = font.getsize(\"testq\")[1]\n for label in labels:\n maxWidth = max(maxWidth, font.getsize(label)[0])\n if height > 0: height += textGap\n height += textHeight\n size = (maxWidth, height)\n textCanvas = Image.new(\"RGB\", size, WHITE)\n textdraw = ImageDraw.Draw(textCanvas)\n py = 0\n for label in labels:\n indent = (maxWidth - font.getsize(label)[0]) / 2\n textdraw.text((indent, py), label, font=font, fill=(0,0,0))\n py += textHeight + textGap\n return textCanvas.rotate(90)",
"def drawUI(self):\n cv2.rectangle(self.root, (0, self.height - 80), (self.width, self.height), (50, 50, 50), -1) # bar\n cv2.putText(self.root, 'Zavri s Q...', (20, self.height - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 1, cv2.LINE_AA)",
"def barPlot2():\n n = 10\n X = np.arange(n)\n Y1 = (1-X/float(n)) * np.random.uniform(0.5,1.0,n)\n plt.bar(X, +Y1, facecolor='#9999ff', edgecolor='white')\n\n for x,y in zip(X,Y1):\n plt.text(x+0.2, y+0.05, '%.2f' % y, ha='center', va= 'bottom')\n\n plt.ylim(0,1.25)\n plt.show()",
"def autolabel(rects, ax, model_op, xpos='center', ypos = 'up'):\n\n xpos = xpos.lower() # normalize the case of the parameter\n ypos = ypos.lower()\n va = {'center': 'center', 'top': 'bottom', 'bottom': 'top'}\n ha = {'center': 'center', 'left': 'right', 'right': 'left'}\n offset = {'center': 0.5, 'top': 0.57, 'bottom': 0.43} # x_txt = x + w*off\n\n for rect, std in zip(rects, model_op['std'].values):\n width = rect.get_width()\n ax.text(1.01 * width, rect.get_y() + rect.get_height() * offset[ypos],\n '{0:.2f}'.format(round(width,2)) + u'\\u00b1' + '{0:.2f}'.format(round(std,2)),\n va=va[ypos], ha=ha[xpos], rotation=0)",
"def stat_display_labels(parent, text, labels, row=0, column=0, columnspan=1):\n\n frame = tk.LabelFrame(parent, text=text, padx=5, pady=5)\n frame.grid(\n row=row, column=column, padx=5, pady=5, sticky=\"w\", columnspan=columnspan\n )\n stats_label = tk.Label(frame, text=\"\\n\".join(labels), justify=\"right\")\n stats_label.grid(row=0, column=0)\n return frame",
"def draw_bar(t, height):\n t.begin_fill() # Added this line\n t.left(90)\n t.forward(height)\n # t.write(\" \"+ str(height))\n t.right(90)\n t.forward(10)\n t.right(90)\n t.forward(height)\n t.left(90)\n t.end_fill() # Added this line\n t.forward(10)",
"def subplotLabel(axs):\n for ii, ax in enumerate(axs):\n ax.text(-0.2, 1.2, ascii_uppercase[ii], transform=ax.transAxes, fontsize=16, fontweight=\"bold\", va=\"top\")"
] | [
"0.807113",
"0.79336554",
"0.7861467",
"0.7835557",
"0.7822906",
"0.77769476",
"0.7776375",
"0.7745001",
"0.7730597",
"0.7730573",
"0.7708679",
"0.77040344",
"0.7690377",
"0.7677892",
"0.76754576",
"0.76730984",
"0.76562864",
"0.7648832",
"0.7645011",
"0.7640306",
"0.76307166",
"0.76046956",
"0.75935924",
"0.7584293",
"0.7583782",
"0.7559714",
"0.7480944",
"0.74619603",
"0.7454694",
"0.74375594",
"0.7421532",
"0.7419154",
"0.74096423",
"0.74053675",
"0.7395104",
"0.73941106",
"0.73910624",
"0.7385536",
"0.73785794",
"0.737723",
"0.73654854",
"0.73654854",
"0.73654854",
"0.7360457",
"0.73544943",
"0.73544943",
"0.73536825",
"0.7351217",
"0.7351217",
"0.7351217",
"0.7351217",
"0.7351217",
"0.7351217",
"0.7351217",
"0.73424345",
"0.7333629",
"0.7326828",
"0.7326828",
"0.7316663",
"0.7298067",
"0.7276161",
"0.7274236",
"0.72647935",
"0.7260917",
"0.7219177",
"0.7195712",
"0.7195695",
"0.7195655",
"0.7145975",
"0.7144318",
"0.71108353",
"0.7109291",
"0.7043364",
"0.6981907",
"0.6960407",
"0.69353414",
"0.69238454",
"0.69028556",
"0.6836462",
"0.66919726",
"0.6654711",
"0.65831035",
"0.6577919",
"0.63006145",
"0.6241199",
"0.61822736",
"0.6153809",
"0.6148378",
"0.61444855",
"0.6140762",
"0.60102195",
"0.59365153",
"0.5921276",
"0.59200263",
"0.5911303",
"0.590813",
"0.5906624"
] | 0.7365519 | 43 |
A greenlet for handling a single client. | def _process_socket(self, client):
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException:
pass
except:
log.error(traceback.format_exc())
itrans.close()
otrans.close() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def client():",
"def handle_client(self):\n e = threading.Event()\n reg_t = threading.Thread(target=self.handle_reg_client, args=(e,))\n stream_t = threading.Thread(target=self.handle_stream_client,\n args=(e,))\n reg_t.start()\n stream_t.start()",
"def client():\n\n client = Client()\n return client",
"def _accept_client(self, client_reader, client_writer):\n\n print(\"New client\", client_reader, client_writer)\n # start a new Task to handle this specific client connection\n task = asyncio.Task(self._handle_client(client_reader, client_writer))\n self.clients[task] = (client_reader, client_writer)\n\n def client_done(task):\n print(\"client task done:\", task, file=sys.stderr)\n del self.clients[task]\n\n task.add_done_callback(client_done)",
"async def start_client(self) -> None:\n\n if hasattr(Config().algorithm,\n 'cross_silo') and not Config().is_edge_server():\n # Contact one of the edge servers\n logging.info(\"[Client #%d] Contacting Edge server #%d.\",\n self.client_id, self.edge_server_id)\n else:\n logging.info(\"[Client #%d] Contacting the central server.\",\n self.client_id)\n uri = 'ws://{}:{}'.format(Config().server.address, self.server_port)\n\n try:\n async with websockets.connect(uri,\n ping_interval=None,\n max_size=2**30) as websocket:\n logging.info(\"[Client #%d] Signing in at the server.\",\n self.client_id)\n\n await websocket.send(pickle.dumps({'id': self.client_id}))\n\n while True:\n logging.info(\"[Client #%d] Waiting to be selected.\",\n self.client_id)\n server_response = await websocket.recv()\n data = pickle.loads(server_response)\n\n if data['id'] == self.client_id:\n self.process_server_response(data)\n logging.info(\"[Client #%d] Selected by the server.\",\n self.client_id)\n\n if not self.data_loaded:\n self.load_data()\n\n if 'payload' in data:\n server_payload = await self.recv(\n self.client_id, data, websocket)\n self.load_payload(server_payload)\n\n heartbeat_proc = Process(\n target=Client.heartbeat_process,\n args=(self.client_id, ))\n heartbeat_proc.start()\n report, payload = await self.train()\n heartbeat_proc.terminate()\n\n if Config().is_edge_server():\n logging.info(\n \"[Server #%d] Model aggregated on edge server (client #%d).\",\n os.getpid(), self.client_id)\n else:\n logging.info(\"[Client #%d] Model trained.\",\n self.client_id)\n\n # Sending the client report as metadata to the server (payload to follow)\n client_report = {\n 'id': self.client_id,\n 'report': report,\n 'payload': True\n }\n await websocket.send(pickle.dumps(client_report))\n\n # Sending the client training payload to the server\n await self.send(websocket, payload)\n\n except OSError as exception:\n logging.info(\"[Client #%d] Connection to the server failed.\",\n self.client_id)\n logging.error(exception)",
"def client(self):\n raise NotImplementedError()",
"def clients():\n pass",
"def serveThread(self):\r\n while True:\r\n try:\r\n client = self.clients.get()\r\n self.serveClient(client)\r\n except Exception, x:\r\n logging.exception(x)",
"def client():\n yield tests.example_server.app.test_client()",
"def _dummy_client(self):\n logger.warning('Running dummy client for task #%s',\n self.task_data.get('task_id', 0))\n Client(LISTENER_ADDRESS).close()",
"def client(self):\n return self._thread._client",
"def client(clients: int) -> None:\n from DLA.server.client import run_clients\n asyncio.run(run_clients(clients))",
"def client():\n _, p, _ = docker_run_etcd_main()\n c = Client(host, p, protocol)\n yield c\n c.close()",
"def handle_reg_client(self, event):\n try:\n while True:\n client_req = self.receive_msg()\n self.choose_action(client_req[ZERO], client_req[ONE:], event)\n except socket.error as e:\n print(e)",
"def client(self):\n\t\t# pylint: disable=invalid-name\n\t\treturn self._client",
"def sanic_client(loop):\n clients = []\n\n async def create_client(app, **kwargs):\n client = TestClient(app, **kwargs)\n await client.start_server()\n clients.append(client)\n return client\n\n yield create_client\n\n # Clean up\n if clients:\n for client in clients:\n loop.run_until_complete(client.close())",
"def handle_client(client): # Takes client socket as argument.\r\n name = client.recv(BUFSIZ).decode(\"utf8\")\r\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit.' % name\r\n client.send(bytes(welcome, \"utf8\"))\r\n msg = \"%s has joined the chat!\" % name\r\n broadcast(bytes(msg, \"utf8\"))\r\n clients[client] = name\r\n while True:\r\n msg = client.recv(BUFSIZ)\r\n if msg != bytes(\"{quit}\", \"utf8\"):\r\n broadcast(msg, name+\": \")\r\n else:\r\n client.send(bytes(\"{quit}\", \"utf8\"))\r\n client.close()\r\n del clients[client]\r\n broadcast(bytes(\"%s has left the chat.\" % name, \"utf8\"))\r\n break",
"def client(self, msg, *args, **kwargs):\r\n return log(self.CLIENT, msg, *args, **kwargs)",
"def handle_client(client, name): # Takes client socket as argument.\n\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit.' % name\n client.send(bytes(welcome, \"utf8\"))\n msg = \"%s has joined the chat!\" % name\n broadcast(bytes(msg, \"utf8\"))\n clients[client] = name\n\n while True:\n msg = client.recv(BUFSIZ)\n if msg != bytes(\"{quit}\", \"utf8\"):\n broadcast(msg, name+\": \")\n else:\n client.send(bytes(\"{quit}\", \"utf8\"))\n client.close()\n del clients[client]\n broadcast(bytes(\"%s has left the chat.\" % name, \"utf8\"))\n break",
"def client_request(self, evt):\n threads.deferToThread(self.cli_db.accept, evt)",
"def listen_for_client(self):\n #PART 2:LISTEN FOR CLIENT We wait for the clients connection request and once a\n #successful connection is made we dispatch the request in a separate thread,\n #making ourselves available for the next request.\n #This allows us to handle multiple requests simultaneously which boosts the performance of the \n #server multifold times. -> we need a function for threading and to get client name!!!\n\n\n while True:\n (clientSocket, client_address) = self.serverSocket.accept() # Establish the connection\n d = threading.Thread(name=self._getClientName(client_address), target=self.proxy_thread, args=(clientSocket, client_address))\n d.setDaemon(True)\n d.start()\n self.shutdown(0,0)",
"def client(self, reactor, serverAddress):\n raise NotImplementedError()",
"def run_client(self, event_loop, irc_client):\n # Deliberately written in \"synchronous\" style with run_until_complete()\n # instead of await because async generators don't work in Python 3.5.\n with self.mock_open_connection():\n # Start the client\n run_fut = event_loop.create_task(irc_client.run())\n event_loop.run_until_complete(irc_client.connected.wait())\n # Allow the test to run\n yield\n # Cleanly end the read loop and wait for client to exit\n irc_client.disconnect()\n event_loop.run_until_complete(run_fut)",
"def handle_client(client): # Takes client socket as argument.\n\n name = client.recv(BUFSIZ).decode(\"utf8\")\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit.' % name\n client.send(bytes(welcome))\n msg = \"%s has joined the chat!\" % name\n broadcast(bytes(msg))\n clients[client] = name\n\n while True:\n msg = client.recv(BUFSIZ)\n if msg == bytes(\"{quit}\"):\n client.send(bytes(\"{quit}\"))\n client.close()\n del clients[client]\n broadcast(bytes(\"%s has left the chat.\" % name))\n break\n elif msg[0:7] == bytes(\"{emoji}\"):\n broadcast(msg, \"\")\n else:\n broadcast(msg, name + \": \")",
"def Client(self) -> Socket:",
"def Client(self) -> Socket:",
"def client(sandbox):\n sandbox.add_node(0)\n client = sandbox.client(0)\n yield client",
"def client(self):\n return self._client",
"def handle_client(client): # Takes client socket as argument.\n\tname = client.recv(2048).decode(\"utf8\")\n\twelcome = 'Welcome %s! Enter {quit} to exit.' % name\n\ttry:\n\t\tclient.send(bytes(welcome, \"utf8\"))\n\t\tmsg = \"%s: has joined the chat!\" % name\n\t\tbroadcast(bytes(msg, \"utf8\"))\n\t\tclients[client] = name\n\t\ttemp_client = {'Address':addresses[client],'Name':clients[client]}\n\t\tactive.append(temp_client)\n\t\tbroadcast(bytes(str(active),'utf-8'))\n\t\twhile True:\n\t\t\tmsg = client.recv(2048)\n\t\t\ttry:\n\t\t\t\tif '(' in msg.decode('utf-8') and ')' in msg.decode('utf-8'):\n\t\t\t\t\ttemp = msg.decode('utf-8').split(')')\n\t\t\t\t\taddress = temp[0] + ')'\n\t\t\t\t\tprivate_message(address,temp[1])\n\t\t\t\telif msg != bytes(\"{quit}\", \"utf8\"):\n\t\t\t\t\tbroadcast(msg, \"<global>\" + name + \": \")\n\t\t\t\t\tprint(client)\n\t\t\t\telse:\n\t\t\t\t\t#client.send(bytes(\"{quit}\", \"utf8\"))\n\t\t\t\t\tclient.close()\n\t\t\t\t\tactive.remove({'Address':addresses[client],'Name':clients[client]})\n\t\t\t\t\tdel clients[client]\n\t\t\t\t\tbroadcast(bytes(\"%s has left the chat.\" % name, \"utf8\"))\n\t\t\t\t\tbroadcast(bytes(str(active),'utf-8'))\n\t\t\t\t\tbreak\n\t\t\texcept:\n\t\t\t\tprint(msg)\n\t\t\t\tbroadcast_file(msg)\n\texcept Exception as e:\n\t\tprint(e)",
"def handle_accept(self):\r\n pair = self.accept()\r\n if pair is not None:\r\n sock, addr = pair\r\n server_log.info('Client connection from {}, assigning client id {}'.format(repr(addr), self.client_id))\r\n handler = ClientHandler(sock, addr, self.client_id)\r\n self.client_list.update({self.client_id: handler})\r\n self.client_id += 1",
"def server(conn, address):\n print(\"Client Connection Open\")\n while True:\n request = server_read(conn)\n if request:\n print(request)\n manage_client(request, conn)",
"def handle_client(self,conn,addr):\n print(f\"[NEW CONNECTION] {addr} connected\")\n client_id = \"\"\n connected = True\n while connected:\n try:\n try:\n msg_length = conn.recv(PREFIX).decode(FORMAT)\n except:\n print(f\"[{addr}] DISCONNECTED\")\n self.handle_unexpected_disconnect(client_id,conn)\n return\n\n if msg_length:\n try:\n msg_length = int(msg_length)\n try:\n raw_msg = conn.recv(msg_length).decode(FORMAT)\n except:\n print(f\"[{addr}] DISCONNECTED\")\n self.handle_unexpected_disconnect(client_id,conn)\n return\n message = json.loads(raw_msg)\n except ValueError:\n message = FAILURE_MESSAGE\n\n if message[\"HEADER\"] == DISCONNECT_MESSAGE:\n connected = False\n self.handle_disconnect(message,conn)\n\n elif message[\"HEADER\"] == \"CREATE\":\n session_id = \"\".join(random.choices(string.ascii_uppercase + string.digits, k = 4))\n indentifer = json.loads(message[\"MESSAGE\"])\n tokenDict = json.loads(indentifer[\"spotify_token\"])\n client_id = message[\"ID\"]\n self.create_session(session_id, message[\"ID\"], indentifer[\"display_name\"], tokenDict)\n self.add_connection_entry(message[\"ID\"], indentifer[\"display_name\"], session_id, True, conn, addr)\n self.create_spotify_player(session_id)\n if not self.sessions[session_id][\"HOST\"][\"spotify_player\"].is_spotify_running():\n self.send(\"STC\", client_id, \"PLEASE START SPOTIFY\")\n\n self.send(\"SESSION_ID\", client_id, str(session_id))\n\n elif message[\"HEADER\"] == \"GET_CURRENT_SONG\":\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n if not self.sessions[session_id][\"HOST\"][\"spotify_player\"].is_spotify_running():\n self.send(\"STC\", client_id, \"PLEASE START SPOTIFY\")\n else:\n current_track = {}\n current_track[\"name\"] = player.sp.currently_playing()['item']['name']\n current_track[\"artist\"] = player.sp.currently_playing()['item']['album']['artists'][0]['name']\n track_json = json.dumps(current_track)\n self.send(\"CURRENT_SONG\", message[\"ID\"],track_json)\n\n elif message[\"HEADER\"] == \"SKIP\":\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n session_id = self.get_session_from_user(message[\"ID\"])\n session_queue = self.get_session_queue(session_id)\n if len(session_queue) > 0:\n player.add_to_queue(session_queue[0][1])\n session_queue.pop(0)\n self.send_queue_update(session_id)\n player.next_track()\n\n elif message[\"HEADER\"] == \"REWIND\":\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n player.previous_track()\n\n elif message[\"HEADER\"] == \"PLAY\":\n session_id = self.get_session_from_user(message[\"ID\"])\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n player.toggle_playback()\n\n elif message[\"HEADER\"] == \"SEARCH\":\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n song = message[\"MESSAGE\"]\n self.send(\"SEARCH_RESULTS\", message[\"ID\"], json.dumps(player.search(song)))\n\n\n\n\n elif message[\"HEADER\"] == \"ADD_TO_QUEUE\":\n track_data = json.loads(message[\"MESSAGE\"])\n self.add_to_session_queue(message[\"ID\"], (track_data[\"name\"],track_data['uri']))\n session_id = self.get_session_from_user(message[\"ID\"])\n\n\n elif message[\"HEADER\"] == \"QUEUE_UPDATE\":\n options = json.loads(message[\"MESSAGE\"])\n self.update_queue(message[\"ID\"],options)\n\n elif message[\"HEADER\"] == \"GET_USERS\":\n session_id = self.get_session_from_user(message[\"ID\"])\n users = self.sessions[session_id][\"USERS\"]\n 
self.send(\"USERS\", message[\"ID\"], json.dumps(users))\n\n elif message[\"HEADER\"] == \"SET_PERMISSION\":\n msg = json.loads(message[\"MESSAGE\"])\n session_id = self.get_session_from_user(message[\"ID\"])\n self.change_user_permissions(session_id, msg[\"client_id\"], msg[\"permission\"])\n new_permissions = {}\n new_permissions[\"permission\"] = msg[\"permission\"]\n new_permissions[\"value\"] = self.sessions[session_id][\"USERS\"][msg[\"client_id\"]][\"permissions\"][msg[\"permission\"]]\n self.send(\"PERMISSION_UPDATE\",msg[\"client_id\"], json.dumps(new_permissions))\n\n elif message[\"HEADER\"] == \"JOIN\":\n msg = json.loads(message[\"MESSAGE\"])\n session_id = msg[\"session_id\"]\n if session_id in self.sessions.keys():\n self.add_user_to_session(session_id,message[\"ID\"],msg[\"display_name\"])\n self.add_connection_entry(message[\"ID\"],msg[\"display_name\"],session_id, False, conn, addr)\n client_id = message[\"ID\"]\n\n session_info = {}\n session_info[\"session_id\"] = session_id\n session_info[\"host\"] = self.sessions[session_id][\"HOST\"][\"NAME\"]\n\n self.send(\"SESSION_INFO\", message[\"ID\"], json.dumps(session_info))\n self.send(\"QUEUE_UPDATE\", message[\"ID\"], json.dumps(self.get_session_queue(session_id)))\n self.broadcast_to_session(session_id,\"USERS\", json.dumps(self.sessions[session_id][\"USERS\"]))\n else:\n self.add_connection_entry(message[\"ID\"],msg[\"display_name\"],session_id, False, conn, addr)\n self.send(\"FAILURE\", message[\"ID\"], \"Session does not exist\")\n self.send(DISCONNECT_MESSAGE,message[\"ID\"],DISCONNECT_MESSAGE)\n self.delete_connection_entry(message[\"ID\"])\n break\n elif message[\"HEADER\"] == \"SET_PERMISSIONS\":\n msg = json.loads(message[\"MESSAGE\"])\n user_id = msg[\"client_id\"]\n permissions = json.loads(msg[\"permissions\"])\n for key in permissions.keys():\n self.set_permissions(user_id,key,permissions[key])\n self.print_sessions()\n\n elif message[\"HEADER\"] == \"BROADCAST_S\":\n session_id = self.connections[message[\"ID\"]][\"session_id\"]\n self.broadcast_to_session(session_id,\"BROADCAST_S\", message[\"MESSAGE\"])\n elif message[\"HEADER\"] == \"BROADCAST\":\n self.broadcast_to_all(\"BROADCAST\", message[\"MESSAGE\"])\n\n elif message[\"HEADER\"] == \"PLAYBACK\":\n session_id = self.connections[message[\"ID\"]][\"session_id\"]\n sp = self.sessions[session_id][\"HOST\"][\"spotify_player\"]\n if not sp.toggle_playback():\n self.broadcast_to_session(self.get_session_from_user(client_id), \"FAILURE\", \"Please Start Spotify\")\n\n else:\n print(message[\"MESSAGE\"])\n except Exception as ex:\n print(str(ex))\n\n print(\"Thread Closing\")",
"def handler(self):\n\t\tself.exitClient()",
"def Client(self):\n return self._client",
"def run(self):\n client = ProcessorClient()\n try:\n client.connect(self.address)\n except Exception as e:\n self.error = e\n logging.error(e)\n else:\n self.clients[self.name] = client",
"def client(self):\r\n if self._client is None:\r\n self._client = self._client_cls(self._server, self._params, self)\r\n return self._client",
"def createHandlerClientConnected(loop, distanthost, distantport):\n @asyncio.coroutine\n def handleClientConnected(client_reader, client_writer):\n \"\"\"The coroutine that will take care of one connection.\"\"\"\n loop.TOTAL_CONNECTIONS += 1\n addr = client_writer.get_extra_info('peername')\n print(\"Connected to\", addr, \"(%s pending connections)\" % loop.TOTAL_CONNECTIONS)\n distantreader, distantwriter = yield from asyncio.open_connection(distanthost, distantport, loop=loop)\n t1 = loop.create_task(transmitData(loop, distantreader, client_writer))\n t2 = loop.create_task(transmitData(loop, client_reader, distantwriter))\n transmitted1, transmitted2 = yield from asyncio.gather(t1, t2)\n loop.TOTAL_CONNECTIONS -= 1\n print(\"End of communication to\", addr, \"(%s pending connections)\" % loop.TOTAL_CONNECTIONS, \"(Transmitted: %s/%s)\" % (transmitted1, transmitted2))\n \n return handleClientConnected",
"def register(self, client):\n self.clients.append(client)",
"async def handle_client(self, reader: StreamReader, writer: StreamWriter):\n peername = writer.transport.get_extra_info(\"peername\")\n log.info(\"handle_client : %s\", peername)\n\n try:\n\n remote_host, remote_port, req = await parse_http_request_header(\n reader, writer\n )\n\n remote_reader, remote_writer = await asyncio.open_connection(\n remote_host, remote_port\n )\n if req:\n log.info(\"req: %s\", req)\n remote_writer.write(req)\n\n asyncio.create_task(http_channel(remote_reader, writer))\n\n asyncio.create_task(http_channel(reader, remote_writer))\n\n except Exception as ex:\n log.exception(ex)",
"def _handle_client(self, client_reader, client_writer):\n while True:\n data = (yield from client_reader.readline()).decode(\"utf-8\")\n if not data: # an empty string means the client disconnected\n break\n cmd, *args = data.rstrip().split(' ')\n if cmd == 'add':\n arg1 = float(args[0])\n arg2 = float(args[1])\n retval = arg1 + arg2\n client_writer.write(\"{!r}\\n\".format(retval).encode(\"utf-8\"))\n elif cmd == 'repeat':\n times = int(args[0])\n msg = args[1]\n client_writer.write(\"begin\\n\".encode(\"utf-8\"))\n for idx in range(times):\n client_writer.write(\"{}. {}\\n\".format(idx+1, msg)\n .encode(\"utf-8\"))\n client_writer.write(\"end\\n\".encode(\"utf-8\"))\n else:\n print(\"Bad command {!r}\".format(data), file=sys.stderr)\n\n # This enables us to have flow control in our connection.\n yield from client_writer.drain()",
"def attach_one_client(self, csocket):\n\n self._current_client = csocket\n self.next_seq = -1\n\n # manual requests\n\n self.enter_read_loop()",
"def uclient():\n return IceCubedSyncClient()",
"def client(self):\n\n if self._client is None:\n self._client = self._get_client()\n return self._client",
"def add_client(self, client):\n\n now = int(time.time())\n\n self.send_line(\"%s N %s 1 %d %s %s +ik ]]]]]] %s :%s\" %\\\n (self.config[\"numeric\"], client.nick, now, client.user,\n client.host, client.uid, client.gecos))",
"async def handle_new_client(self, reader, writer):\n\n log.debug(\"Got brand new client!\")\n\n self.fakeclient = Client(None, None, reader, writer)\n req = await read(self.fakeclient, 'uuid', 'username')\n self.fakeclient = None\n\n uuid = req['uuid']\n username = req['username']\n self.clients[uuid] = Client(username, PlayerPrivateStatus(), reader,\n writer)\n\n if self.state == 'waiting for owner response':\n # don't accept any request from players when a request has already\n # been send to the owner\n # So, we tell the player the owner's busy.\n log.debug(\"Send owner busy with request.\")\n await write(self.clients[uuid], {'kind': 'request state change',\n 'state': 'declined',\n 'reason': 'owner busy'})\n del self.clients[uuid]\n return\n\n if self.state == \"waiting for player\":\n # Here, we have a request from a player to join the onwer\n # the reader and the writer are the other player's, not the owner's\n log.debug(f\"Send requests infos to owner {uuid!r} {username!r}\")\n # send the uuid and username to the owner\n await write(self.clients[self.owneruuid], {'kind': 'new request',\n 'uuid': uuid,\n 'username': username})\n # feeds data because we were listening for nothing before\n # (not for nothing, just so that the server knows if the client\n # leaves)\n self.state = 'waiting for owner response'\n # wait for owner to reply\n res = await self.watch_owner\n log.debug(f\"Response from owner {res!r}\")\n # he said yes!\n if res['accepted'] is True:\n # to the client (the one that wanted to join)\n await write(self.clients[uuid], {\n 'kind': 'request state change',\n 'reason': None,\n 'accepted': True\n })\n return await self.hero_selection()\n else:\n if res['accepted'] is not False:\n log.error(\"Got unexpected value for response to request\"\n f\"{res['accepted']!r} (expecting a bool)\")\n self.state = 'waiting for player'\n await write(self.clients[uuid], {'kind': 'request state change',\n 'accepted': False,\n 'reason': 'owner declined'})\n del self.clients[uuid]\n # start all over again\n self.loop.create_task(self.handle_new_client(reader, writer))\n return\n\n if req['kind'] != 'identification':\n raise ValueError(f\"Got request of kind {req['kind']!r}, was \"\n \"expecting 'identification'\")\n # here, state must be 'waiting for owner'\n if uuid == self.owneruuid:\n self.state = \"waiting for player\"\n await write(self.clients[uuid], {\n 'kind': 'identification state change',\n 'state': 'success'\n })\n self.watch_owner = read(self.clients[self.owneruuid], 'accepted',\n kind='request state change')\n else:\n log.warning(f\"Got fake request pretenting to be owner \"\n f\"{uuid!r} {username!r}\")\n await write(self.clients[uuid], {\n 'kind': 'identification state change',\n 'state': 'failed'\n })\n writer.write_eof()\n await writer.drain()\n writer.close()",
"def client(self, id):\n return self.query(Client).filter(Client.id == id).one()",
"def heartbeat_process(client_id):\n asyncio.run(Client.heartbeat(client_id))",
"def monitor(self):\n while True:\n client_socket, client_address = self.server_socket.accept()\n print(\"New client connection accepted: {}:{}\".format(*client_address))\n threading.Thread(target=self.handle_client, args=[client_socket]).start()",
"def client(self):\n\n return self._client",
"def add_client(self, cli):\n if self.clients.count(cli) is 0:\n self.clients.append(cli)",
"def get_client(self):\n return self.client",
"def serveClient(self, client):\r\n itrans = self.inputTransportFactory.getTransport(client)\r\n otrans = self.outputTransportFactory.getTransport(client)\r\n iprot = self.inputProtocolFactory.getProtocol(itrans)\r\n oprot = self.outputProtocolFactory.getProtocol(otrans)\r\n try:\r\n while True:\r\n self.processor.process(iprot, oprot)\r\n except TTransport.TTransportException, tx:\r\n pass\r\n except Exception, x:\r\n logging.exception(x)\r\n\r\n itrans.close()\r\n otrans.close()",
"def run_client(host, port, cafile):\n loop = asyncio.get_event_loop()\n client = ChatClient()\n\n if cafile:\n print('Encrpyted')\n print(cafile)\n purpose = ssl.Purpose.SERVER_AUTH\n context = ssl.create_default_context(purpose, cafile=cafile)\n coro = loop.create_connection(lambda: client, host, port, ssl=context, server_hostname='localhost')\n loop.run_until_complete(coro)\n asyncio.async(handle_user_input(loop, client))\n\n else:\n coro = loop.create_connection(lambda: client, host, port)\n loop.run_until_complete(coro)\n asyncio.async(handle_user_input(loop, client))\n\n try:\n loop.run_forever()\n finally:\n loop.close()",
"def remove_client(self, client):\n self.clients.remove(client)\n #print(\"removing:\" + str(client))",
"def main():\n\n global _CLIENT\n\n logging.basicConfig(level=logging.DEBUG)\n app.logger.setLevel(logging.INFO)\n\n _CLIENT = Client('192.168.0.120', 443, 'root', 'calvin')\n _CLIENT.connect()\n\n\n app.run(debug=True)",
"def request_client_id(self) -> None:\n GCR.log.log(Logger.INFORMATION, \"Demande d'un id client\")\n self.send({\"action\": \"request_id\", \"username\": self.username})",
"async def websocket_client(self):\n return await websocket(CLIENT, \"/websocket\")",
"def serve_forever(self):\n try:\n while True:\n client, from_addr = self._socket.accept()\n LOGGER.debug(client)\n secure_sock = self._ssl_ctx.wrap_socket(client, server_side=True)\n new_client_conn = ClientHandlerThread(from_addr, secure_sock)\n new_client_conn.start()\n Server.CLIENT_CONNS.append(new_client_conn)\n except ssl.SSLError:\n LOGGER.exception('SSLError')\n except KeyboardInterrupt:\n self.cleanup()\n sys.exit(0)\n except socket.error as sock_err:\n LOGGER.warning(str(sock_err))\n self.cleanup()\n sys.exit(0)\n except Exception:\n LOGGER.exception('Unknown exception encountered!')",
"def client_server():\n client_server_pair = ClientServer()\n\n yield client_server_pair.client\n\n shutdown_response = client_server_pair.client._endpoint.request(\"shutdown\").result(\n timeout=CALL_TIMEOUT\n )\n assert shutdown_response is None\n client_server_pair.client._endpoint.notify(\"exit\")",
"def run(self):\n\n\t\t#Begin running the clientHandler\n\t\tself.running = True\n\t\tself.rxThread.start()\n\n\t\twhile self.running:\n\t\t\ttime.sleep(0.1)\n\t\n\t\t\t#Keep a count of the number of missing Hello requests, over 5 kill client\n\t\t\tif self.missingCount >= 5:\n\t\t\t\tself.running = False",
"async def websocket_client(hass, hass_ws_client):\n return await hass_ws_client(hass)",
"def choose(self, _id):\n app = App.get_running_app()\n self.manager.client = app.session.query(Client).filter(Client.id == _id).one()\n self.manager.current = 'info'",
"def handle(self):\n try:\n peers = Peers([\n gevent.spawn(self.route.proxy_input, self.client.sock,\n self.sock, self.buf, self.extra),\n gevent.spawn(self.route.proxy_connected, self.sock, \n self.client.sock, self.extra)])\n gevent.joinall(peers.greenlets)\n finally:\n self.sock.close()",
"def session_client(session_app):\n yield Client(session_app)",
"def test_output_one_client(self):\n self.test_case = 'one_client'\n self._run_test_case()",
"def serveClient(self, client):\r\n itrans = self.inputTransportFactory.getTransport(client)\r\n otrans = self.outputTransportFactory.getTransport(client)\r\n iprot = self.inputProtocolFactory.getProtocol(itrans)\r\n oprot = self.outputProtocolFactory.getProtocol(otrans)\r\n\r\n try:\r\n while True:\r\n self.processor.process(iprot, oprot)\r\n except TTransportException, tx:\r\n pass\r\n except Exception, x:\r\n logging.exception(x)\r\n\r\n itrans.close()\r\n otrans.close()",
"def get_client(self, clientname):\n client = self.dbsession.query(Client).filter_by(clientname=clientname).all()\n if not client:\n return self.create_client({'clientname': clientname})\n else:\n return client[0]",
"def handle_client(client): # Takes client socket as argument.\n\tr_packet = client.recv(BUFSIZ).decode(\"utf8\")\n\tar_packet = r_packet\n\tr_packet = r_packet.split(\"~\")\n\n\tfor sock in clients:\n\t\tif(clients[sock] == r_packet[0]):\n\t\t\tsock.send(bytes(ar_packet,\"utf8\"))",
"def tcp_incoming_connections():\n while True:\n client, client_address = SERVER.accept()\n print(\"%s:%s has connected.\" % client_address)\n client.send(bytes(\"Greetings from the cave! Now type your name and press enter!\"))\n addresses[client] = client_address\n Thread(target=handle_client, args=(client,)).start()",
"def __init__(self, client_ident):\n\t\tthreading.Thread.__init__(self, None)\n\t\tself.client_ident\t\t= client_ident\n\t\tself.start()",
"def __accept_new_client(self):\n client_socket, client_addr = self.__server_socket.accept()\n new_client = client.Client(client_socket, client_addr)\n self.__logger.info(\"Accepted new client from {}\".format(client_addr))\n self.__connected_clients.append(new_client)",
"def remove_client(self, client):\n client_conn = self.all_clients[client]\n\n self.all_clients.pop(client)\n self.all_connections.remove(client_conn)\n\n # client_conn.shutdown(2)\n client_conn.close()",
"def release_client(client):\n client.disconnect()\n client.loop_stop()\n wait_for_disconnection(WAIT_CONNECTION_TIMEOUT)",
"def __ServiceClient(self,Client):\n\t\twhile True:\n\t\t\tDataClient = Client.recv(1024)\n\t\t\tprint(DataClient)\n\t\t\t# your source code here\n\t\t\tmessage = DataClient\n\t\t\t# data to be sent to api\n\t\t\tdata = {'message': message}\n\t\t\t# sending post request and saving response as response object\n\t\t\tr = requests.post(url = self.API_ENDPOINT, data = data)\n\t\t\t# extracting response text\n\t\t\t#pastebin_url = r.text\n\t\t\t#print(\"The pastebin URL is:%s\"%pastebin_url)",
"async def http_client(hass, hass_client_no_auth):\n await async_setup_component(hass, webhook.DOMAIN, {})\n return await hass_client_no_auth()",
"def _dispatch_from_client_request(self):\n # Listen for client connection\n self._from_client_request.listen()\n\n while not self._exit_request:\n readable, _, _ = select([self._from_client_request], [], [self._from_client_request], 0.1)\n\n if readable:\n client_conn, client_addr = readable[0].accept()\n client_conn.setblocking(False)\n\n client_name_read, _, _ = select([client_conn], [], [client_conn])\n if client_name_read:\n client_name = json.loads(client_name_read[0].recv(cfg.HEADER).decode('utf-8'))\n else:\n print(\"Connection closed\")\n continue\n\n self._thread_lock.acquire()\n self._from_client_connections[client_conn] = client_name\n self._state[client_name] = 0\n self._thread_lock.release()\n\n print(\"Receiving commands from [\" + client_name + \", \" + client_addr[0] + \", \" + str(client_addr[1]) + ']')",
"def add_client(name):\n return create_client(name)",
"async def get_client_async(\n client_id: str, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs\n):\n request = GetClient.create(\n client_id=client_id,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def __init__(self, client):\n self.client = client",
"def client(self) -> 'BaseClient':\n return self",
"def handleClientConnected(client_reader, client_writer):\n loop.TOTAL_CONNECTIONS += 1\n addr = client_writer.get_extra_info('peername')\n print(\"Connected to\", addr, \"(%s pending connections)\" % loop.TOTAL_CONNECTIONS)\n distantreader, distantwriter = yield from asyncio.open_connection(distanthost, distantport, loop=loop)\n t1 = loop.create_task(transmitData(loop, distantreader, client_writer))\n t2 = loop.create_task(transmitData(loop, client_reader, distantwriter))\n transmitted1, transmitted2 = yield from asyncio.gather(t1, t2)\n loop.TOTAL_CONNECTIONS -= 1\n print(\"End of communication to\", addr, \"(%s pending connections)\" % loop.TOTAL_CONNECTIONS, \"(Transmitted: %s/%s)\" % (transmitted1, transmitted2))",
"def set_client_id(self):\n data = self.receive() # deserialized data\n client_id = data['clientid'] # extracts client id from data\n self.client_id = client_id # sets the client id to this client\n print(\"Successfully connected to server: \" + self.userInfo['host'] + \" / \" + str(self.userInfo['port']))\n print(\"Your client info is:\\n\" + \"Client Name: \" + self.userInfo['name'] + \"\\nClient ID: \" + str(client_id))",
"def client(self) -> mqtt.Client:\n return self._client",
"def __handle_peer(self, client_sock):\n\t\tself.__debug('New child ' + str(threading.currentThread().getName()))\n\t\tself.__debug('Connected ' + str(client_sock.getpeername()))\n\n\t\tsd = Request(sock=client_sock, debug=self.debug)\n\t\ttry:\n\t\t\twhile not self.shutdown:\n\t\t\t\tmessage = sd.recvdata()\n\t\t\t\tif self.callback.has_key(message.__class__.__name__):\n\t\t\t\t\tself.callback[message.__class__.__name__]()\n\t\texcept EOFError:\n\t\t\tself.__debug('EOFError...')\n\t\tclient_sock.close()",
"def __run_client(self):\n\n self._client = CoapClient(server_hostname=self._hostname, server_port=self._port, src_port=self._src_port)\n self._client_running = True\n\n if self.use_polling:\n super(CoapSensor, self).on_start()\n else:\n self.observe_topic()",
"def manage_send_request(self, client):\n try:\n client.send()\n except Exception as err:\n # condition, when encountered, denotes socket closing\n (this_client_host, this_client_port) = client.getpeername()\n print('closing connection from {}, port {} - {}'.format(this_client_host, this_client_port, err_to_str(err)))\n client.close()\n self.clients.remove(client)\n raise",
"def index_client(indexd_client):\n return indexd_client",
"def run(self):\n\n listen_port = DEBUGGER_PORT if \"RENPY_DEBUGGER_PORT\" not in os.environ else os.environ[\"RENPY_DEBUGGER_PORT\"]\n\n server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n server.bind((\"0.0.0.0\", listen_port))\n server.listen(0)\n\n while True:\n client, client_address = server.accept()\n self.attach_one_client(client)",
"async def client_ssh_handler(process):\n log.debug(f\"clients.py:client_ssh_handler - SSH details are: {dir(process)}\")\n reader = process.stdin\n writer = process.stdout\n client_details = process.get_extra_info(\"peername\")\n addr, port, *rest = client_details\n\n connection = PlayerConnection(addr, port, \"ssh\")\n\n await register_client(connection)\n\n tasks = [\n asyncio.create_task(client_read(reader, connection), name=f\"{connection.uuid} read\"),\n asyncio.create_task(client_write(writer, connection), name=f\"{connection.uuid} write\"),\n ]\n\n asyncio.current_task().set_name(f\"{connection.uuid} handler\")\n\n # We want to .wait until the first task is completed. Completed could be an actual finishing\n # of execution or an exception. If either the read or writer \"completes\", we want to ensure\n # we move beyond this point and cleanup the tasks associated with this client.\n _, rest = await asyncio.wait(tasks, return_when=\"FIRST_COMPLETED\")\n\n await unregister_client(connection)\n\n process.close()\n process.exit(0)\n\n for task in rest:\n task.cancel()",
"def main():\n # Clear the terminal before a new run\n os.system('cls') \n\n # Create the server_socket object and bind it to the desired address\n server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n server_socket.bind(SERVER_ADDRESS)\n \n # Start listening for new connections\n server_socket.listen()\n print(f\"[LISTENING] SERVER IS NOW LISTENING FOR NEW CONNECTIONS ON {SERVER_ADDRESS}\")\n\n while True:\n # Accept a new connection\n conn, addr = server_socket.accept()\n # Start a new thread handling the new connection\n client_thread = threading.Thread(target=handle_client, args=(conn, addr))\n client_thread.start()",
"def admin_client():\n host = '127.0.0.1'\n port = 8126\n return TcpClient(host, port)",
"def _require_client(self, client):\n if client is None:\n client = self._client\n return client",
"def manage_client(client):\r\n #information about the player\r\n msg_client('Ora inserisci il tuo nome: ', client)\r\n name = client.recv(BUFSIZ)\r\n clients[client] = name\r\n \r\n init_player(client)\r\n \r\n #get player's role\r\n msg_client('Il tuo ruolo è: ' + str(roles[client]), client)\r\n msg_client('Scrivi {quit} per uscire dal gioco', client)\r\n \r\n insert_number_player(client)\r\n \r\n start_question(client)\r\n \r\n check_player_ready(client)\r\n \r\n start_game(client)\r\n \r\n search_winner()\r\n \r\n close_client(client)",
"def run(self):\n try:\n self.server = socket(AF_INET, SOCK_STREAM)\n self.server.bind(self.address)\n self.server.listen(5) # Allows up to 5 waiting clients\n\n while True:\n self.myView.updateStatus('Waiting for connection ...')\n client, address = self.server.accept()\n self.myView.updateStatus('... connected from ' + str(address))\n handler = ClientHandler(client, self.bank, self.myView)\n handler.start()\n\n except Exception as message:\n self.myView.updateStatus(message)\n self.server.close()\n self.myView.updateStatus(\"Server shutting down.\")",
"def handle(self):\n self.ip = self.client_address[0]\n self.port = self.client_address[1]\n self.connection = self.request\n #self.logged_in = False\n\n # Loop that listens for messages from the client\n while True:\n received_string = self.connection.recv(4096).strip()\n if received_string:\n jsonObject = json.loads(received_string)\n request = jsonObject.get('request')\n #print(received_string)\n #self.handle_data(received_string)\n if request == 'login':\n print 'logging in'\n self.login(jsonObject)\n elif request == 'logout':\n self.logout()\n elif request == 'msg':\n self.send(jsonObject)\n elif request == 'names':\n self.getNames()\n elif request == 'help':\n return \"geiegohruuhiegr\"\n else:\n return \"you suck\"\n\n else:\n print('The client is disconnected.')\n break \n # TODO: Add handling of received payload from client",
"def accept_incoming_connections():\n while True:\n client, client_address = SERVER.accept()\n print(\"%s:%s has connected.\" % client_address)\n #client.send(bytes(\"Greetings from the cave! Now type your name and press enter!\", \"utf8\"))\n addresses[client] = client_address\n Thread(target=handle_client, args=(client,)).start()",
"def client():\n return app.test_client()",
"def client_server(request):\n def build(handler, host=\"localhost\", port=None, *, loop=None):\n loop = loop or asyncio.get_event_loop()\n port = port or get_next_port(host)\n\n server = serve(handler, host, port, klass=WebSocket, loop=loop)\n server = loop.run_until_complete(server)\n\n client = connect(\"ws://{}:{}\".format(host, port))\n client = loop.run_until_complete(client)\n return client, server\n\n return build",
"def get(self, id: int) -> Client:\n\n return self.__clients[id]",
"def get_client_by_id(self, client_id):\r\n cursor = self.conn.cursor()\r\n cursor.execute(\"\"\"SELECT * FROM CLIENT WHERE id={}\"\"\".format(client_id))\r\n return cursor.fetchall()",
"def on_client_connect(self, client):\r\n\t\tself.connection_logger.info('Received client connection from %s:%u' % (client.address, client.port))\r\n\t\tif (self.db_connection is False):\r\n\t\t\tclient.send('A critical database error has occurred. Please reconnect later.\\n')\r\n\t\t\tclient.socket_send()\r\n\t\t\tclient.deactivate()\r\n\t\t\tclient.sock.close()\r\n\t\t\treturn\r\n\t\tclient.send(self.welcome_message_data)\r\n\t\tself.pending_connection_list.append(client)\r\n\t\tself.post_client_connect.send(sender=client)"
] | [
"0.6872704",
"0.66223794",
"0.6563431",
"0.6314243",
"0.6278947",
"0.6241105",
"0.6189508",
"0.61751866",
"0.6150069",
"0.6113049",
"0.61002946",
"0.607193",
"0.60383105",
"0.6022606",
"0.5996656",
"0.59760684",
"0.5955252",
"0.5949449",
"0.59433",
"0.5927668",
"0.591821",
"0.5888377",
"0.5887917",
"0.5866311",
"0.5853782",
"0.5853782",
"0.584101",
"0.582946",
"0.58088595",
"0.5784581",
"0.5765891",
"0.5758705",
"0.57570153",
"0.57431114",
"0.5710044",
"0.5709901",
"0.56820506",
"0.5666524",
"0.5644673",
"0.56382334",
"0.5635157",
"0.56282854",
"0.560803",
"0.5607453",
"0.56021595",
"0.5601398",
"0.5596885",
"0.55582416",
"0.55530065",
"0.5528808",
"0.5525901",
"0.5525534",
"0.551318",
"0.55131453",
"0.5509596",
"0.5501831",
"0.5500126",
"0.54957914",
"0.5478449",
"0.54784334",
"0.5475304",
"0.5470944",
"0.5468182",
"0.5467035",
"0.5462509",
"0.54522204",
"0.5445266",
"0.5429931",
"0.54271376",
"0.54193705",
"0.5415764",
"0.54114574",
"0.53909844",
"0.53876966",
"0.53873974",
"0.5383264",
"0.53807026",
"0.53785884",
"0.5374621",
"0.5373417",
"0.53697395",
"0.5361849",
"0.5352248",
"0.53428984",
"0.53423107",
"0.534218",
"0.5340237",
"0.53364134",
"0.532592",
"0.5323306",
"0.53159785",
"0.5306328",
"0.5302192",
"0.5294612",
"0.5292396",
"0.52909845",
"0.529047",
"0.52847296",
"0.52836865",
"0.52813244",
"0.5278724"
] | 0.0 | -1 |
A greenlet for handling a single client. | def _process_socket(self, client, address):
log.info('func=open|client=%s:%d|pool_size=%d', address[0], address[1], len(self.pool))
client = SocketTransport(client)
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException:
pass
except EOFError:
pass
except:
log.error(traceback.format_exc())
itrans.close()
otrans.close()
log.info('func=close|client=%s:%d', address[0], address[1]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def client():",
"def handle_client(self):\n e = threading.Event()\n reg_t = threading.Thread(target=self.handle_reg_client, args=(e,))\n stream_t = threading.Thread(target=self.handle_stream_client,\n args=(e,))\n reg_t.start()\n stream_t.start()",
"def client():\n\n client = Client()\n return client",
"def _accept_client(self, client_reader, client_writer):\n\n print(\"New client\", client_reader, client_writer)\n # start a new Task to handle this specific client connection\n task = asyncio.Task(self._handle_client(client_reader, client_writer))\n self.clients[task] = (client_reader, client_writer)\n\n def client_done(task):\n print(\"client task done:\", task, file=sys.stderr)\n del self.clients[task]\n\n task.add_done_callback(client_done)",
"async def start_client(self) -> None:\n\n if hasattr(Config().algorithm,\n 'cross_silo') and not Config().is_edge_server():\n # Contact one of the edge servers\n logging.info(\"[Client #%d] Contacting Edge server #%d.\",\n self.client_id, self.edge_server_id)\n else:\n logging.info(\"[Client #%d] Contacting the central server.\",\n self.client_id)\n uri = 'ws://{}:{}'.format(Config().server.address, self.server_port)\n\n try:\n async with websockets.connect(uri,\n ping_interval=None,\n max_size=2**30) as websocket:\n logging.info(\"[Client #%d] Signing in at the server.\",\n self.client_id)\n\n await websocket.send(pickle.dumps({'id': self.client_id}))\n\n while True:\n logging.info(\"[Client #%d] Waiting to be selected.\",\n self.client_id)\n server_response = await websocket.recv()\n data = pickle.loads(server_response)\n\n if data['id'] == self.client_id:\n self.process_server_response(data)\n logging.info(\"[Client #%d] Selected by the server.\",\n self.client_id)\n\n if not self.data_loaded:\n self.load_data()\n\n if 'payload' in data:\n server_payload = await self.recv(\n self.client_id, data, websocket)\n self.load_payload(server_payload)\n\n heartbeat_proc = Process(\n target=Client.heartbeat_process,\n args=(self.client_id, ))\n heartbeat_proc.start()\n report, payload = await self.train()\n heartbeat_proc.terminate()\n\n if Config().is_edge_server():\n logging.info(\n \"[Server #%d] Model aggregated on edge server (client #%d).\",\n os.getpid(), self.client_id)\n else:\n logging.info(\"[Client #%d] Model trained.\",\n self.client_id)\n\n # Sending the client report as metadata to the server (payload to follow)\n client_report = {\n 'id': self.client_id,\n 'report': report,\n 'payload': True\n }\n await websocket.send(pickle.dumps(client_report))\n\n # Sending the client training payload to the server\n await self.send(websocket, payload)\n\n except OSError as exception:\n logging.info(\"[Client #%d] Connection to the server failed.\",\n self.client_id)\n logging.error(exception)",
"def client(self):\n raise NotImplementedError()",
"def clients():\n pass",
"def serveThread(self):\r\n while True:\r\n try:\r\n client = self.clients.get()\r\n self.serveClient(client)\r\n except Exception, x:\r\n logging.exception(x)",
"def client():\n yield tests.example_server.app.test_client()",
"def _dummy_client(self):\n logger.warning('Running dummy client for task #%s',\n self.task_data.get('task_id', 0))\n Client(LISTENER_ADDRESS).close()",
"def client(self):\n return self._thread._client",
"def client(clients: int) -> None:\n from DLA.server.client import run_clients\n asyncio.run(run_clients(clients))",
"def client():\n _, p, _ = docker_run_etcd_main()\n c = Client(host, p, protocol)\n yield c\n c.close()",
"def handle_reg_client(self, event):\n try:\n while True:\n client_req = self.receive_msg()\n self.choose_action(client_req[ZERO], client_req[ONE:], event)\n except socket.error as e:\n print(e)",
"def client(self):\n\t\t# pylint: disable=invalid-name\n\t\treturn self._client",
"def sanic_client(loop):\n clients = []\n\n async def create_client(app, **kwargs):\n client = TestClient(app, **kwargs)\n await client.start_server()\n clients.append(client)\n return client\n\n yield create_client\n\n # Clean up\n if clients:\n for client in clients:\n loop.run_until_complete(client.close())",
"def handle_client(client): # Takes client socket as argument.\r\n name = client.recv(BUFSIZ).decode(\"utf8\")\r\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit.' % name\r\n client.send(bytes(welcome, \"utf8\"))\r\n msg = \"%s has joined the chat!\" % name\r\n broadcast(bytes(msg, \"utf8\"))\r\n clients[client] = name\r\n while True:\r\n msg = client.recv(BUFSIZ)\r\n if msg != bytes(\"{quit}\", \"utf8\"):\r\n broadcast(msg, name+\": \")\r\n else:\r\n client.send(bytes(\"{quit}\", \"utf8\"))\r\n client.close()\r\n del clients[client]\r\n broadcast(bytes(\"%s has left the chat.\" % name, \"utf8\"))\r\n break",
"def client(self, msg, *args, **kwargs):\r\n return log(self.CLIENT, msg, *args, **kwargs)",
"def handle_client(client, name): # Takes client socket as argument.\n\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit.' % name\n client.send(bytes(welcome, \"utf8\"))\n msg = \"%s has joined the chat!\" % name\n broadcast(bytes(msg, \"utf8\"))\n clients[client] = name\n\n while True:\n msg = client.recv(BUFSIZ)\n if msg != bytes(\"{quit}\", \"utf8\"):\n broadcast(msg, name+\": \")\n else:\n client.send(bytes(\"{quit}\", \"utf8\"))\n client.close()\n del clients[client]\n broadcast(bytes(\"%s has left the chat.\" % name, \"utf8\"))\n break",
"def client_request(self, evt):\n threads.deferToThread(self.cli_db.accept, evt)",
"def listen_for_client(self):\n #PART 2:LISTEN FOR CLIENT We wait for the clients connection request and once a\n #successful connection is made we dispatch the request in a separate thread,\n #making ourselves available for the next request.\n #This allows us to handle multiple requests simultaneously which boosts the performance of the \n #server multifold times. -> we need a function for threading and to get client name!!!\n\n\n while True:\n (clientSocket, client_address) = self.serverSocket.accept() # Establish the connection\n d = threading.Thread(name=self._getClientName(client_address), target=self.proxy_thread, args=(clientSocket, client_address))\n d.setDaemon(True)\n d.start()\n self.shutdown(0,0)",
"def client(self, reactor, serverAddress):\n raise NotImplementedError()",
"def run_client(self, event_loop, irc_client):\n # Deliberately written in \"synchronous\" style with run_until_complete()\n # instead of await because async generators don't work in Python 3.5.\n with self.mock_open_connection():\n # Start the client\n run_fut = event_loop.create_task(irc_client.run())\n event_loop.run_until_complete(irc_client.connected.wait())\n # Allow the test to run\n yield\n # Cleanly end the read loop and wait for client to exit\n irc_client.disconnect()\n event_loop.run_until_complete(run_fut)",
"def handle_client(client): # Takes client socket as argument.\n\n name = client.recv(BUFSIZ).decode(\"utf8\")\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit.' % name\n client.send(bytes(welcome))\n msg = \"%s has joined the chat!\" % name\n broadcast(bytes(msg))\n clients[client] = name\n\n while True:\n msg = client.recv(BUFSIZ)\n if msg == bytes(\"{quit}\"):\n client.send(bytes(\"{quit}\"))\n client.close()\n del clients[client]\n broadcast(bytes(\"%s has left the chat.\" % name))\n break\n elif msg[0:7] == bytes(\"{emoji}\"):\n broadcast(msg, \"\")\n else:\n broadcast(msg, name + \": \")",
"def Client(self) -> Socket:",
"def Client(self) -> Socket:",
"def client(sandbox):\n sandbox.add_node(0)\n client = sandbox.client(0)\n yield client",
"def client(self):\n return self._client",
"def handle_client(client): # Takes client socket as argument.\n\tname = client.recv(2048).decode(\"utf8\")\n\twelcome = 'Welcome %s! Enter {quit} to exit.' % name\n\ttry:\n\t\tclient.send(bytes(welcome, \"utf8\"))\n\t\tmsg = \"%s: has joined the chat!\" % name\n\t\tbroadcast(bytes(msg, \"utf8\"))\n\t\tclients[client] = name\n\t\ttemp_client = {'Address':addresses[client],'Name':clients[client]}\n\t\tactive.append(temp_client)\n\t\tbroadcast(bytes(str(active),'utf-8'))\n\t\twhile True:\n\t\t\tmsg = client.recv(2048)\n\t\t\ttry:\n\t\t\t\tif '(' in msg.decode('utf-8') and ')' in msg.decode('utf-8'):\n\t\t\t\t\ttemp = msg.decode('utf-8').split(')')\n\t\t\t\t\taddress = temp[0] + ')'\n\t\t\t\t\tprivate_message(address,temp[1])\n\t\t\t\telif msg != bytes(\"{quit}\", \"utf8\"):\n\t\t\t\t\tbroadcast(msg, \"<global>\" + name + \": \")\n\t\t\t\t\tprint(client)\n\t\t\t\telse:\n\t\t\t\t\t#client.send(bytes(\"{quit}\", \"utf8\"))\n\t\t\t\t\tclient.close()\n\t\t\t\t\tactive.remove({'Address':addresses[client],'Name':clients[client]})\n\t\t\t\t\tdel clients[client]\n\t\t\t\t\tbroadcast(bytes(\"%s has left the chat.\" % name, \"utf8\"))\n\t\t\t\t\tbroadcast(bytes(str(active),'utf-8'))\n\t\t\t\t\tbreak\n\t\t\texcept:\n\t\t\t\tprint(msg)\n\t\t\t\tbroadcast_file(msg)\n\texcept Exception as e:\n\t\tprint(e)",
"def handle_accept(self):\r\n pair = self.accept()\r\n if pair is not None:\r\n sock, addr = pair\r\n server_log.info('Client connection from {}, assigning client id {}'.format(repr(addr), self.client_id))\r\n handler = ClientHandler(sock, addr, self.client_id)\r\n self.client_list.update({self.client_id: handler})\r\n self.client_id += 1",
"def server(conn, address):\n print(\"Client Connection Open\")\n while True:\n request = server_read(conn)\n if request:\n print(request)\n manage_client(request, conn)",
"def handle_client(self,conn,addr):\n print(f\"[NEW CONNECTION] {addr} connected\")\n client_id = \"\"\n connected = True\n while connected:\n try:\n try:\n msg_length = conn.recv(PREFIX).decode(FORMAT)\n except:\n print(f\"[{addr}] DISCONNECTED\")\n self.handle_unexpected_disconnect(client_id,conn)\n return\n\n if msg_length:\n try:\n msg_length = int(msg_length)\n try:\n raw_msg = conn.recv(msg_length).decode(FORMAT)\n except:\n print(f\"[{addr}] DISCONNECTED\")\n self.handle_unexpected_disconnect(client_id,conn)\n return\n message = json.loads(raw_msg)\n except ValueError:\n message = FAILURE_MESSAGE\n\n if message[\"HEADER\"] == DISCONNECT_MESSAGE:\n connected = False\n self.handle_disconnect(message,conn)\n\n elif message[\"HEADER\"] == \"CREATE\":\n session_id = \"\".join(random.choices(string.ascii_uppercase + string.digits, k = 4))\n indentifer = json.loads(message[\"MESSAGE\"])\n tokenDict = json.loads(indentifer[\"spotify_token\"])\n client_id = message[\"ID\"]\n self.create_session(session_id, message[\"ID\"], indentifer[\"display_name\"], tokenDict)\n self.add_connection_entry(message[\"ID\"], indentifer[\"display_name\"], session_id, True, conn, addr)\n self.create_spotify_player(session_id)\n if not self.sessions[session_id][\"HOST\"][\"spotify_player\"].is_spotify_running():\n self.send(\"STC\", client_id, \"PLEASE START SPOTIFY\")\n\n self.send(\"SESSION_ID\", client_id, str(session_id))\n\n elif message[\"HEADER\"] == \"GET_CURRENT_SONG\":\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n if not self.sessions[session_id][\"HOST\"][\"spotify_player\"].is_spotify_running():\n self.send(\"STC\", client_id, \"PLEASE START SPOTIFY\")\n else:\n current_track = {}\n current_track[\"name\"] = player.sp.currently_playing()['item']['name']\n current_track[\"artist\"] = player.sp.currently_playing()['item']['album']['artists'][0]['name']\n track_json = json.dumps(current_track)\n self.send(\"CURRENT_SONG\", message[\"ID\"],track_json)\n\n elif message[\"HEADER\"] == \"SKIP\":\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n session_id = self.get_session_from_user(message[\"ID\"])\n session_queue = self.get_session_queue(session_id)\n if len(session_queue) > 0:\n player.add_to_queue(session_queue[0][1])\n session_queue.pop(0)\n self.send_queue_update(session_id)\n player.next_track()\n\n elif message[\"HEADER\"] == \"REWIND\":\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n player.previous_track()\n\n elif message[\"HEADER\"] == \"PLAY\":\n session_id = self.get_session_from_user(message[\"ID\"])\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n player.toggle_playback()\n\n elif message[\"HEADER\"] == \"SEARCH\":\n player = self.get_session_player(self.get_session_from_user(message[\"ID\"]))\n song = message[\"MESSAGE\"]\n self.send(\"SEARCH_RESULTS\", message[\"ID\"], json.dumps(player.search(song)))\n\n\n\n\n elif message[\"HEADER\"] == \"ADD_TO_QUEUE\":\n track_data = json.loads(message[\"MESSAGE\"])\n self.add_to_session_queue(message[\"ID\"], (track_data[\"name\"],track_data['uri']))\n session_id = self.get_session_from_user(message[\"ID\"])\n\n\n elif message[\"HEADER\"] == \"QUEUE_UPDATE\":\n options = json.loads(message[\"MESSAGE\"])\n self.update_queue(message[\"ID\"],options)\n\n elif message[\"HEADER\"] == \"GET_USERS\":\n session_id = self.get_session_from_user(message[\"ID\"])\n users = self.sessions[session_id][\"USERS\"]\n 
self.send(\"USERS\", message[\"ID\"], json.dumps(users))\n\n elif message[\"HEADER\"] == \"SET_PERMISSION\":\n msg = json.loads(message[\"MESSAGE\"])\n session_id = self.get_session_from_user(message[\"ID\"])\n self.change_user_permissions(session_id, msg[\"client_id\"], msg[\"permission\"])\n new_permissions = {}\n new_permissions[\"permission\"] = msg[\"permission\"]\n new_permissions[\"value\"] = self.sessions[session_id][\"USERS\"][msg[\"client_id\"]][\"permissions\"][msg[\"permission\"]]\n self.send(\"PERMISSION_UPDATE\",msg[\"client_id\"], json.dumps(new_permissions))\n\n elif message[\"HEADER\"] == \"JOIN\":\n msg = json.loads(message[\"MESSAGE\"])\n session_id = msg[\"session_id\"]\n if session_id in self.sessions.keys():\n self.add_user_to_session(session_id,message[\"ID\"],msg[\"display_name\"])\n self.add_connection_entry(message[\"ID\"],msg[\"display_name\"],session_id, False, conn, addr)\n client_id = message[\"ID\"]\n\n session_info = {}\n session_info[\"session_id\"] = session_id\n session_info[\"host\"] = self.sessions[session_id][\"HOST\"][\"NAME\"]\n\n self.send(\"SESSION_INFO\", message[\"ID\"], json.dumps(session_info))\n self.send(\"QUEUE_UPDATE\", message[\"ID\"], json.dumps(self.get_session_queue(session_id)))\n self.broadcast_to_session(session_id,\"USERS\", json.dumps(self.sessions[session_id][\"USERS\"]))\n else:\n self.add_connection_entry(message[\"ID\"],msg[\"display_name\"],session_id, False, conn, addr)\n self.send(\"FAILURE\", message[\"ID\"], \"Session does not exist\")\n self.send(DISCONNECT_MESSAGE,message[\"ID\"],DISCONNECT_MESSAGE)\n self.delete_connection_entry(message[\"ID\"])\n break\n elif message[\"HEADER\"] == \"SET_PERMISSIONS\":\n msg = json.loads(message[\"MESSAGE\"])\n user_id = msg[\"client_id\"]\n permissions = json.loads(msg[\"permissions\"])\n for key in permissions.keys():\n self.set_permissions(user_id,key,permissions[key])\n self.print_sessions()\n\n elif message[\"HEADER\"] == \"BROADCAST_S\":\n session_id = self.connections[message[\"ID\"]][\"session_id\"]\n self.broadcast_to_session(session_id,\"BROADCAST_S\", message[\"MESSAGE\"])\n elif message[\"HEADER\"] == \"BROADCAST\":\n self.broadcast_to_all(\"BROADCAST\", message[\"MESSAGE\"])\n\n elif message[\"HEADER\"] == \"PLAYBACK\":\n session_id = self.connections[message[\"ID\"]][\"session_id\"]\n sp = self.sessions[session_id][\"HOST\"][\"spotify_player\"]\n if not sp.toggle_playback():\n self.broadcast_to_session(self.get_session_from_user(client_id), \"FAILURE\", \"Please Start Spotify\")\n\n else:\n print(message[\"MESSAGE\"])\n except Exception as ex:\n print(str(ex))\n\n print(\"Thread Closing\")",
"def handler(self):\n\t\tself.exitClient()",
"def Client(self):\n return self._client",
"def run(self):\n client = ProcessorClient()\n try:\n client.connect(self.address)\n except Exception as e:\n self.error = e\n logging.error(e)\n else:\n self.clients[self.name] = client",
"def client(self):\r\n if self._client is None:\r\n self._client = self._client_cls(self._server, self._params, self)\r\n return self._client",
"def createHandlerClientConnected(loop, distanthost, distantport):\n @asyncio.coroutine\n def handleClientConnected(client_reader, client_writer):\n \"\"\"The coroutine that will take care of one connection.\"\"\"\n loop.TOTAL_CONNECTIONS += 1\n addr = client_writer.get_extra_info('peername')\n print(\"Connected to\", addr, \"(%s pending connections)\" % loop.TOTAL_CONNECTIONS)\n distantreader, distantwriter = yield from asyncio.open_connection(distanthost, distantport, loop=loop)\n t1 = loop.create_task(transmitData(loop, distantreader, client_writer))\n t2 = loop.create_task(transmitData(loop, client_reader, distantwriter))\n transmitted1, transmitted2 = yield from asyncio.gather(t1, t2)\n loop.TOTAL_CONNECTIONS -= 1\n print(\"End of communication to\", addr, \"(%s pending connections)\" % loop.TOTAL_CONNECTIONS, \"(Transmitted: %s/%s)\" % (transmitted1, transmitted2))\n \n return handleClientConnected",
"def register(self, client):\n self.clients.append(client)",
"async def handle_client(self, reader: StreamReader, writer: StreamWriter):\n peername = writer.transport.get_extra_info(\"peername\")\n log.info(\"handle_client : %s\", peername)\n\n try:\n\n remote_host, remote_port, req = await parse_http_request_header(\n reader, writer\n )\n\n remote_reader, remote_writer = await asyncio.open_connection(\n remote_host, remote_port\n )\n if req:\n log.info(\"req: %s\", req)\n remote_writer.write(req)\n\n asyncio.create_task(http_channel(remote_reader, writer))\n\n asyncio.create_task(http_channel(reader, remote_writer))\n\n except Exception as ex:\n log.exception(ex)",
"def _handle_client(self, client_reader, client_writer):\n while True:\n data = (yield from client_reader.readline()).decode(\"utf-8\")\n if not data: # an empty string means the client disconnected\n break\n cmd, *args = data.rstrip().split(' ')\n if cmd == 'add':\n arg1 = float(args[0])\n arg2 = float(args[1])\n retval = arg1 + arg2\n client_writer.write(\"{!r}\\n\".format(retval).encode(\"utf-8\"))\n elif cmd == 'repeat':\n times = int(args[0])\n msg = args[1]\n client_writer.write(\"begin\\n\".encode(\"utf-8\"))\n for idx in range(times):\n client_writer.write(\"{}. {}\\n\".format(idx+1, msg)\n .encode(\"utf-8\"))\n client_writer.write(\"end\\n\".encode(\"utf-8\"))\n else:\n print(\"Bad command {!r}\".format(data), file=sys.stderr)\n\n # This enables us to have flow control in our connection.\n yield from client_writer.drain()",
"def attach_one_client(self, csocket):\n\n self._current_client = csocket\n self.next_seq = -1\n\n # manual requests\n\n self.enter_read_loop()",
"def uclient():\n return IceCubedSyncClient()",
"def client(self):\n\n if self._client is None:\n self._client = self._get_client()\n return self._client",
"def add_client(self, client):\n\n now = int(time.time())\n\n self.send_line(\"%s N %s 1 %d %s %s +ik ]]]]]] %s :%s\" %\\\n (self.config[\"numeric\"], client.nick, now, client.user,\n client.host, client.uid, client.gecos))",
"async def handle_new_client(self, reader, writer):\n\n log.debug(\"Got brand new client!\")\n\n self.fakeclient = Client(None, None, reader, writer)\n req = await read(self.fakeclient, 'uuid', 'username')\n self.fakeclient = None\n\n uuid = req['uuid']\n username = req['username']\n self.clients[uuid] = Client(username, PlayerPrivateStatus(), reader,\n writer)\n\n if self.state == 'waiting for owner response':\n # don't accept any request from players when a request has already\n # been send to the owner\n # So, we tell the player the owner's busy.\n log.debug(\"Send owner busy with request.\")\n await write(self.clients[uuid], {'kind': 'request state change',\n 'state': 'declined',\n 'reason': 'owner busy'})\n del self.clients[uuid]\n return\n\n if self.state == \"waiting for player\":\n # Here, we have a request from a player to join the onwer\n # the reader and the writer are the other player's, not the owner's\n log.debug(f\"Send requests infos to owner {uuid!r} {username!r}\")\n # send the uuid and username to the owner\n await write(self.clients[self.owneruuid], {'kind': 'new request',\n 'uuid': uuid,\n 'username': username})\n # feeds data because we were listening for nothing before\n # (not for nothing, just so that the server knows if the client\n # leaves)\n self.state = 'waiting for owner response'\n # wait for owner to reply\n res = await self.watch_owner\n log.debug(f\"Response from owner {res!r}\")\n # he said yes!\n if res['accepted'] is True:\n # to the client (the one that wanted to join)\n await write(self.clients[uuid], {\n 'kind': 'request state change',\n 'reason': None,\n 'accepted': True\n })\n return await self.hero_selection()\n else:\n if res['accepted'] is not False:\n log.error(\"Got unexpected value for response to request\"\n f\"{res['accepted']!r} (expecting a bool)\")\n self.state = 'waiting for player'\n await write(self.clients[uuid], {'kind': 'request state change',\n 'accepted': False,\n 'reason': 'owner declined'})\n del self.clients[uuid]\n # start all over again\n self.loop.create_task(self.handle_new_client(reader, writer))\n return\n\n if req['kind'] != 'identification':\n raise ValueError(f\"Got request of kind {req['kind']!r}, was \"\n \"expecting 'identification'\")\n # here, state must be 'waiting for owner'\n if uuid == self.owneruuid:\n self.state = \"waiting for player\"\n await write(self.clients[uuid], {\n 'kind': 'identification state change',\n 'state': 'success'\n })\n self.watch_owner = read(self.clients[self.owneruuid], 'accepted',\n kind='request state change')\n else:\n log.warning(f\"Got fake request pretenting to be owner \"\n f\"{uuid!r} {username!r}\")\n await write(self.clients[uuid], {\n 'kind': 'identification state change',\n 'state': 'failed'\n })\n writer.write_eof()\n await writer.drain()\n writer.close()",
"def client(self, id):\n return self.query(Client).filter(Client.id == id).one()",
"def heartbeat_process(client_id):\n asyncio.run(Client.heartbeat(client_id))",
"def monitor(self):\n while True:\n client_socket, client_address = self.server_socket.accept()\n print(\"New client connection accepted: {}:{}\".format(*client_address))\n threading.Thread(target=self.handle_client, args=[client_socket]).start()",
"def client(self):\n\n return self._client",
"def add_client(self, cli):\n if self.clients.count(cli) is 0:\n self.clients.append(cli)",
"def get_client(self):\n return self.client",
"def serveClient(self, client):\r\n itrans = self.inputTransportFactory.getTransport(client)\r\n otrans = self.outputTransportFactory.getTransport(client)\r\n iprot = self.inputProtocolFactory.getProtocol(itrans)\r\n oprot = self.outputProtocolFactory.getProtocol(otrans)\r\n try:\r\n while True:\r\n self.processor.process(iprot, oprot)\r\n except TTransport.TTransportException, tx:\r\n pass\r\n except Exception, x:\r\n logging.exception(x)\r\n\r\n itrans.close()\r\n otrans.close()",
"def run_client(host, port, cafile):\n loop = asyncio.get_event_loop()\n client = ChatClient()\n\n if cafile:\n print('Encrpyted')\n print(cafile)\n purpose = ssl.Purpose.SERVER_AUTH\n context = ssl.create_default_context(purpose, cafile=cafile)\n coro = loop.create_connection(lambda: client, host, port, ssl=context, server_hostname='localhost')\n loop.run_until_complete(coro)\n asyncio.async(handle_user_input(loop, client))\n\n else:\n coro = loop.create_connection(lambda: client, host, port)\n loop.run_until_complete(coro)\n asyncio.async(handle_user_input(loop, client))\n\n try:\n loop.run_forever()\n finally:\n loop.close()",
"def remove_client(self, client):\n self.clients.remove(client)\n #print(\"removing:\" + str(client))",
"def main():\n\n global _CLIENT\n\n logging.basicConfig(level=logging.DEBUG)\n app.logger.setLevel(logging.INFO)\n\n _CLIENT = Client('192.168.0.120', 443, 'root', 'calvin')\n _CLIENT.connect()\n\n\n app.run(debug=True)",
"def request_client_id(self) -> None:\n GCR.log.log(Logger.INFORMATION, \"Demande d'un id client\")\n self.send({\"action\": \"request_id\", \"username\": self.username})",
"async def websocket_client(self):\n return await websocket(CLIENT, \"/websocket\")",
"def serve_forever(self):\n try:\n while True:\n client, from_addr = self._socket.accept()\n LOGGER.debug(client)\n secure_sock = self._ssl_ctx.wrap_socket(client, server_side=True)\n new_client_conn = ClientHandlerThread(from_addr, secure_sock)\n new_client_conn.start()\n Server.CLIENT_CONNS.append(new_client_conn)\n except ssl.SSLError:\n LOGGER.exception('SSLError')\n except KeyboardInterrupt:\n self.cleanup()\n sys.exit(0)\n except socket.error as sock_err:\n LOGGER.warning(str(sock_err))\n self.cleanup()\n sys.exit(0)\n except Exception:\n LOGGER.exception('Unknown exception encountered!')",
"def client_server():\n client_server_pair = ClientServer()\n\n yield client_server_pair.client\n\n shutdown_response = client_server_pair.client._endpoint.request(\"shutdown\").result(\n timeout=CALL_TIMEOUT\n )\n assert shutdown_response is None\n client_server_pair.client._endpoint.notify(\"exit\")",
"def run(self):\n\n\t\t#Begin running the clientHandler\n\t\tself.running = True\n\t\tself.rxThread.start()\n\n\t\twhile self.running:\n\t\t\ttime.sleep(0.1)\n\t\n\t\t\t#Keep a count of the number of missing Hello requests, over 5 kill client\n\t\t\tif self.missingCount >= 5:\n\t\t\t\tself.running = False",
"async def websocket_client(hass, hass_ws_client):\n return await hass_ws_client(hass)",
"def choose(self, _id):\n app = App.get_running_app()\n self.manager.client = app.session.query(Client).filter(Client.id == _id).one()\n self.manager.current = 'info'",
"def handle(self):\n try:\n peers = Peers([\n gevent.spawn(self.route.proxy_input, self.client.sock,\n self.sock, self.buf, self.extra),\n gevent.spawn(self.route.proxy_connected, self.sock, \n self.client.sock, self.extra)])\n gevent.joinall(peers.greenlets)\n finally:\n self.sock.close()",
"def session_client(session_app):\n yield Client(session_app)",
"def test_output_one_client(self):\n self.test_case = 'one_client'\n self._run_test_case()",
"def serveClient(self, client):\r\n itrans = self.inputTransportFactory.getTransport(client)\r\n otrans = self.outputTransportFactory.getTransport(client)\r\n iprot = self.inputProtocolFactory.getProtocol(itrans)\r\n oprot = self.outputProtocolFactory.getProtocol(otrans)\r\n\r\n try:\r\n while True:\r\n self.processor.process(iprot, oprot)\r\n except TTransportException, tx:\r\n pass\r\n except Exception, x:\r\n logging.exception(x)\r\n\r\n itrans.close()\r\n otrans.close()",
"def get_client(self, clientname):\n client = self.dbsession.query(Client).filter_by(clientname=clientname).all()\n if not client:\n return self.create_client({'clientname': clientname})\n else:\n return client[0]",
"def handle_client(client): # Takes client socket as argument.\n\tr_packet = client.recv(BUFSIZ).decode(\"utf8\")\n\tar_packet = r_packet\n\tr_packet = r_packet.split(\"~\")\n\n\tfor sock in clients:\n\t\tif(clients[sock] == r_packet[0]):\n\t\t\tsock.send(bytes(ar_packet,\"utf8\"))",
"def tcp_incoming_connections():\n while True:\n client, client_address = SERVER.accept()\n print(\"%s:%s has connected.\" % client_address)\n client.send(bytes(\"Greetings from the cave! Now type your name and press enter!\"))\n addresses[client] = client_address\n Thread(target=handle_client, args=(client,)).start()",
"def __init__(self, client_ident):\n\t\tthreading.Thread.__init__(self, None)\n\t\tself.client_ident\t\t= client_ident\n\t\tself.start()",
"def __accept_new_client(self):\n client_socket, client_addr = self.__server_socket.accept()\n new_client = client.Client(client_socket, client_addr)\n self.__logger.info(\"Accepted new client from {}\".format(client_addr))\n self.__connected_clients.append(new_client)",
"def remove_client(self, client):\n client_conn = self.all_clients[client]\n\n self.all_clients.pop(client)\n self.all_connections.remove(client_conn)\n\n # client_conn.shutdown(2)\n client_conn.close()",
"def release_client(client):\n client.disconnect()\n client.loop_stop()\n wait_for_disconnection(WAIT_CONNECTION_TIMEOUT)",
"def __ServiceClient(self,Client):\n\t\twhile True:\n\t\t\tDataClient = Client.recv(1024)\n\t\t\tprint(DataClient)\n\t\t\t# your source code here\n\t\t\tmessage = DataClient\n\t\t\t# data to be sent to api\n\t\t\tdata = {'message': message}\n\t\t\t# sending post request and saving response as response object\n\t\t\tr = requests.post(url = self.API_ENDPOINT, data = data)\n\t\t\t# extracting response text\n\t\t\t#pastebin_url = r.text\n\t\t\t#print(\"The pastebin URL is:%s\"%pastebin_url)",
"async def http_client(hass, hass_client_no_auth):\n await async_setup_component(hass, webhook.DOMAIN, {})\n return await hass_client_no_auth()",
"def _dispatch_from_client_request(self):\n # Listen for client connection\n self._from_client_request.listen()\n\n while not self._exit_request:\n readable, _, _ = select([self._from_client_request], [], [self._from_client_request], 0.1)\n\n if readable:\n client_conn, client_addr = readable[0].accept()\n client_conn.setblocking(False)\n\n client_name_read, _, _ = select([client_conn], [], [client_conn])\n if client_name_read:\n client_name = json.loads(client_name_read[0].recv(cfg.HEADER).decode('utf-8'))\n else:\n print(\"Connection closed\")\n continue\n\n self._thread_lock.acquire()\n self._from_client_connections[client_conn] = client_name\n self._state[client_name] = 0\n self._thread_lock.release()\n\n print(\"Receiving commands from [\" + client_name + \", \" + client_addr[0] + \", \" + str(client_addr[1]) + ']')",
"def add_client(name):\n return create_client(name)",
"async def get_client_async(\n client_id: str, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs\n):\n request = GetClient.create(\n client_id=client_id,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def __init__(self, client):\n self.client = client",
"def client(self) -> 'BaseClient':\n return self",
"def handleClientConnected(client_reader, client_writer):\n loop.TOTAL_CONNECTIONS += 1\n addr = client_writer.get_extra_info('peername')\n print(\"Connected to\", addr, \"(%s pending connections)\" % loop.TOTAL_CONNECTIONS)\n distantreader, distantwriter = yield from asyncio.open_connection(distanthost, distantport, loop=loop)\n t1 = loop.create_task(transmitData(loop, distantreader, client_writer))\n t2 = loop.create_task(transmitData(loop, client_reader, distantwriter))\n transmitted1, transmitted2 = yield from asyncio.gather(t1, t2)\n loop.TOTAL_CONNECTIONS -= 1\n print(\"End of communication to\", addr, \"(%s pending connections)\" % loop.TOTAL_CONNECTIONS, \"(Transmitted: %s/%s)\" % (transmitted1, transmitted2))",
"def set_client_id(self):\n data = self.receive() # deserialized data\n client_id = data['clientid'] # extracts client id from data\n self.client_id = client_id # sets the client id to this client\n print(\"Successfully connected to server: \" + self.userInfo['host'] + \" / \" + str(self.userInfo['port']))\n print(\"Your client info is:\\n\" + \"Client Name: \" + self.userInfo['name'] + \"\\nClient ID: \" + str(client_id))",
"def client(self) -> mqtt.Client:\n return self._client",
"def __handle_peer(self, client_sock):\n\t\tself.__debug('New child ' + str(threading.currentThread().getName()))\n\t\tself.__debug('Connected ' + str(client_sock.getpeername()))\n\n\t\tsd = Request(sock=client_sock, debug=self.debug)\n\t\ttry:\n\t\t\twhile not self.shutdown:\n\t\t\t\tmessage = sd.recvdata()\n\t\t\t\tif self.callback.has_key(message.__class__.__name__):\n\t\t\t\t\tself.callback[message.__class__.__name__]()\n\t\texcept EOFError:\n\t\t\tself.__debug('EOFError...')\n\t\tclient_sock.close()",
"def __run_client(self):\n\n self._client = CoapClient(server_hostname=self._hostname, server_port=self._port, src_port=self._src_port)\n self._client_running = True\n\n if self.use_polling:\n super(CoapSensor, self).on_start()\n else:\n self.observe_topic()",
"def manage_send_request(self, client):\n try:\n client.send()\n except Exception as err:\n # condition, when encountered, denotes socket closing\n (this_client_host, this_client_port) = client.getpeername()\n print('closing connection from {}, port {} - {}'.format(this_client_host, this_client_port, err_to_str(err)))\n client.close()\n self.clients.remove(client)\n raise",
"def index_client(indexd_client):\n return indexd_client",
"def run(self):\n\n listen_port = DEBUGGER_PORT if \"RENPY_DEBUGGER_PORT\" not in os.environ else os.environ[\"RENPY_DEBUGGER_PORT\"]\n\n server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n server.bind((\"0.0.0.0\", listen_port))\n server.listen(0)\n\n while True:\n client, client_address = server.accept()\n self.attach_one_client(client)",
"async def client_ssh_handler(process):\n log.debug(f\"clients.py:client_ssh_handler - SSH details are: {dir(process)}\")\n reader = process.stdin\n writer = process.stdout\n client_details = process.get_extra_info(\"peername\")\n addr, port, *rest = client_details\n\n connection = PlayerConnection(addr, port, \"ssh\")\n\n await register_client(connection)\n\n tasks = [\n asyncio.create_task(client_read(reader, connection), name=f\"{connection.uuid} read\"),\n asyncio.create_task(client_write(writer, connection), name=f\"{connection.uuid} write\"),\n ]\n\n asyncio.current_task().set_name(f\"{connection.uuid} handler\")\n\n # We want to .wait until the first task is completed. Completed could be an actual finishing\n # of execution or an exception. If either the read or writer \"completes\", we want to ensure\n # we move beyond this point and cleanup the tasks associated with this client.\n _, rest = await asyncio.wait(tasks, return_when=\"FIRST_COMPLETED\")\n\n await unregister_client(connection)\n\n process.close()\n process.exit(0)\n\n for task in rest:\n task.cancel()",
"def main():\n # Clear the terminal before a new run\n os.system('cls') \n\n # Create the server_socket object and bind it to the desired address\n server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n server_socket.bind(SERVER_ADDRESS)\n \n # Start listening for new connections\n server_socket.listen()\n print(f\"[LISTENING] SERVER IS NOW LISTENING FOR NEW CONNECTIONS ON {SERVER_ADDRESS}\")\n\n while True:\n # Accept a new connection\n conn, addr = server_socket.accept()\n # Start a new thread handling the new connection\n client_thread = threading.Thread(target=handle_client, args=(conn, addr))\n client_thread.start()",
"def admin_client():\n host = '127.0.0.1'\n port = 8126\n return TcpClient(host, port)",
"def _require_client(self, client):\n if client is None:\n client = self._client\n return client",
"def manage_client(client):\r\n #information about the player\r\n msg_client('Ora inserisci il tuo nome: ', client)\r\n name = client.recv(BUFSIZ)\r\n clients[client] = name\r\n \r\n init_player(client)\r\n \r\n #get player's role\r\n msg_client('Il tuo ruolo è: ' + str(roles[client]), client)\r\n msg_client('Scrivi {quit} per uscire dal gioco', client)\r\n \r\n insert_number_player(client)\r\n \r\n start_question(client)\r\n \r\n check_player_ready(client)\r\n \r\n start_game(client)\r\n \r\n search_winner()\r\n \r\n close_client(client)",
"def run(self):\n try:\n self.server = socket(AF_INET, SOCK_STREAM)\n self.server.bind(self.address)\n self.server.listen(5) # Allows up to 5 waiting clients\n\n while True:\n self.myView.updateStatus('Waiting for connection ...')\n client, address = self.server.accept()\n self.myView.updateStatus('... connected from ' + str(address))\n handler = ClientHandler(client, self.bank, self.myView)\n handler.start()\n\n except Exception as message:\n self.myView.updateStatus(message)\n self.server.close()\n self.myView.updateStatus(\"Server shutting down.\")",
"def handle(self):\n self.ip = self.client_address[0]\n self.port = self.client_address[1]\n self.connection = self.request\n #self.logged_in = False\n\n # Loop that listens for messages from the client\n while True:\n received_string = self.connection.recv(4096).strip()\n if received_string:\n jsonObject = json.loads(received_string)\n request = jsonObject.get('request')\n #print(received_string)\n #self.handle_data(received_string)\n if request == 'login':\n print 'logging in'\n self.login(jsonObject)\n elif request == 'logout':\n self.logout()\n elif request == 'msg':\n self.send(jsonObject)\n elif request == 'names':\n self.getNames()\n elif request == 'help':\n return \"geiegohruuhiegr\"\n else:\n return \"you suck\"\n\n else:\n print('The client is disconnected.')\n break \n # TODO: Add handling of received payload from client",
"def accept_incoming_connections():\n while True:\n client, client_address = SERVER.accept()\n print(\"%s:%s has connected.\" % client_address)\n #client.send(bytes(\"Greetings from the cave! Now type your name and press enter!\", \"utf8\"))\n addresses[client] = client_address\n Thread(target=handle_client, args=(client,)).start()",
"def client():\n return app.test_client()",
"def client_server(request):\n def build(handler, host=\"localhost\", port=None, *, loop=None):\n loop = loop or asyncio.get_event_loop()\n port = port or get_next_port(host)\n\n server = serve(handler, host, port, klass=WebSocket, loop=loop)\n server = loop.run_until_complete(server)\n\n client = connect(\"ws://{}:{}\".format(host, port))\n client = loop.run_until_complete(client)\n return client, server\n\n return build",
"def get(self, id: int) -> Client:\n\n return self.__clients[id]",
"def get_client_by_id(self, client_id):\r\n cursor = self.conn.cursor()\r\n cursor.execute(\"\"\"SELECT * FROM CLIENT WHERE id={}\"\"\".format(client_id))\r\n return cursor.fetchall()",
"def on_client_connect(self, client):\r\n\t\tself.connection_logger.info('Received client connection from %s:%u' % (client.address, client.port))\r\n\t\tif (self.db_connection is False):\r\n\t\t\tclient.send('A critical database error has occurred. Please reconnect later.\\n')\r\n\t\t\tclient.socket_send()\r\n\t\t\tclient.deactivate()\r\n\t\t\tclient.sock.close()\r\n\t\t\treturn\r\n\t\tclient.send(self.welcome_message_data)\r\n\t\tself.pending_connection_list.append(client)\r\n\t\tself.post_client_connect.send(sender=client)"
] | [
"0.6872704",
"0.66223794",
"0.6563431",
"0.6314243",
"0.6278947",
"0.6241105",
"0.6189508",
"0.61751866",
"0.6150069",
"0.6113049",
"0.61002946",
"0.607193",
"0.60383105",
"0.6022606",
"0.5996656",
"0.59760684",
"0.5955252",
"0.5949449",
"0.59433",
"0.5927668",
"0.591821",
"0.5888377",
"0.5887917",
"0.5866311",
"0.5853782",
"0.5853782",
"0.584101",
"0.582946",
"0.58088595",
"0.5784581",
"0.5765891",
"0.5758705",
"0.57570153",
"0.57431114",
"0.5710044",
"0.5709901",
"0.56820506",
"0.5666524",
"0.5644673",
"0.56382334",
"0.5635157",
"0.56282854",
"0.560803",
"0.5607453",
"0.56021595",
"0.5601398",
"0.5596885",
"0.55582416",
"0.55530065",
"0.5528808",
"0.5525901",
"0.5525534",
"0.551318",
"0.55131453",
"0.5509596",
"0.5501831",
"0.5500126",
"0.54957914",
"0.5478449",
"0.54784334",
"0.5475304",
"0.5470944",
"0.5468182",
"0.5467035",
"0.5462509",
"0.54522204",
"0.5445266",
"0.5429931",
"0.54271376",
"0.54193705",
"0.5415764",
"0.54114574",
"0.53909844",
"0.53876966",
"0.53873974",
"0.5383264",
"0.53807026",
"0.53785884",
"0.5374621",
"0.5373417",
"0.53697395",
"0.5361849",
"0.5352248",
"0.53428984",
"0.53423107",
"0.534218",
"0.5340237",
"0.53364134",
"0.532592",
"0.5323306",
"0.53159785",
"0.5306328",
"0.5302192",
"0.5294612",
"0.5292396",
"0.52909845",
"0.529047",
"0.52847296",
"0.52836865",
"0.52813244",
"0.5278724"
] | 0.0 | -1 |
Retrieve the list of films in which a character appears | def getFilms(character):
ret = []
for film in character.get('films'):
number = int(film.rstrip('/').rpartition('/')[2])
if number not in cache:
response = requests.get(film)
response = response.json()
title = response.get('title')
cache[number] = title
ret.append(cache.get(number))
return ret | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def castFilmography (movies, minAppearances):\n actors = {}\n for (k,v) in movies.items():\n for a in v[2:7]:\n actors[a] = actors.get(a, []) + [k]\n return sorted([ [k] + v for (k,v) in actors.items() if len(v) >= minAppearances ])",
"def get_cards(self):\n return [Flashcard.from_word(word) for word in self.get_words()]",
"def search(self):\n datas = self.cleaned_data\n films = Film.objects\n if datas['title']:\n films = films.filter(Q(title_fr__icontains=datas['title']) | Q(title_en__icontains=datas['title']))\n if datas['character']:\n films = films.filter(Q(actors__firstname__icontains=datas['character']) | Q(actors__lastname__icontains=datas['character']))\n if datas['country']:\n films = films.filter(countries__icontains=datas['country'])\n if datas['start_date']:\n films = films.filter(release_date__gte=datas['start_date'])\n if datas['end_date']:\n films = films.filter(release_date__lte=datas['end_date'])\n if datas['play']:\n films = films.filter(play_references__play=datas['play'])\n if datas['adaptation']:\n films = films.filter(play_references__type__name=datas['adaptation'])\n if datas['contributor']:\n films = films.filter(contributor=datas['contributor'])\n return films",
"def furanose_names(self):\n output = set()\n for item in self.monomers():\n if item in self.furanose_fac:\n output.add(self.furanose_fac[item][\"name\"])\n return list(output)",
"def _get_genres(self):\n separated = self.movies['genre'].apply(self.separate_genre)\n return {g: True for x in separated for g in x}.keys()",
"def search_character(realm_list, PATH):\r\n dict_char = {}\r\n for realm in realm_list:\r\n char_list = os.listdir(PATH + realm)\r\n dict_char[realm] = char_list\r\n return dict_char, realm_list",
"def filter(self, ffun):\n # BEGIN\n lst = []\n for item in WordSet(self.text).words():\n # if len(item) == len(ffun):\n # lst.append(item)\n if ffun(item) == True:\n lst.append(item)\n return lst\n\n # END",
"def get_documents():\n documents = []\n for category in movie_reviews.categories():\n for fileid in movie_reviews.fileids(category):\n documents.append((list(movie_reviews.words(fileid)), category))\n \n return documents",
"def get_imdb_list():\n list_file = 'imdb.txt'\n name_column = 26\n f = open(list_file, 'r')\n film_list = []\n pos = 0\n\n for line in f:\n pos += 1\n words = line.split()\n name = line[name_column:-1]\n # could be problematic is there are brackets in the film name\n year = name[name.find('(') + 1:name.find(')')]\n name = name.replace('(' + year + ')', '')\n film = {\n 'pos': pos,\n 'score': Decimal(words[2]),\n 'name': name.strip(),\n 'year': year\n }\n film_list.append(film)\n f.close()\n return film_list",
"def get_documents(self, value, key='name'):\n documents = []\n for doc in value:\n if doc.endswith('.json'):\n key = 'filename'\n documents.append([x for x in self.vocab if x[key] == doc])\n return documents",
"def get_bfi_list():\n list_file = 'bfi_sight_and_sound_2012.txt'\n f = open(list_file, 'r')\n film_list = []\n\n for line in f:\n words = line.split(' ')\n #NOTE: pos is not the position in the pyton list but in the original\n # list so is not always an integer due to joint places\n film = {'pos': words[0], 'name': words[1][:-1]}\n film_list.append(film)\n f.close()\n return film_list",
"def get_filenames(self):\n return [doc['filename'] for doc in self.vocab]",
"def song_by_word(ans):\r\n songs_list = \"\"\r\n ans = ans.lower()\r\n albums = simple_album_list()\r\n for album in albums:\r\n songs = simple_songs_list(album)\r\n for song in songs:\r\n song = str(song)\r\n if ans in song.lower():\r\n songs_list += song + \", \"\r\n return songs_list[:-2]",
"def getlistofpossibletitles(fileitem,shows):\n title = []\n title.append(fileitem)\n lookfor = fileitem.replace(\".\",\" \")\n title.append(lookfor)\n lookfor = fileitem.replace('-',\" \")\n title.append(lookfor)\n return title",
"def allPossibleWords(Rack):\n def checkWord(word):\n return stringInRack(word,Rack)\n return filter(checkWord, Dictionary)",
"def get_cites_species():\n mongo_db = mongo_client_db()\n cursor = mongo_db[CITES_COLLECTION].find({'full_name': {'$ne': None}}, {'full_name':1})\n return [r['full_name'].encode('utf8') for r in cursor]",
"def character_statistics(file_name):\n from operator import itemgetter\n import collections\n cnt = collections.Counter()\n\n try:\n fsock = open(file_name,'r')\n except IOError:\n print (\"The file does not exist, exiting gracefully\")\n\n for line in fsock:\n for c in line.rstrip().lower():\n if c.isalpha():\n cnt[c] += 1\n\n lessAbundant = cnt.most_common()[len(cnt)-1][1]\n #print(type(cnt.most_common()[len(cnt)-1]))\n #print(lessAbundant)\n #print (cnt.most_common()[-4:len(cnt)])\n #print (sorted(cnt.items(), key=itemgetter(1))[0])\n #print (cnt.most_common())\n\n # list comprehension\n #lessCommon = sorted([k for (k,v) in cnt.most_common() if v == lessAbundant])[0]\n # tuple unpacking, filter and map\n lessCommon = sorted(list(filter( lambda t: t[1] == lessAbundant, cnt.most_common())))[0][0]\n #lessCommon = map( lambda (keyLetter,_): keyLetter, filter( lambda (_,freqVal): freqVal == lessAbundant, cnt.most_common()) )\n #print(lessCommon)\n\n return (cnt.most_common()[0][0], lessCommon)",
"def get_intent_filers(apk):\n # FIXME : not sure this fully reproduce Koodous filters\n res = []\n filters = apk.xml['AndroidManifest.xml'].findall(\".//intent-filter\")\n for f in filters:\n for ff in f.findall('.//action'):\n filt = ff.get('{http://schemas.android.com/apk/res/android}name')\n if filt:\n res.append(filt)\n return res",
"def list_favor(self):\n if \"all\" in self.switches:\n favors = Reputation.objects.exclude(favor=0).order_by(\"-date_gossip_set\")\n self.msg(\"Characters with favor: %s\" % \", \".join(str(ob) for ob in favors))\n return\n org = self.get_organization(check_perm=False)\n favors = org.reputations.filter(Q(favor__gt=0) | Q(favor__lt=0)).order_by(\n \"-favor\"\n )\n msg = \"{wThose Favored/Disfavored by %s{n\\n\" % org\n msg += \"\\n\\n\".join(\n \"{c%s{w (%s):{n %s\" % (ob.player, ob.favor, ob.npc_gossip) for ob in favors\n )\n self.msg(msg)",
"def known(words):\n return [w for w in words if w in tokenizer.vocab] #change vocab file?",
"def specificWordList(catsString):\n cats = catsStringToArray(catsString)\n wordList = []\n for i in cats:\n for word in Word.objects.all().filter(category=i):\n wordList.append(word)\n return wordList",
"def get_oscars_best_picture_list():\n list_file = 'oscar_best_picture_list.txt'\n f = open(list_file, 'r')\n film_list = []\n\n for line in f:\n words = line.split('-')\n film = {\n 'year': words[0][:-1],\n 'name': words[1][2:-2]\n }\n film_list.append(film)\n f.close()\n # Reverse as we want newest first not last\n film_list.reverse()\n return film_list",
"def find_genre_playlists(data):\n playlists = []\n\n if data['genre']:\n playlists += data['genre']\n\n if data['comments']:\n playlists += data['comments']\n\n matches = re.findall('\\(\\s*(cover|live|unplugged|acoustic|remix|instrumental)', data['title'].lower())\n if matches:\n if 'cover' in matches:\n matches.remove('cover')\n matches += ['covers']\n\n if 'acoustic' in matches:\n matches.remove('acoustic')\n matches += ['unplugged']\n\n if 'remix' in matches:\n matches.remove('remix')\n matches += ['remix']\n\n if 'instrumental' in matches:\n matches.remove('instrumental')\n matches += ['instrumental']\n\n playlists += matches\n\n return set([x for x in playlists if x != 'none'])",
"def recognize_pic(path):\n results = recognize(path, access_token, cookie, fb_dtsg)\n\n names = [str(result['name']) for result in results]\n print ('%s contains %s' % (path, names))\n\n return {\n \"filename\": path,\n \"friends\": names\n }",
"def frequent_words(text, k):\n\n frequent_patterns = []\n freq_map = frequency_table(text, k)\n max_val = max_map(freq_map)\n for key in freq_map.keys():\n if freq_map[key] == max_val:\n frequent_patterns.append(key)\n return frequent_patterns",
"def all_facenames ( ):\n global facenames\n \n if facenames is None:\n facenames = FontEnumerator().facenames()\n facenames.sort()\n return facenames",
"def search(self, filtr):\n return [note for note in self.notes if note.match(filtr)]",
"def api_read_foundations(self):\n return [str(found.get_topmost_card()) for found in self.board.foundations]",
"def find_cliche(self,datapath,filename):\r\n data = self.common.read_csv(datapath,filename)\r\n ##speechtext = data.speechtext.str.replace(r'[^\\w\\s\\,?]','') #Removing all panctuations from speech text\r\n speechtext = data.speechtext.str.lower()\r\n\r\n #Using tf idf to find words or tokens that are less important\r\n vectorizer = TfidfVectorizer(decode_error='replace',stop_words='english',encoding='utf-8')\r\n tfidf = vectorizer.fit_transform(speechtext.apply(lambda x: np.str_(x)))\r\n\r\n terms = vectorizer.get_feature_names()\r\n sums = tfidf.sum(axis=0)\r\n data = []\r\n for col, term in enumerate(terms):\r\n data.append( (term, sums[0,col] ))\r\n\r\n ranking = pd.DataFrame(data, columns=['term','rank'])\r\n cliches = ranking.sort_values('rank', ascending=False).nlargest(25, 'rank')\r\n found_cliches = cliches.term.values\r\n #print(found_cliches)\r\n return found_cliches",
"def known(words: list[str]) -> list[str]:\n return [z for z in list(set(words)) if z in self.words]",
"def furanoses(self):\n return sorted(set([self[x.split(\"_\")[-1]][\"name\"] for x in self.furanose_fac.keys()]))",
"def return_dispense_media():\n media = {\"50_ug/ml_Kanamycin\": \"lb_miller_50ug_ml_kan\",\n \"100_ug/ml_Ampicillin\": \"lb_miller_100ug_ml_amp\",\n \"100_ug/mL_Spectinomycin\": \"lb_miller_100ug_ml_specto\",\n \"30_ug/ml_Kanamycin\": \"lb_miller_30ug_ml_kan\",\n \"15_ug/ml_Tetracycline\": \"lb_miller_15ug_ml_tet\",\n \"50_ug/ml_Kanamycin_25_ug/ml_Chloramphenicol\":\n \"lb_miller_50ug_ml_kan_25ug_ml_cm\",\n \"25_ug/ml_Chloramphenicol\": \"lb_miller_25ug_ml_cm\",\n \"LB_miller\": \"lb_miller_noAB\",\n \"TB_100_ug/ml_Ampicillin\": \"tb_100ug_ml_amp\",\n \"TB_50_ug/ml_Kanamycin\": \"tb_50ug_ml_kan\"}\n return (media)",
"def monomer_names(self):\n output = set()\n for item in self.monomers():\n if item in self.pyranose_fac:\n output.add(self.pyranose_fac[item][\"name\"])\n return list(output)",
"def build_frequency_list(name_list):\n analyzer = build_analyzer()\n char_list = []\n for name in name_list:\n char_list += analyzer(name)\n return char_list",
"def get_friends(character, _info):\n return map(get_character, character.friends)",
"def get_speakers(words):\n speakers = []\n utterances = \" \".join(words).split(\"<|endoftext|>\")[:-1]\n for u in utterances:\n s = u.split(\":\")[0].strip() # select names according to \":\"\n if s:\n speakers.append(s)\n return list(set(speakers))",
"def uniqueCastMembers (movies):\n actors = castFilmography(movies, 1)\n return sorted([ x[0] for x in actors ])\n # OR:\n # actors = set()\n # for v in movies.values():\n # actors.add(v[2:7])\n # return sorted(actors)",
"def get_female_diversity_movies_lst():\n cnx,cur = connect_to_db() #get connection with db\n cur.execute(\"SELECT movies.movie_id, movies.title as title, COUNT(IF(profile.gender='1',1,NULL)) / NULLIF(COUNT(IF(profile.gender='1',1,NULL))+COUNT(IF(profile.gender='2',1,NULL)), 0) as ratio\"\n + \" FROM movie_crew, profile, movies WHERE profile.profile_id = movie_crew.profile_id AND movie_crew.movie_id = movies.movie_id \"\n + \" GROUP BY movies.movie_id HAVING ratio >= 0 \")\n lst = cur.fetchall()\n cur.close()\n cnx.close()\n return lst",
"def collect_english_cats(self):\n tf.logging.info('collecting english categories')\n self.english_cats = list(\n self.frames(filter_english=True, filter_category=True))",
"def get_codecs_list():\n for codec in CODECS_IN_FILE.iterkeys():\n print codec",
"def containing(letter, text):\n return([word for word in text if word.count(letter) >= 1])",
"def create_vocabulary(directory, cutoff):\n\n top_level = os.listdir(directory)\n a = cutoff\n vocab = {}\n for d in top_level:\n subdir = d if d[-1] == '/' else d+'/'\n files = os.listdir(directory+subdir)\n for f in files:\n with open(directory+subdir+f,'r', encoding=\"utf-8\") as doc:\n for word in doc:\n word = word.strip()\n if not word in vocab and len(word) > 0:\n vocab[word] = 1\n elif len(word) > 0:\n vocab[word] += 1\n return sorted([word for word in vocab if vocab[word] >= cutoff])",
"def filtered_parent_freq_count(filenames, gram_size):\n counts = Counter()\n vocab = list(read_files(filenames, gram_size=gram_size))\n parent_list = parent_shared_ngrams(filenames, gram_size=gram_size)\n for _, _, speaker, ngram in vocab:\n if speaker == \"MOT\" and ngram in parent_list:\n counts[ngram] += 1\n return counts.most_common(10)",
"def get_films_in_country(film_set, country):\n film_dict = dict()\n for film in film_set:\n try:\n film_locations = film[1].split(', ')\n film_country = film_locations[-1]\n film_city = film_locations[-2]\n\n if film_country == country:\n if film_city in film_dict:\n film_dict[film_city].add(film[0])\n else:\n film_dict[film_city] = {film[0]}\n except (TypeError, IndexError, AttributeError):\n continue\n return film_dict",
"def get_set_from_search(self, word):\n found_set = set()\n found_count = 0\n # con1 = services.connect(user='sysdba', password='masterkey')\n # print(\"Security file for database is: \", con1.get_security_database_path() + \"\\n\")\n\n con = fdb.connect(\n database=self.db_filepath,\n # dsn='localhost:~/test/CGI.vvv', #localhost:3050\n user='sysdba', password='masterkey'\n #charset='UTF8' # specify a character set for the connection\n )\n\n # Create a Cursor object that operates in the context of Connection con:\n cur = con.cursor()\n\n if \"'\" in word: # we need to add an extra for SQL statements\n word = word.replace(\"'\", \"''\")\n\n SELECT = \"select * from FILES WHERE FILE_NAME LIKE '%\" + word + \".%'\" # adding period to include start of extension\n\n try:\n cur.execute(SELECT)\n for row in cur:\n print(\"found: \", row[1])\n found_set.add(row[1])\n found_count += 1\n\n print(\"found_count:\", found_count)\n con.close()\n return found_set, found_count\n\n except Exception as identifier:\n errormesg = \"Error while looking up: \" + word + \"\\n\" + str(identifier)\n print(BColors.FAIL + errormesg + BColors.ENDC)\n return found_set, found_count",
"def getFrequentPatterns(self):\n return self.finalPatterns",
"def find_frequent_words(word_frequencies, amount=50):\n alphabetically_sorted = sorted(word_frequencies.most_common(amount), key=lambda tup: tup[0])\n final_sorted = sorted(alphabetically_sorted, key=lambda tup: tup[1], reverse=True)\n list1 = [i[0] for i in final_sorted]\n\n list2 = [i[1] for i in final_sorted]\n return list1, list2",
"def listGenres(movieId):\n genres = movies.at[movieId, 'genres'] #change movies to whatever variable name the movies df has\n genres = genres.split('|')\n return genres",
"def get_movies_by_words(words):\n cnx,cur = connect_to_db()\n text = \"'\";\n for word in words:\n text += \"+\"+word+\" \"\n text += \"'\"\n cur.execute(\"SELECT * FROM movies \"\n \"Where match(overview) against(\"+text+\" IN BOOLEAN MODE) LIMIT 100 \")\n lst = cur.fetchall()\n size = len(lst)\n cur.close()\n cnx.close()\n return lst,size",
"def get_char_names(charlist, caller):\n watch_list = caller.db.watching or []\n verbose_where = False\n if caller.tags.get(\"verbose_where\"):\n verbose_where = True\n return \", \".join(\n char_name(char, verbose_where, watch_list)\n for char in charlist\n if char.player\n and (not char.player.db.hide_from_watch or caller.check_permstring(\"builders\"))\n )",
"def unicWords(self):\n words=self.buscaPalavras()\n return self.freqWords(words).keys()",
"def get_num_words_spoken_by_character_per_episode(content):\n content = list(csv.reader(content.splitlines(), delimiter=','))\n characters = [name[2] for name in content]\n characters = list(dict.fromkeys(characters))\n del characters[0]\n res = defaultdict()\n for character in characters:\n episode = 1\n dic = {}\n count = 0\n for row in content: \n if row[2] == character:\n if str(episode) == row[1]:\n count += len(row[3].split())\n else:\n dic[str(episode)] = count\n episode = int(row[1])\n count = len(row[3].split())\n if '13' not in dic.keys():\n dic['13'] = count \n dic = Counter(dic)\n res[character] = dic\n return res",
"def filtra(rut):\n caracteres = \"1234567890k\"\n rutx = \"\"\n for cambio in rut.lower():\n if cambio in caracteres:\n rutx += cambio\n return rutx",
"def collect_coexist(self):\r\n co_list = []\r\n ner_dictKeyList = list(self.ner_dict.keys())\r\n for words in self.ner_sents:\r\n co_ners = set(ner_dictKeyList).intersection(set(words))\r\n co_info = self.combination(list(co_ners))\r\n co_list += co_info\r\n if not co_list:\r\n return []\r\n return {i[0]: i[1] for i in Counter(co_list).most_common()}",
"def get_categories():\n bu = 'http://www.watchonlinemovies.com.pk'\n r = requests.get(bu, headers=mozhdr)\n if r.url != bu:\n bu = r.url\n items = {'ARecently Uploaded Movies': bu,\n 'B2018 Movies': bu + 'category/indian-movies/2018-full-movies/',\n 'C2018 English Movies': bu + 'category/hollywood-movies/2018-movies-hollywood/',\n 'D[COLOR yellow]** Search **[/COLOR]': bu + '?s=',\n 'Z[COLOR red]Note: This addon is no longer supported, please install WatchOnlineMovies-New from ReasonsRepository [/COLOR]': 'book'}\n \n return items",
"def get_keys(filen, flist): \n if (filen in flist[0]):\n key1 = 'PSTH_STIM'\n key2 = 'ELEC_'\n key3 = '_TRIAL_'\n elif (filen in flist[1]) or (filen in flist[2]):\n key1 = 'PSTH'\n key2 = ''\n key3 = '_'\n elif (filen in flist[3]) or (filen in flist[4]):\n key1 = 'Stim'\n key2 = 'Elec'\n key3 = 'Repet'\n return key1, key2, key3",
"def kmers(sequence, alphabet, k):\n mers = (''.join(c) for c in windowed(k, sequence))\n return [mer for mer in mers if all(base in set(alphabet) for base in mer)]",
"def create_medium_list(self):\n word_list = []\n try:\n f = open(self.index, 'r')\n for line in f:\n if line[0] == 'M' and line[1] == \" \" and line[2] != \" \":\n readout = line[2:].upper()\n has_digit = re.search('\\d', readout)\n # this can be added to if there are more characters that cannot be\n # used in the game\n has_wrong = re.search(\"[-,.' '/!?]\", readout)\n if has_digit is None:\n if has_wrong is None:\n word_list.append(readout.strip('\\n'))\n return word_list\n except IOError:\n print(\"Cannot open file\")\n raise (IOError)",
"def extract_frequent_words(records, num_words, no_counts=False):\r\n word_counts = FreqDist(records)\r\n frequent_words = word_counts.most_common(num_words)\r\n if no_counts:\r\n frequent_words = [word[0] for word in frequent_words]\r\n print(\"=====The {:d} Most Frequent Words=====\".format(num_words))\r\n print(frequent_words)\r\n return frequent_words",
"def search_entries(search):\n _, filenames = default_storage.listdir(\"entries\")\n result = []\n for filename in filenames: \n if filename.endswith(\".md\"):\n nameonly = re.sub(r\"\\.md$\", \"\", filename)\n \n if nameonly.lower() == search.lower():\n #print(\"name only :\", nameonly)\n #print(\"search :\", search)\n return (nameonly)\n elif search.lower() in nameonly.lower():\n result.append(nameonly)\n return(result)",
"def getVocabList():\n vocab_list = []\n with open('vocab.txt') as f_obj:\n while True:\n vocab_line = f_obj.readline()\n if not vocab_line:\n break\n word = re.search(r'\\t(\\w+)', vocab_line).group(1)\n vocab_list.append(word)\n return vocab_list",
"def read_screen_names(filename):\n flist = []\n f = open('candidates.txt')\n for line in f:\n \tflist.append(line.strip('\\n'))\t\n return flist",
"def parse_video(filename: str) -> List[CritterImage]:\n all_icons: List[CritterImage] = []\n section_count: Dict[CritterType, int] = collections.defaultdict(int)\n for critter_type, frame in _read_frames(filename):\n section_count[critter_type] += 1\n for new_icon in _parse_frame(frame):\n critter_icon = new_icon.view(CritterIcon)\n critter_icon.critter_type = critter_type\n all_icons.append(critter_icon)\n\n assert section_count[CritterType.INSECTS] != 1, \\\n 'Incomplete critter scan for INSECTS section.'\n assert section_count[CritterType.FISH] != 1, \\\n 'Incomplete critter scan for FISH section.'\n\n return _remove_blanks(all_icons)",
"def getFichas_disponibles(self):\n \n lista = copy.deepcopy(self.__fichas_disponibles)\n return lista #EJ. [\"T\", \"O\", \"P\", \"O\"]",
"def view_character_list(request):\n\n characters_data = Character.objects.values('id', 'display_name')\n\n return render_chaffers(\n request,\n 'character_list.html',\n {'character_data': [json.dumps(character_data) for character_data in characters_data]}\n )",
"def vocabulary(self):\n lst = []\n for key in self.frequencies().keys():\n lst.append(key)\n return sorted(lst)\n #for lines in self.lines:\n # line = lines.strip(os.linesep)\n # wordslst = line.split()\n # for word in wordslst:\n # if word not in lst:\n # lst.append(word.lower())\n #return sorted(lst)",
"def getPicSufChars():\n if not gVal['picSufChars']:\n gVal['picSufChars'] = genSufList()\n\n return gVal['picSufChars']",
"def Collection_search_name(C:list, name:str) -> list:\r\n restaurants = []\r\n for r in C:\r\n for dish in r.menu:\r\n if name in dish.name:\r\n restaurants.append(r)\r\n return restaurants",
"def getlistofpossibletitles(fileitem,fname):\n title = []\n oddtitles = open(\"oddtitles.txt\", 'r')\n content = oddtitles.read()\n oddtitles.close()\n\n content = content.split(\"\\n\")\n for line in content:\n elements = line.split(',')\n if fileitem in elements[0]:\n #print(elements[1])\n title.append(elements[1].title())\n\n \n title.append(fileitem)\n title.append(fileitem.title())\n lookfor = fileitem.replace(\".\",\" \")\n title.append(lookfor)\n title.append(lookfor.title())\n lookfor = fileitem.replace('-',\" \")\n title.append(lookfor)\n title.append(lookfor.title())\n with open(fname, \"r\") as dataf:\n for line in dataf:\n if lookfor.upper() in line.upper():\n line = line.replace(\"\\n\",\"\")\n title.append(line)\n title.append(line.title())\n return title",
"def get_filter_word_list(self):\n self.filter_words = self.read_word_file(self.filter_word_file)",
"def infer_emg_channels(ch_names):\n emg = ['EMG Chin']\n found = []\n\n # find frontal channel\n for ch in ch_names:\n if any([x in ch for x in emg]):\n found.append(ch)\n return found",
"def find_frequent_words(words, most_frequent): \n \n # common_words = Counter(sorted(words))\n # print common_words\n common_words = Counter(sorted(words)).most_common(most_frequent)\n print (common_words )\n most_common_words = [w for w, w_count in common_words]\n return most_common_words",
"def get_document(self, value, key='name'):\n if value.endswith('.json'):\n key = 'filename'\n return [x for x in self.vocab if x[key] == value][0]",
"def frequency_feelings(self):\n feelings = {}\n for response in self.responses:\n if response.question.text == \"In one word, how does this text make you feel?\":\n lower_case_word = response.response.lower()\n if feelings.get(lower_case_word, 0) == 0:\n feelings[lower_case_word] = 1\n else:\n feelings[lower_case_word] += 1\n\n frequent_words = [] # list of tuples in the format (frequency, word)\n for word in feelings:\n if feelings[word] > 1:\n frequent_words.append((word, feelings[word]))\n frequent_words.sort(key=lambda x: x[1], reverse=True)\n return frequent_words",
"def get_words(f: str, letters: List[str]) -> List[str]:\r\n forbidden_letters = [i for i in string.ascii_lowercase]\r\n for i in letters:\r\n try:\r\n forbidden_letters.remove(i)\r\n except:\r\n pass\r\n words_file = open(f)\r\n word_list = []\r\n letstr = \"\"\r\n for i in letters:\r\n letstr += i\r\n for word in words_file:\r\n word = word[:-1].lower()\r\n if len(word) >= 4:\r\n count = 0\r\n for let in word:\r\n if let in forbidden_letters:\r\n count += 1\r\n if word.count(let) > letstr.count(let):\r\n count += 1\r\n if letters[4] not in word:\r\n count += 1\r\n if count == 0:\r\n word_list.append(word)\r\n return word_list",
"def get_movies(genre: str):\n with MongoClient(uri) as client:\n movie_collection = client[DB][MSG_COLLECTION]\n msg_list = movie_collection.find({\"genres\": genre}).limit(100)\n movie_title_list = []\n for msg in msg_list:\n movie_title_list.append(msg[\"title\"])\n return movie_title_list",
"def get_verbs(self) -> Set[str]:",
"def getnames(f):\n # Assumes file is sorted with girl names first, boy names second, and the\n # most popular name at the top of each list.\n\n lineoftext = f.readline()\n girlname,sex,count = processline(lineoftext)\n\n while sex != \"M\":\n name,sex,count = processline(f.readline())\n boyname=name\n\n return girlname,boyname",
"def get_people(self, letter = None):\n if letter:\n people = Person.objects.filter(member_of__entity__in = self.get_descendants(include_self = True), surname__istartswith = letter).distinct().order_by('surname', 'given_name', 'middle_names')\n else: \n people = Person.objects.filter(member_of__entity__in = self.get_descendants(include_self = True)).distinct().order_by('surname', 'given_name', 'middle_names')\n return people",
"def filter_matches(filename, e):\n transcription_factors = []\n with open(filename) as f:\n for record in NCBIXML.parse(f):\n best = e\n if record.alignments:\n for alignment in record.alignments:\n for hsp in alignment.hsps:\n if hsp.expect < best:\n best = hsp.expect\n\n if best < e:\n iden = record.query.split(\" \")[0]\n locus = iden.split(\"|\")[1]\n transcription_factors.append(locus)\n\n return transcription_factors",
"def getMatchingMotifs(fname_test):\n global gTrie\n res = []\n\n with open(fname_test) as f:\n motifs = f.readlines() \n for m in motifs[1:]:\n m = m.split(',')[0]\n if gTrie.has_key(m):\n res.append(m)\n #print(m)\n return res",
"def get_characters_mapping(X, f=None):\n f = f or (lambda x: x)\n \n vocab = {\n '<pad>': 0,\n '<unk>': 1,\n }\n for sentence in X:\n for word in sentence:\n for letter in f(word):\n if letter not in vocab:\n vocab[letter] = len(vocab)\n return vocab",
"def build_list(self, word_list):\n # Get frequency list for keys\n freq = word_list.groupby('key').agg('count')\n # Filter out only keys with greater or equal frequency to length\n key_list = freq.loc[freq['word'] >= freq.index.str.len()]\n return key_list",
"def names(filter=None):",
"def most_similar_actors(self, moviename):\n movieid = util.get_movie_id(moviename)\n movie_movie_dict = self.get_movie_movie_vector(moviename)\n if movie_movie_dict == None:\n return None\n actors = []\n for (movie,val) in movie_movie_dict:\n if val <= 0:\n break\n movieid = util.get_movie_id(movie)\n actors = actors + self.get_actors_of_movie(movie)\n if len(actors) >= 10:\n break\n\n actors_of_given_movie = self.get_actors_of_movie(moviename)\n\n actorsFinal = [x for x in actors if x not in actors_of_given_movie]\n\n actornames = []\n for actorid in actorsFinal:\n actor = util.get_actor_name_for_id(actorid)\n actornames.append(actor)\n\n return actornames",
"def like(self, cname: str, mx: int = None)->list:\n res = self.data[self.data['cname'].str.contains(cname)].to_dict(orient='record')\n if not res: return []\n if (mx is not None) and (len(res) > mx):\n assert mx > 0, \"CountryCodes().like: `mx` argument must be positive or `None`.\"\n return res[:mx]\n return res",
"def consensus(self):\n consensus = []\n for pos in range(self.cols):\n matches = []\n for letter in Motif.getAlphabet(self).getSymbols():\n p = self.counts[pos].getFreq(letter)\n if p > 0:\n matches += letter\n consensus.append(matches)\n return consensus",
"def infer_eog_channels(ch_names):\n \n eog = ['EOG ROC', 'EOG LOC']\n found = []\n\n # find frontal channel\n for ch in ch_names:\n if any([x in ch for x in eog]):\n found.append(ch)\n return found",
"def find_oneMers(spectrum): \n candidates = list('_'*len(spectrum))\n for i in range(len(spectrum)):\n if spectrum[i] in reversed_map:\n candidates[i] = reversed_map[spectrum[i]]\n return [cantdidate for cantdidate in candidates if cantdidate != '_' ]",
"def getEpCast(imdbLink, dicChars):\n\n dicEpCast = dicChars.copy()\n\n urlIDMB = requests.get(imdbLink + \"fullcredits\").text\n soup = BeautifulSoup(urlIDMB, 'lxml')\n seriesTable = soup.find('table', {'class': 'cast_list'}).find_all('tr')\n\n for char in seriesTable:\n charInfo = char.find_all('td')\n if len(charInfo) == 4:\n actorName = charInfo[1].text.strip()\n\n key = normalizeName(actorName)\n\n if key in dicEpCast:\n dicEpCast[key] = '1'\n\n return \",\".join(x for x in dicEpCast.values())",
"def show_magicians(magicians):\n for magician in magicians:\n print(magician.title())",
"def get_words(file_name, letters):\r\n with open(file_name, encoding = 'utf-8') as file:\r\n correct_dict = {\"/n\":\"noun\", \"noun\":\"noun\", \"/v\":\"verb\", \"verb\":\"verb\",\r\n \"/adj\":\"adjective\", \"adj\":\"adjective\", \"adv\":\"adverb\"}\r\n word_list = []\r\n for line in file:\r\n for key, value in correct_dict.items():\r\n word = line.split()[0]\r\n if key in line and len(word)<=5:\r\n if word[0] in letters and word not in word_list:\r\n word_list.append((word, value))\r\n break\r\n return word_list",
"def get_card_sets(self, name: str) -> List:",
"def known(words):\r\n return set(w for w in words if w in WORDS)",
"def get_keywords_for_movie(url):\n pass",
"def show_magicians(magician_names):\r\n for magician in magicians:\r\n print(magician.title())",
"def find_all_ORFs_oneframe(dna):",
"def collect_frequencies(nameoffile):\n with open(nameoffile) as text:\n list_of_words = []\n for line in text:\n words = line.split()\n list_of_words = list_of_words + words\n list_of_words = [word.lower() for word in list_of_words]\n\n dict = Counter(list_of_words)\n print(dict)\n return dict",
"def common_words_min(filename, min_chars):\n wordPattern = re.compile('[a-zA-Z]{' + str(min_chars) + ',}')\n occurance = dict()\n with open(filename, 'r') as f:\n contents = f.read()\n words = wordPattern.finditer(contents)\n for wordMatch in words:\n word = wordMatch.group(0).lower()\n if word in occurance:\n occurance[word] += 1\n else:\n occurance[word] = 1\n return sorted(occurance, key=occurance.get, reverse=True)",
"def speaker_vocab(filenames, target_speaker):\n return unique_ngrams(filenames, target_speaker, gram_size=1)"
] | [
"0.59737927",
"0.55234385",
"0.545212",
"0.54288036",
"0.5389295",
"0.5367945",
"0.5315806",
"0.53116363",
"0.53098303",
"0.52998656",
"0.52903426",
"0.5223463",
"0.521527",
"0.5212591",
"0.5199618",
"0.5162614",
"0.5160674",
"0.51528895",
"0.5130299",
"0.5123009",
"0.5109176",
"0.5090501",
"0.5086243",
"0.5060003",
"0.5059296",
"0.50484776",
"0.5030889",
"0.5025591",
"0.5011109",
"0.49966523",
"0.49913737",
"0.49907413",
"0.49881384",
"0.49602115",
"0.4955032",
"0.4946794",
"0.4939935",
"0.4921598",
"0.49196056",
"0.49165303",
"0.48964295",
"0.48900768",
"0.48846164",
"0.48830518",
"0.488217",
"0.4876806",
"0.4875277",
"0.48664778",
"0.48636985",
"0.4854122",
"0.48533797",
"0.48403955",
"0.48336327",
"0.48328227",
"0.4817157",
"0.48144528",
"0.48090917",
"0.4806194",
"0.48058477",
"0.48024148",
"0.48016188",
"0.4800246",
"0.4799632",
"0.4796633",
"0.4793502",
"0.4792842",
"0.47896618",
"0.47865313",
"0.47825068",
"0.4774086",
"0.47712073",
"0.47708505",
"0.4768948",
"0.47652707",
"0.47635233",
"0.4755495",
"0.47547555",
"0.4750142",
"0.47398067",
"0.4738427",
"0.47330937",
"0.47266635",
"0.4715366",
"0.47073668",
"0.47070432",
"0.47040504",
"0.47027704",
"0.470261",
"0.470251",
"0.47009224",
"0.47003597",
"0.4698309",
"0.46945998",
"0.4691067",
"0.46850508",
"0.46849257",
"0.46824738",
"0.4678478",
"0.46717995",
"0.46710932"
] | 0.6638027 | 0 |
Set requires_grad to True for all parameters, but save the original values to restore them later | def _get_gradients(self, batch):
embedding_gradients = []
original_param_name_to_requires_grad_dict = {}
for param_name, param in self.model.named_parameters():
original_param_name_to_requires_grad_dict[param_name] = param.requires_grad
param.requires_grad = True
hooks = self._register_embedding_gradient_hooks(embedding_gradients)
loss = self.forward_step(batch)
self.model.zero_grad()
loss.backward()
for hook in hooks:
hook.remove()
# restore the original requires_grad values of the parameters
for param_name, param in self.model.named_parameters():
param.requires_grad = original_param_name_to_requires_grad_dict[param_name]
return embedding_gradients[0] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_requires_grad(self, requires_grad):\n for parameter in self.parameters():\n parameter.requires_grad = requires_grad",
"def set_requires_grad(self, requires_grad):\n for parameter in self.parameters():\n parameter.requires_grad = requires_grad",
"def freeze_params(m):\r\n for p in m.parameters():\r\n p.requires_grad = False",
"def freeze_params(m):\n for p in m.parameters():\n p.requires_grad = False",
"def freeze(self) -> None:\n self._set_requires_grad(False)\n for param in self.model.fc.parameters():\n param.requires_grad = True",
"def set_parameter_requires_grad(model, feature_extract):\n if feature_extract:\n for param in model.parameters():\n param.requires_grad = False",
"def __freeze(self):\r\n features_layer = self._model._net\r\n for param in features_layer.parameters():\r\n param.requires_grad = False",
"def freeze_params(model: nn.Module):\n for par in model.parameters():\n par.requires_grad = False",
"def train_all(self):\n for p in self.parameters():\n p.requires_grad = True\n return self",
"def freeze(self):\n # Freeze.\n self.frozen = True\n for param in self.parameters():\n param.requires_grad = False",
"def freeze_params(module: nn.Module):\n for _, p in module.named_parameters():\n p.requires_grad = False",
"def freeze_parameters(module: nn.Module):\n for p in module.parameters():\n p.requires_grad = False",
"def fine_tune(self):\n for params in self.encoder.parameters():\n params.requires_grad = self.if_fine_tune",
"def requires_grad(self):\n self._activations.requires_grad = True",
"def freeze_model(model):\n for param in model.parameters():\n param.requires_grad = False",
"def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False):\n is_nan = False\n for (name_opti, param_opti), (name_model, param_model) in zip(named_params_optimizer, named_params_model):\n if name_opti != name_model:\n warnings.warn(\"name_opti != name_model: {} {}\".format(name_opti, name_model))\n raise ValueError\n if param_model.grad is not None:\n if test_nan and torch.isnan(param_model.grad).sum() > 0:\n is_nan = True\n if param_opti.grad is None:\n param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size()))\n param_opti.grad.data.copy_(param_model.grad.data)\n else:\n param_opti.grad = None\n return is_nan",
"def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False):\n is_nan = False\n for (name_opti, param_opti), (name_model, param_model) in zip(named_params_optimizer, named_params_model):\n if name_opti != name_model:\n logger.error(\"name_opti != name_model: {} {}\".format(name_opti, name_model))\n raise ValueError\n if param_model.grad is not None:\n if test_nan and torch.isnan(param_model.grad).sum() > 0:\n is_nan = True\n if param_opti.grad is None:\n param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size()))\n param_opti.grad.data.copy_(param_model.grad.data)\n else:\n param_opti.grad = None\n return is_nan",
"def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False):\n is_nan = False\n for (name_opti, param_opti), (name_model, param_model) in zip(named_params_optimizer, named_params_model):\n if name_opti != name_model:\n logger.error(\"name_opti != name_model: {} {}\".format(name_opti, name_model))\n raise ValueError\n if param_model.grad is not None:\n if test_nan and torch.isnan(param_model.grad).sum() > 0:\n is_nan = True\n if param_opti.grad is None:\n param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size()))\n param_opti.grad.data.copy_(param_model.grad.data)\n else:\n param_opti.grad = None\n return is_nan",
"def set_requires_grad(self, requires_grad=False):\n for name in self.network_names:\n if isinstance(name, str):\n net = getattr(self, 'net')\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def zero_grad(self):\r\n for param in self.params:\r\n param.grad = None",
"def train_last(self):\n for p in self.parameters():\n p.requires_grad = False\n for p in self.aligners[-1].parameters():\n p.requires_grad = True\n return self",
"def _update_parameters(self, loss):\n self.optimizer.zero_grad()\n loss.backward()\n self.optimizer.step()",
"def freeze(self):\n self.collect_params().setattr('grad_req', 'null')",
"def set_requires_grad(networks, requires_grad=False):\n for network in networks:\n for param in network.parameters():\n param.requires_grad = requires_grad",
"def params_with_grad(self) -> List[Parameter]:\n return [p for p in self.parameters() if p.grad is not None]",
"def update_params(self):\n if self.clip > 0:\n torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.clip)\n self.optimizer.step()",
"def update_parameters(parameters, grads, learning_rate):\n pass",
"def __init__(self):\n self.grad = 0.0",
"def test_param_to_gradient(self):\n pass",
"def optimize_parameters(self):\n self.loss_total.backward() # calculate gradients\n self.optimizer.step()\n self.optimizer.zero_grad()\n torch.cuda.empty_cache()",
"def overwrite_grad(pp, new_grad, grad_dims):\n cnt = 0\n for param in pp():\n param.grad=torch.zeros_like(param.data)\n beg = 0 if cnt == 0 else sum(grad_dims[:cnt])\n en = sum(grad_dims[:cnt + 1])\n this_grad = new_grad[beg: en].contiguous().view(\n param.data.size())\n param.grad.data.copy_(this_grad)\n cnt += 1",
"def buildGrad(self):\r\n\r\n self.grads = T.grad(self.L_elbo_modif, self.params)",
"def _UpdateGradient(self):\n self.mol.GetGradient('analytic')",
"def params_with_grad(self) -> List[Parameter]:\n return [p for p in self.parameters() if p.grad is not None]",
"def update_params(self, loss, step_size=0.5, first_order=False):\n #grads = torch.autograd.grad(loss, self.parameters(),\n # create_graph=not first_order)\n self.optim.zero_grad()\n loss.backward()\n nn.utils.clip_grad_norm(self.parameters(), self.grad_clip_norm)\n self.optim.step()\n #updated_params = OrderedDict()\n #self.relation_emb.zero_grad()\n #self.entity_emb.zero_grad()\n #for (name, param), grad in zip(self.named_parameters(), grads):\n '''\n for (name, param) in self.named_parameters():\n updated_params[name] = param.clone()\n if param.grad is not None:\n updated_params[name] -= step_size * param.grad\n\n return updated_params\n '''",
"def freeze_layers(model: torch.nn.Module) -> None:\n for param in model.parameters():\n param.requires_grad = False",
"def set_requires_grad(nets, requires_grad=False):\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def set_requires_grad(nets, requires_grad=False):\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def set_requires_grad(nets, requires_grad=False):\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def update_params(self, learning_rate):\n\t\t#######################################################################\n\t\t# ** START OF YOUR CODE **\n\t\t#######################################################################\n\t\tself._W = self._W - learning_rate * self._grad_W_current\n\t\tself._b = self._b - learning_rate * self._grad_b_current\n\t\t#######################################################################\n\t\t# ** END OF YOUR CODE **\n\t\t#######################################################################",
"def _propagate_param_grad(self, parray, garray):\n if self.param_array.size != self.size:\n self._param_array_ = np.empty(self.size, dtype=np.float64)\n if self.gradient.size != self.size:\n self._gradient_array_ = np.empty(self.size, dtype=np.float64)\n\n pi_old_size = 0\n for pi in self.parameters:\n pislice = slice(pi_old_size, pi_old_size + pi.size)\n\n self.param_array[pislice] = pi.param_array.flat # , requirements=['C', 'W']).flat\n self.gradient_full[pislice] = pi.gradient_full.flat # , requirements=['C', 'W']).flat\n\n pi.param_array.data = parray[pislice].data\n pi.gradient_full.data = garray[pislice].data\n\n pi._propagate_param_grad(parray[pislice], garray[pislice])\n pi_old_size += pi.size",
"def updateParams(self,gradients):\n for i in xrange(len(self.params)):\n self.params[i].set_value(self.params[i].get_value()-gradients[i]/(1/self.learning_rate+self.iterations))",
"def set_requires_grad(self, nets, requires_grad=False):\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def set_requires_grad(self, nets, requires_grad=False):\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def set_requires_grad(self, nets, requires_grad=False):\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def set_requires_grad(self, nets, requires_grad=False):\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def sgd(params, grads, lr, batch_size): #@save\n for param, grad in zip(params, grads):\n param.assign_sub(lr*grad/batch_size)",
"def freeze_img_branch_params(self):\n if self.with_img_bbox_head:\n for param in self.img_bbox_head.parameters():\n param.requires_grad = False\n if self.with_img_backbone:\n for param in self.img_backbone.parameters():\n param.requires_grad = False\n if self.with_img_neck:\n for param in self.img_neck.parameters():\n param.requires_grad = False\n if self.with_img_rpn:\n for param in self.img_rpn_head.parameters():\n param.requires_grad = False\n if self.with_img_roi_head:\n for param in self.img_roi_head.parameters():\n param.requires_grad = False",
"def update_param(self, lr):\n\n\n self.W=self.W-lr*self.W_grad\n self.b = self.b - lr*self.b_grad",
"def set_requires_grad(self, nets, requires_grad=False):\r\n if not isinstance(nets, list):\r\n nets = [nets]\r\n for net in nets:\r\n if net is not None:\r\n for param in net.parameters():\r\n param.requires_grad = requires_grad",
"def update_params(self, loss, step_size=0.5, first_order=False):\n grads = torch.autograd.grad(loss, self.parameters(),\n create_graph=not first_order)\n updated_params = OrderedDict()\n for (name, param), grad in zip(self.named_parameters(), grads):\n updated_params[name] = param - step_size * grad\n\n return updated_params",
"def test_grad_writeback(self):\n self.run_subtests(\n {\n \"change_first_weight_grad\": [False, True],\n \"change_data\": [False, True], # change `.data` vs. variable itself\n \"set_to_none\": [False, True],\n },\n self._test_grad_writeback,\n )",
"def set_requires_grad(nets: Union[nn.Module, List[nn.Module]],\n requires_grad: bool = False) -> None:\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad",
"def _finalize_params(fsdp_module: FullyShardedDataParallel) -> None:\n for handle in fsdp_module._handles:\n p = handle.flat_param\n if p.requires_grad:\n if hasattr(p, \"_post_backward_hook_state\"):\n p_assert(\n len(p._post_backward_hook_state) == 2, # type: ignore[attr-defined]\n \"p._post_backward_hook_state fields are not valid.\"\n )\n p._post_backward_hook_state[1].remove() # type: ignore[attr-defined]\n delattr(p, \"_post_backward_hook_state\")\n # Preserve the gradient accumulation state if not\n # synchronizing: `p.grad` remains the unsharded gradient\n # accumulated from prior `no_sync()` iterations, and\n # `p._saved_grad_shard` remains the sharded gradient from\n # the last synchronized iteration\n if not self._sync_gradients:\n continue\n # Set `p.grad` as needed to ensure optimizer correctness\n # since optimizers operate on the `grad` attribute\n if hasattr(p, \"_cpu_grad\"):\n p_assert(\n p.device == torch.device(\"cpu\"),\n f\"Device mismatch: p={p.device} \" # type: ignore[attr-defined]\n f\"p._cpu_grad={p._cpu_grad}\"\n )\n p.grad = p._cpu_grad # type: ignore[attr-defined]\n elif hasattr(p, \"_saved_grad_shard\"):\n p_assert(\n p.device == p._saved_grad_shard.device, # type: ignore[attr-defined]\n f\"Device mismatch: p={p.device} \" # type: ignore[attr-defined]\n f\"p._saved_grad_shard={p._saved_grad_shard.device}\"\n )\n # Check if post-backward was called for this param (FSDP unit).\n # TODO: This logic will have to be revisited when non-recursive wrapping\n # lands. If it was not called, there is no new gradient to accumulate\n if p._post_backward_called:\n p.grad = p._saved_grad_shard\n if fsdp_module._mixed_precision_keep_low_precision_grads():\n p.grad.data = p.grad.to(\n fsdp_module.mixed_precision.param_dtype\n )\n else:\n p_assert(\n not handle.uses_sharded_strategy or not p._post_backward_called,\n \"All sharded parameters that received a gradient \"\n \"should use `_saved_grad_shard`\"\n )\n if hasattr(p, \"_saved_grad_shard\"):\n delattr(p, \"_saved_grad_shard\")\n\n p_assert(\n hasattr(p, '_post_backward_called'),\n \"Expected flag _post_backward_called to be set on param.\"\n )\n # Reset _post_backward_called in preparation for the next iteration.\n p._post_backward_called = False",
"def overwrite_grad(pp, newgrad, grad_dims):\n cnt = 0\n for param in pp():\n if param.grad is not None:\n beg = 0 if cnt == 0 else sum(grad_dims[:cnt])\n en = sum(grad_dims[:cnt + 1])\n this_grad = newgrad[beg: en].contiguous().view(\n param.grad.data.size())\n param.grad.data.copy_(this_grad)\n cnt += 1",
"def optimize_parameters(self) -> None:\n self.forward() # compute fake images: G(A)\n # update discriminator\n self.set_requires_grad([self._discriminator_module], True) # enable backward for D\n self._discriminator_optimizer.zero_grad() # set D's gradients to zero\n self.backward_discriminator() # calculate gradients for D\n self._discriminator_optimizer.step() # update D's weights\n # update generator\n self.set_requires_grad([self._discriminator_module], False) # D requires no gradients when optimizing G\n self._generator_optimizer.zero_grad() # set G's gradients to zero\n self.backward_generator() # calculate gradients for G\n self._generator_optimizer.step() # update G's weights\n return",
"def defreeze_model(self):\n # defreeze all parameters\n for param in self.parameters():\n param.requires_grad = True\n # make the whole network trainable\n self.train()",
"def _set_trainable(self, disc=False):\n def set_requires_grad(m, rg):\n for p in m.parameters(): p.requires_grad_(rg)\n set_requires_grad(self.learn.model.G_A, not disc)\n set_requires_grad(self.learn.model.G_B, not disc)\n set_requires_grad(self.learn.model.D_A, disc)\n set_requires_grad(self.learn.model.D_B, disc)\n if disc: self.opt_D.hypers = self.learn.opt.hypers\n self.gen_mode = not disc",
"def handle_gradient(self):\n self._optimizer.sync_grad()",
"def put_trainable_parameters(net,X):\n trainable=filter(lambda p: p.requires_grad, net.parameters())\n paramlist=list(trainable)\n offset=0\n for params in paramlist:\n numel=params.numel()\n with torch.no_grad():\n params.data.copy_(X[offset:offset+numel].data.view_as(params.data))\n offset+=numel",
"def set_requires_grad(nets: Union[torch.nn.Module, List[torch.nn.Module]], requires_grad: bool = False) -> None:\n if not isinstance(nets, list):\n nets = [nets]\n for net in nets:\n if net is not None:\n for param in net.parameters():\n param.requires_grad = requires_grad\n return",
"def freeze(net):\n for p in net.parameters():\n p.requires_grad_(False)\n return net",
"def add_grad_updates(self):\n \n gradients = T.grad(self.cost, self.theta)\n \n for target_param, grad in zip(self.theta, gradients):\n \n if target_param.name ==\"W\" and self.num_hidden ==0\\\n and self.zero_diag:\n \n grad = grad - T.diag(T.diag(grad)) # no x i - xi connections\n # for all i = 1, ..., D\n ##############################################################\n if target_param.name ==\"b\" and self.learn_biases == False:\n print(\"Will not learn bias terms\")\n pass\n \n elif target_param.name ==\"bhid\" and self.learn_biases == False:\n print(\"Will not learn bias terms\")\n pass\n \n else:\n \n if self.use_momentum:\n \n # alternative definition (mostly seen):\n #g_tilda = self.momentum*self.grad_vec[target_param.name] - \\\n #T.cast(self.learning_rate, dtype = theano.config.floatX)*grad\n #self.updates[target_param] = target_param + g_tilda\n \n g_tilda = self.momentum*self.grad_vec[target_param.name] - \\\n (1-self.momentum)*grad\n \n self.updates[target_param] = target_param +\\\n T.cast(self.learning_rate, dtype = theano.config.floatX)*g_tilda\n \n # store g_tilda for next iteration:\n self.updates[self.grad_vec[target_param.name]] = g_tilda\n \n else:\n \n self.updates[target_param] = target_param -\\\n T.cast(self.learning_rate, dtype = theano.config.floatX)*grad\n \n if (\"PCD\" in self.algorithm) and self.num_hidden > 0:\n \n self.updates[self.persistent_gibbs] = self.hid_samples",
"def get_params_grad(self, X, output_grad):\n return []",
"def reset_parameters(self, initializer=torch.nn.init.normal_):\n for p in self.parameters():\n if p.requires_grad:\n initializer(p)\n return self",
"def set_feedback_requires_grad(self, value):\n if not isinstance(value, bool):\n raise TypeError('The given value should be a boolean.')\n self._feedbackweights.requires_grad = value\n if self._feedbackbias is not None:\n self._feedbackbias.requires_grad = value",
"def zero_grad(params):\n if isinstance(params, dict):\n params = params.values()\n for param in params:\n if param.requires_grad and param.grad is not None:\n param.grad.zero_()",
"def step(self):\n if self.defaults['max_grad_norm'] > 0:\n device = self.param_groups[0]['params'][0].device\n global_grad_norm = torch.zeros(1, device=device)\n\n max_grad_norm = torch.tensor(self.defaults['max_grad_norm'], device=device)\n for group in self.param_groups:\n\n for p in group['params']:\n if p.grad is not None:\n grad = p.grad\n global_grad_norm.add_(grad.pow(2).sum())\n\n global_grad_norm = torch.sqrt(global_grad_norm)\n\n clip_global_grad_norm = torch.clamp(max_grad_norm / (global_grad_norm + group['eps']), max=1.0)\n else:\n clip_global_grad_norm = 1.0\n\n for group in self.param_groups:\n beta1, beta2, beta3 = group['betas']\n # assume same step across group now to simplify things\n # per parameter step can be easily support by making it tensor, or pass list into kernel\n if 'step' in group:\n group['step'] += 1\n else:\n group['step'] = 1\n\n bias_correction1 = 1.0 - beta1 ** group['step']\n\n bias_correction2 = 1.0 - beta2 ** group['step']\n\n bias_correction3 = 1.0 - beta3 ** group['step']\n\n for p in group['params']:\n if p.grad is None:\n continue\n\n state = self.state[p]\n if len(state) == 0:\n state['exp_avg'] = torch.zeros_like(p)\n state['exp_avg_sq'] = torch.zeros_like(p)\n state['exp_avg_diff'] = torch.zeros_like(p)\n\n grad = p.grad.mul_(clip_global_grad_norm)\n if 'pre_grad' not in state or group['step'] == 1:\n state['pre_grad'] = grad\n\n copy_grad = grad.clone()\n\n exp_avg, exp_avg_sq, exp_avg_diff = state['exp_avg'], state['exp_avg_sq'], state['exp_avg_diff']\n diff = grad - state['pre_grad']\n\n update = grad + beta2 * diff\n exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1) # m_t\n exp_avg_diff.mul_(beta2).add_(diff, alpha=1 - beta2) # diff_t\n exp_avg_sq.mul_(beta3).addcmul_(update, update, value=1 - beta3) # n_t\n\n denom = ((exp_avg_sq).sqrt() / math.sqrt(bias_correction3)).add_(group['eps'])\n update = ((exp_avg / bias_correction1 + beta2 * exp_avg_diff / bias_correction2)).div_(denom)\n\n if group['no_prox']:\n p.data.mul_(1 - group['lr'] * group['weight_decay'])\n p.add_(update, alpha=-group['lr'])\n else:\n p.add_(update, alpha=-group['lr'])\n p.data.div_(1 + group['lr'] * group['weight_decay'])\n\n state['pre_grad'] = copy_grad",
"def set_grad_flag(self, flag):\n self._executor.set_grad_flag(flag)",
"def zero_grad(self):\n for parameter in self.param():\n parameter.zero_grad()",
"def freeze_until(model, param_name):\n found_name = False\n for name, params in model.named_parameters():\n if name == param_name:\n found_name = True\n params.requires_grad = found_name",
"def apply_grad(self, grad, args):\n args_new = list(args)\n\n if self.accumulation is None:\n self.accumulation = [0.0] * len(args)\n\n trained_index = 0\n for index, arg in enumerate(args):\n if getattr(arg, \"requires_grad\", False):\n self._update_accumulation(index, grad[trained_index])\n\n coeff = self.stepsize / sqrt(self.accumulation[index] + self.eps)\n args_new[index] = arg - coeff * grad[trained_index]\n\n trained_index += 1\n\n return args_new",
"def trainable_params(model, feature_extract):\n params_to_update = model.parameters()\n print(\"Params to learn:\")\n if feature_extract:\n params_to_update = []\n for name, param in model.named_parameters():\n if param.requires_grad == True:\n params_to_update.append(param)\n print(\"\\t\", name)\n else:\n for name, param in model.named_parameters():\n if param.requires_grad == True:\n print(\"\\t\", name)\n return params_to_update",
"def set_module_trainable(module: nn.Module, mode: bool) -> None:\n for param in module.parameters():\n param.requires_grad = mode",
"def parameters(self, requires_grad_only=True):\n filter_cond = lambda param: param.requires_grad if requires_grad_only else True\n return (param for param in super().parameters() if filter_cond(param))",
"def trainable_parameters(net):\n return [p for p in net.parameters() if p.requires_grad]",
"def unfreeze_layers(model: torch.nn.Module) -> None:\n for param in model.parameters():\n param.requires_grad = True",
"def add_grad(pp, new_grad, grad_dims):\n cnt = 0\n for param in pp():\n #param.grad=torch.zeros_like(param.data)\n beg = 0 if cnt == 0 else sum(grad_dims[:cnt])\n en = sum(grad_dims[:cnt + 1])\n this_grad = new_grad[beg: en].contiguous().view(\n param.data.size())\n\n param.grad.data.add_(this_grad)\n cnt += 1",
"def __prepare_parameter__(self, in_args):\n if self.__use_remote_sparse_updater__():\n self.__gradient_machine__.prefetch(in_args)\n self.__parameter_updater__.getParametersRemote()",
"def __init__(self, params: Iterable[nn.Parameter]):\n self.params = params\n self.param_states = [p.requires_grad for p in self.params]",
"def compute_grad(*, model: nn.Module, loss: Tensor) -> None:\n grad_list = torch.autograd.grad(loss, tuple(model.parameters()), retain_graph=True)\n\n for param, grad in zip(model.parameters(), grad_list):\n param.grad = grad",
"def optimize_parameters(self):\r\n # forward\r\n self.forward() # compute fake image/video and reconstruction image/video\r\n\r\n # D_A\r\n self.set_requires_grad([self.D_V], True)\r\n self.set_requires_grad([self.G_t, self.G_u, self.Att, self.classifier], False)\r\n self.optimizer_D.zero_grad() # set D_V's gradients to zero\r\n self.backward_D_V() # calculate graidents for D_V\r\n self.optimizer_D.step() # update D_A's weights\r\n\r\n # G_A and G_B\r\n self.set_requires_grad([self.D_V], False) # Ds require no gradients when optimizing Gs\r\n self.set_requires_grad([self.G_t, self.G_u, self.Att, self.classifier], True)\r\n self.optimizer_G.zero_grad() # set G_t,G_u,Att,classifier's gradients to zero\r\n self.backward_G() # calculate gradients for G_A and G_B\r\n self.optimizer_G.step() # update G_A and G_B's weights\r",
"def save_and_step(self):\n self.last_grads = [param.grad for param in self.agent.model.parameters()]\n self.optimizer.pytorch_step()",
"def __init__(self, requires_grad=False):\n self.requires_grad = requires_grad # whether tensors needs gradient\n self._reset_gradients()",
"def reset_grad(self):\n self.g_optimizer.zero_grad()\n self.d_optimizer.zero_grad()\n self.dr_optimizer.zero_grad()",
"def set_enable_grad(self, flag):\n self._executor.set_enable_grad(flag)",
"def gradients_to_updates(self, params, grads):\n return NotImplementedError('Abstract class method')",
"def reset_grad(self):\r\n self.g_optimizer.zero_grad()",
"def apply_gradients(self,\n grads_and_vars,\n global_step=None,\n name=None,\n decay_var_list=None):\n self._decay_var_list = set(decay_var_list) if decay_var_list else False\n return super(DecoupledWeightDecayExtension, self).apply_gradients(\n grads_and_vars, global_step=global_step, name=name)",
"def state_requires_grad(x):\n if isinstance(x, torch.Tensor):\n x.requires_grad = True\n elif isinstance(x, list):\n for i in range(len(x)):\n x[i] = state_requires_grad(x[i])\n return x",
"def extra_grad_info(self, component):\n return {\n f\"grad_norm({component})\": nn.utils.clip_grad_norm_(\n self.module[component].parameters(), float(\"inf\")\n ).item()\n }",
"def update_gradients(self, grads):\n self.variance.gradient = grads[0]",
"def save_feedback_gradients(self, reconstruction_loss):\n self.reconstruction_loss = reconstruction_loss.item()\n if self.feedbackbias is not None:\n grads = torch.autograd.grad(reconstruction_loss, [\n self.feedbackweights, self.feedbackbias], retain_graph=False)\n self._feedbackbias.grad = grads[1].detach()\n else:\n grads = torch.autograd.grad(reconstruction_loss,\n self.feedbackweights,\n retain_graph=False\n )\n self._feedbackweights.grad = grads[0].detach()",
"def update_parameters_with_gd(parameters, grads, learning_rate):\n\n L = len(parameters) // 2 # number of layers in the neural networks\n\n # Update rule for each parameter\n for l in range(L):\n parameters[\"W\" + str(l+1)] = parameters[\"W\" + str(l+1)] - learning_rate * grads['dW' + str(l+1)]\n parameters[\"b\" + str(l+1)] = parameters[\"b\" + str(l+1)] - learning_rate * grads['db' + str(l+1)]\n \n return parameters",
"def _update_parameters_by_gradients(self, grads_and_vars):\n grads_and_vars_new = []\n for grad, var in grads_and_vars:\n # If var is a string, create the grad var pair for\n # ElasticDL embedding\n if isinstance(var, str):\n grads_and_vars_new.append(\n self._get_embedding_var_and_grad(grad, var)\n )\n self._has_embedding = True\n else:\n grads_and_vars_new.append((grad, var))\n self._opt.apply_gradients(grads_and_vars_new)\n self._update_embedding_param()\n self._delete_slots_and_weights_in_optimizer()",
"def _reset_parameters(self):\r\n\t\tfor p in self.parameters():\r\n\t\t\tif p.dim() > 1:\r\n\t\t\t\txavier_uniform_(p)",
"def apply_gradient(params: torch.Tensor, grads: torch.Tensor, lr: float) -> torch.Tensor:\n params_prime = params + lr * grads\n return params_prime",
"def update_parameters(parameters, grads, learning_rate = 1.2):\n\t# Retrieve each parameter from the dictionary \"parameters\"\n\tW1 = parameters['W1']\n\tb1 = parameters['b1']\n\tW2 = parameters['W2']\n\tb2 = parameters['b2']\n\n\t# Retrieve each gradient from the dictionary \"grads\"\n\tdW1 = grads['dW1']\n\tdb1 = grads['db1']\n\tdW2 = grads['dW2']\n\tdb2 = grads['db2']\n\n\t# Update rule for each parameter\n\tW1 = W1 - learning_rate*dW1\n\tb1 = b1 - learning_rate*db1\n\tW2 = W2 - learning_rate*dW2\n\tb2 = b2 - learning_rate*db2\n\n\tparameters = {\"W1\": W1,\n\t\t\t\t\t\"b1\": b1,\n\t\t\t\t\t\"W2\": W2,\n\t\t\t\t\t\"b2\": b2}\n\n\treturn parameters",
"def optimize_parameters(self):\n pass",
"def optimize_parameters(self):\n pass",
"def optimize_parameters(self):\n pass"
] | [
"0.76790285",
"0.76790285",
"0.75791013",
"0.75430167",
"0.74836636",
"0.7409139",
"0.7380558",
"0.73643893",
"0.72944826",
"0.72131354",
"0.719416",
"0.71648544",
"0.71418315",
"0.71305335",
"0.70704716",
"0.7017141",
"0.69882965",
"0.69882965",
"0.69626516",
"0.6960601",
"0.69451433",
"0.6899546",
"0.6858573",
"0.6787856",
"0.6773075",
"0.6772632",
"0.672003",
"0.6717103",
"0.6692775",
"0.6690326",
"0.66638386",
"0.6648924",
"0.6634553",
"0.6621997",
"0.6620929",
"0.661235",
"0.66120493",
"0.66120493",
"0.66120493",
"0.6589221",
"0.6564724",
"0.65559363",
"0.6535046",
"0.6535046",
"0.6535046",
"0.6535046",
"0.6534737",
"0.6515606",
"0.6513169",
"0.65125066",
"0.6501122",
"0.6470164",
"0.6465453",
"0.64525205",
"0.64512813",
"0.6450716",
"0.64488775",
"0.64480793",
"0.6442767",
"0.642723",
"0.642241",
"0.641531",
"0.63949066",
"0.63862365",
"0.63417214",
"0.6340241",
"0.6336684",
"0.63202274",
"0.6319861",
"0.63017654",
"0.6291041",
"0.6287709",
"0.62673736",
"0.6267141",
"0.6264355",
"0.6258901",
"0.62466973",
"0.6241641",
"0.6234484",
"0.6221405",
"0.62089473",
"0.6195311",
"0.61808753",
"0.6163808",
"0.6150394",
"0.6142925",
"0.6140096",
"0.6136575",
"0.61333317",
"0.6129591",
"0.6128199",
"0.6126707",
"0.61240554",
"0.6100196",
"0.6098549",
"0.6095464",
"0.6079858",
"0.6076019",
"0.6070147",
"0.6070147",
"0.6070147"
] | 0.0 | -1 |