Columns: index (int64, 0 to 731k); package (string, 2 to 98 chars); name (string, 1 to 76 chars); docstring (string, 0 to 281k chars); code (string, 4 to 1.07M chars); signature (string, 2 to 42.8k chars)
714,514
webp
WebPAnimEncoderOptions
null
class WebPAnimEncoderOptions: def __init__(self, ptr): self.ptr = ptr @property def minimize_size(self): return self.ptr.minimize_size != 0 @minimize_size.setter def minimize_size(self, minimize_size): self.ptr.minimize_size = 1 if minimize_size else 0 @property def allow_mixed(self): return self.ptr.allow_mixed != 0 @allow_mixed.setter def allow_mixed(self, allow_mixed): self.ptr.allow_mixed = 1 if allow_mixed else 0 @staticmethod def new(minimize_size=False, allow_mixed=False): ptr = ffi.new('WebPAnimEncoderOptions*') if lib.WebPAnimEncoderOptionsInit(ptr) == 0: raise WebPError('version mismatch') enc_opts = WebPAnimEncoderOptions(ptr) enc_opts.minimize_size = minimize_size enc_opts.allow_mixed = allow_mixed return enc_opts
(ptr)
714,516
webp
new
null
@staticmethod def new(minimize_size=False, allow_mixed=False): ptr = ffi.new('WebPAnimEncoderOptions*') if lib.WebPAnimEncoderOptionsInit(ptr) == 0: raise WebPError('version mismatch') enc_opts = WebPAnimEncoderOptions(ptr) enc_opts.minimize_size = minimize_size enc_opts.allow_mixed = allow_mixed return enc_opts
(minimize_size=False, allow_mixed=False)
714,517
webp
WebPAnimInfo
null
class WebPAnimInfo: def __init__(self, ptr): self.ptr = ptr @property def frame_count(self): return self.ptr.frame_count @property def width(self): return self.ptr.canvas_width @property def height(self): return self.ptr.canvas_height @staticmethod def new(): ptr = ffi.new('WebPAnimInfo*') return WebPAnimInfo(ptr)
(ptr)
714,519
webp
new
null
@staticmethod def new(): ptr = ffi.new('WebPAnimInfo*') return WebPAnimInfo(ptr)
()
714,520
webp
WebPColorMode
An enumeration.
class WebPColorMode(Enum): RGB = lib.MODE_RGB RGBA = lib.MODE_RGBA BGR = lib.MODE_BGR BGRA = lib.MODE_BGRA ARGB = lib.MODE_ARGB RGBA_4444 = lib.MODE_RGBA_4444 RGB_565 = lib.MODE_RGB_565 rgbA = lib.MODE_rgbA bgrA = lib.MODE_bgrA Argb = lib.MODE_Argb rgbA_4444 = lib.MODE_rgbA_4444 YUV = lib.MODE_YUV YUVA = lib.MODE_YUVA LAST = lib.MODE_LAST
(value, names=None, *, module=None, qualname=None, type=None, start=1)
714,521
webp
WebPConfig
null
class WebPConfig: DEFAULT_QUALITY = 75.0 def __init__(self, ptr): self.ptr = ptr @property def lossless(self): return self.ptr.lossless != 0 @lossless.setter def lossless(self, lossless): self.ptr.lossless = 1 if lossless else 0 @property def quality(self): return self.ptr.quality @quality.setter def quality(self, quality): self.ptr.quality = quality @property def method(self): return self.ptr.method @method.setter def method(self, method): self.ptr.method = method def validate(self): return lib.WebPValidateConfig(self.ptr) != 0 @staticmethod def new(preset=WebPPreset.DEFAULT, quality=None, lossless=False, lossless_preset=None, method=None): """Create a new WebPConfig instance to describe encoder settings. 1. The preset is loaded, setting default values for quality factor (75.0) and compression method (4). 2. If `lossless` is True and `lossless_preset` is specified, then the lossless preset with the specified level is loaded. This will replace the default values for quality factor and compression method. 3. Values for lossless, quality, and method are set using explicitly provided arguments. This allows the caller to explicitly specify these settings and overrides settings from presets. Args: preset (WebPPreset): Preset setting. quality (float, optional): Quality factor (0=small but low quality, 100=high quality but big). Overrides presets. Effective default is 75.0. lossless (bool): Set to True for lossless compression. lossless_preset (int, optional): Lossless preset level (0=fast but big, 9=small but slow). Can only be specified when `lossless` is true. Sets the values for quality factor and compression method together. Effective default is 6. method (int, optional): Compression method (0=fast but big, 6=small but slow). Overrides presets. Effective default is 4. Returns: WebPConfig: The new WebPConfig instance. """ ptr = ffi.new('WebPConfig*') if lib.WebPConfigPreset(ptr, preset.value, WebPConfig.DEFAULT_QUALITY) == 0: raise WebPError('failed to load config options from preset') if lossless_preset is not None: if not lossless: raise WebPError('can only use lossless preset when lossless is True') if lib.WebPConfigLosslessPreset(ptr, lossless_preset) == 0: raise WebPError('failed to load config options from lossless preset') config = WebPConfig(ptr) config.lossless = lossless # Override presets for explicitly specified values. if quality is not None: config.quality = quality if method is not None: config.method = method if not config.validate(): raise WebPError('config is not valid') return config
(ptr)
714,523
webp
new
Create a new WebPConfig instance to describe encoder settings. 1. The preset is loaded, setting default values for quality factor (75.0) and compression method (4). 2. If `lossless` is True and `lossless_preset` is specified, then the lossless preset with the specified level is loaded. This will replace the default values for quality factor and compression method. 3. Values for lossless, quality, and method are set using explicitly provided arguments. This allows the caller to explicitly specify these settings and overrides settings from presets. Args: preset (WebPPreset): Preset setting. quality (float, optional): Quality factor (0=small but low quality, 100=high quality but big). Overrides presets. Effective default is 75.0. lossless (bool): Set to True for lossless compression. lossless_preset (int, optional): Lossless preset level (0=fast but big, 9=small but slow). Can only be specified when `lossless` is true. Sets the values for quality factor and compression method together. Effective default is 6. method (int, optional): Compression method (0=fast but big, 6=small but slow). Overrides presets. Effective default is 4. Returns: WebPConfig: The new WebPConfig instance.
@staticmethod def new(preset=WebPPreset.DEFAULT, quality=None, lossless=False, lossless_preset=None, method=None): """Create a new WebPConfig instance to describe encoder settings. 1. The preset is loaded, setting default values for quality factor (75.0) and compression method (4). 2. If `lossless` is True and `lossless_preset` is specified, then the lossless preset with the specified level is loaded. This will replace the default values for quality factor and compression method. 3. Values for lossless, quality, and method are set using explicitly provided arguments. This allows the caller to explicitly specify these settings and overrides settings from presets. Args: preset (WebPPreset): Preset setting. quality (float, optional): Quality factor (0=small but low quality, 100=high quality but big). Overrides presets. Effective default is 75.0. lossless (bool): Set to True for lossless compression. lossless_preset (int, optional): Lossless preset level (0=fast but big, 9=small but slow). Can only be specified when `lossless` is true. Sets the values for quality factor and compression method together. Effective default is 6. method (int, optional): Compression method (0=fast but big, 6=small but slow). Overrides presets. Effective default is 4. Returns: WebPConfig: The new WebPConfig instance. """ ptr = ffi.new('WebPConfig*') if lib.WebPConfigPreset(ptr, preset.value, WebPConfig.DEFAULT_QUALITY) == 0: raise WebPError('failed to load config options from preset') if lossless_preset is not None: if not lossless: raise WebPError('can only use lossless preset when lossless is True') if lib.WebPConfigLosslessPreset(ptr, lossless_preset) == 0: raise WebPError('failed to load config options from lossless preset') config = WebPConfig(ptr) config.lossless = lossless # Override presets for explicitly specified values. if quality is not None: config.quality = quality if method is not None: config.method = method if not config.validate(): raise WebPError('config is not valid') return config
(preset=WebPPreset.DEFAULT, quality=None, lossless=False, lossless_preset=None, method=None)
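A minimal usage sketch of the preset layering described in the docstring above, assuming the library is imported as webp and exposes WebPConfig and WebPPreset at package level:

import webp

# Lossy: load the PHOTO preset, then explicitly override the quality factor (step 3 above).
config = webp.WebPConfig.new(preset=webp.WebPPreset.PHOTO, quality=80.0)

# Lossless: lossless_preset=9 sets quality factor and compression method together (small but slow).
lossless_config = webp.WebPConfig.new(lossless=True, lossless_preset=9)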
714,524
webp
validate
null
def validate(self): return lib.WebPValidateConfig(self.ptr) != 0
(self)
714,525
webp
WebPData
null
class WebPData: def __init__(self, ptr, data_ref): self.ptr = ptr self._data_ref = data_ref @property def size(self): return self.ptr.size def buffer(self): buf = ffi.buffer(self._data_ref, self.size) return buf def decode(self, color_mode=WebPColorMode.RGBA): dec_config = WebPDecoderConfig.new() dec_config.read_features(self) if color_mode == WebPColorMode.RGBA \ or color_mode == WebPColorMode.bgrA \ or color_mode == WebPColorMode.BGRA \ or color_mode == WebPColorMode.rgbA \ or color_mode == WebPColorMode.ARGB \ or color_mode == WebPColorMode.Argb: bytes_per_pixel = 4 elif color_mode == WebPColorMode.RGB \ or color_mode == WebPColorMode.BGR: bytes_per_pixel = 3 elif color_mode == WebPColorMode.RGB_565 \ or color_mode == WebPColorMode.RGBA_4444 \ or color_mode == WebPColorMode.rgbA_4444: bytes_per_pixel = 2 else: raise WebPError('unsupported color mode: {}'.format(str(color_mode))) arr = np.empty((dec_config.input.height, dec_config.input.width, bytes_per_pixel), dtype=np.uint8) dec_config.output.colorspace = color_mode.value dec_config.output.u.RGBA.rgba = ffi.cast('uint8_t*', ffi.from_buffer(arr)) dec_config.output.u.RGBA.size = arr.size dec_config.output.u.RGBA.stride = dec_config.input.width * bytes_per_pixel dec_config.output.is_external_memory = 1 if lib.WebPDecode(self.ptr.bytes, self.size, dec_config.ptr) != lib.VP8_STATUS_OK: raise WebPError('failed to decode') lib.WebPFreeDecBuffer(ffi.addressof(dec_config.ptr, 'output')) return arr @staticmethod def from_buffer(buf): ptr = ffi.new('WebPData*') lib.WebPDataInit(ptr) data_ref = ffi.from_buffer(buf) ptr.size = len(buf) ptr.bytes = ffi.cast('uint8_t*', data_ref) return WebPData(ptr, data_ref)
(ptr, data_ref)
714,526
webp
__init__
null
def __init__(self, ptr, data_ref): self.ptr = ptr self._data_ref = data_ref
(self, ptr, data_ref)
714,527
webp
buffer
null
def buffer(self): buf = ffi.buffer(self._data_ref, self.size) return buf
(self)
714,528
webp
decode
null
def decode(self, color_mode=WebPColorMode.RGBA): dec_config = WebPDecoderConfig.new() dec_config.read_features(self) if color_mode == WebPColorMode.RGBA \ or color_mode == WebPColorMode.bgrA \ or color_mode == WebPColorMode.BGRA \ or color_mode == WebPColorMode.rgbA \ or color_mode == WebPColorMode.ARGB \ or color_mode == WebPColorMode.Argb: bytes_per_pixel = 4 elif color_mode == WebPColorMode.RGB \ or color_mode == WebPColorMode.BGR: bytes_per_pixel = 3 elif color_mode == WebPColorMode.RGB_565 \ or color_mode == WebPColorMode.RGBA_4444 \ or color_mode == WebPColorMode.rgbA_4444: bytes_per_pixel = 2 else: raise WebPError('unsupported color mode: {}'.format(str(color_mode))) arr = np.empty((dec_config.input.height, dec_config.input.width, bytes_per_pixel), dtype=np.uint8) dec_config.output.colorspace = color_mode.value dec_config.output.u.RGBA.rgba = ffi.cast('uint8_t*', ffi.from_buffer(arr)) dec_config.output.u.RGBA.size = arr.size dec_config.output.u.RGBA.stride = dec_config.input.width * bytes_per_pixel dec_config.output.is_external_memory = 1 if lib.WebPDecode(self.ptr.bytes, self.size, dec_config.ptr) != lib.VP8_STATUS_OK: raise WebPError('failed to decode') lib.WebPFreeDecBuffer(ffi.addressof(dec_config.ptr, 'output')) return arr
(self, color_mode=WebPColorMode.RGBA)
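A short decoding sketch combining WebPData.from_buffer with decode above; the file name is a placeholder:

import webp

with open('image.webp', 'rb') as f:
    data = webp.WebPData.from_buffer(f.read())
# RGB yields a numpy uint8 array of shape (height, width, 3); RGBA would give 4 channels.
arr = data.decode(color_mode=webp.WebPColorMode.RGB)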
714,529
webp
from_buffer
null
@staticmethod def from_buffer(buf): ptr = ffi.new('WebPData*') lib.WebPDataInit(ptr) data_ref = ffi.from_buffer(buf) ptr.size = len(buf) ptr.bytes = ffi.cast('uint8_t*', data_ref) return WebPData(ptr, data_ref)
(buf)
714,530
webp
WebPDecoderConfig
null
class WebPDecoderConfig: def __init__(self, ptr): self.ptr = ptr @property def input(self): return self.ptr.input @property def output(self): return self.ptr.output @property def options(self): return self.ptr.options def read_features(self, webp_data): input_ptr = ffi.addressof(self.ptr, 'input') if lib.WebPGetFeatures(webp_data.ptr.bytes, webp_data.size, input_ptr) != lib.VP8_STATUS_OK: raise WebPError('failed to read features') @staticmethod def new(): ptr = ffi.new('WebPDecoderConfig*') if lib.WebPInitDecoderConfig(ptr) == 0: raise WebPError('failed to init decoder config') return WebPDecoderConfig(ptr)
(ptr)
714,532
webp
new
null
@staticmethod def new(): ptr = ffi.new('WebPDecoderConfig*') if lib.WebPInitDecoderConfig(ptr) == 0: raise WebPError('failed to init decoder config') return WebPDecoderConfig(ptr)
()
714,533
webp
read_features
null
def read_features(self, webp_data): input_ptr = ffi.addressof(self.ptr, 'input') if lib.WebPGetFeatures(webp_data.ptr.bytes, webp_data.size, input_ptr) != lib.VP8_STATUS_OK: raise WebPError('failed to read features')
(self, webp_data)
714,534
webp
WebPError
null
class WebPError(Exception): pass
null
714,535
webp
WebPMemoryWriter
null
class WebPMemoryWriter: def __init__(self, ptr): self.ptr = ptr def __del__(self): # Free memory if we are still responsible for it. if self.ptr: lib.WebPMemoryWriterClear(self.ptr) def to_webp_data(self): _webp_data = _WebPData() _webp_data.ptr.bytes = self.ptr.mem _webp_data.ptr.size = self.ptr.size self.ptr = None return _webp_data.done() @staticmethod def new(): ptr = ffi.new('WebPMemoryWriter*') lib.WebPMemoryWriterInit(ptr) return WebPMemoryWriter(ptr)
(ptr)
714,536
webp
__del__
null
def __del__(self): # Free memory if we are still responsible for it. if self.ptr: lib.WebPMemoryWriterClear(self.ptr)
(self)
714,538
webp
new
null
@staticmethod def new(): ptr = ffi.new('WebPMemoryWriter*') lib.WebPMemoryWriterInit(ptr) return WebPMemoryWriter(ptr)
()
714,539
webp
to_webp_data
null
def to_webp_data(self): _webp_data = _WebPData() _webp_data.ptr.bytes = self.ptr.mem _webp_data.ptr.size = self.ptr.size self.ptr = None return _webp_data.done()
(self)
714,540
webp
WebPPicture
null
class WebPPicture: def __init__(self, ptr): self.ptr = ptr def __del__(self): lib.WebPPictureFree(self.ptr) def encode(self, config=None): if config is None: config = WebPConfig.new() writer = WebPMemoryWriter.new() self.ptr.writer = ffi.addressof(lib, 'WebPMemoryWrite') self.ptr.custom_ptr = writer.ptr if lib.WebPEncode(config.ptr, self.ptr) == 0: raise WebPError('encoding error: ' + str(self.ptr.error_code)) return writer.to_webp_data() def save(self, file_path, config=None): buf = self.encode(config).buffer() with open(file_path, 'wb') as f: f.write(buf) @staticmethod def new(width, height): ptr = ffi.new('WebPPicture*') if lib.WebPPictureInit(ptr) == 0: raise WebPError('version mismatch') ptr.width = width ptr.height = height if lib.WebPPictureAlloc(ptr) == 0: raise WebPError('memory error') return WebPPicture(ptr) @staticmethod def from_numpy(arr, *, pilmode=None): ptr = ffi.new('WebPPicture*') if lib.WebPPictureInit(ptr) == 0: raise WebPError('version mismatch') if len(arr.shape) == 3: bytes_per_pixel = arr.shape[-1] elif len(arr.shape) == 2: bytes_per_pixel = 1 else: raise WebPError('unexpected array shape: ' + repr(arr.shape)) if pilmode is None: if bytes_per_pixel == 3: import_func = lib.WebPPictureImportRGB elif bytes_per_pixel == 4: import_func = lib.WebPPictureImportRGBA else: raise WebPError('cannot infer color mode from array of shape ' + repr(arr.shape)) else: if pilmode == 'RGB': import_func = lib.WebPPictureImportRGB elif pilmode == 'RGBA': import_func = lib.WebPPictureImportRGBA else: raise WebPError('unsupported image mode: ' + pilmode) ptr.height, ptr.width = arr.shape[:2] pixels = ffi.cast('uint8_t*', ffi.from_buffer(arr)) stride = ptr.width * bytes_per_pixel ptr.use_argb = 1 if import_func(ptr, pixels, stride) == 0: raise WebPError('memory error') return WebPPicture(ptr) @staticmethod def from_pil(img): if img.mode == 'P': if 'transparency' in img.info: img = img.convert('RGBA') else: img = img.convert('RGB') return WebPPicture.from_numpy(np.asarray(img), pilmode=img.mode)
(ptr)
714,541
webp
__del__
null
def __del__(self): lib.WebPPictureFree(self.ptr)
(self)
714,543
webp
encode
null
def encode(self, config=None): if config is None: config = WebPConfig.new() writer = WebPMemoryWriter.new() self.ptr.writer = ffi.addressof(lib, 'WebPMemoryWrite') self.ptr.custom_ptr = writer.ptr if lib.WebPEncode(config.ptr, self.ptr) == 0: raise WebPError('encoding error: ' + str(self.ptr.error_code)) return writer.to_webp_data()
(self, config=None)
714,544
webp
from_numpy
null
@staticmethod def from_numpy(arr, *, pilmode=None): ptr = ffi.new('WebPPicture*') if lib.WebPPictureInit(ptr) == 0: raise WebPError('version mismatch') if len(arr.shape) == 3: bytes_per_pixel = arr.shape[-1] elif len(arr.shape) == 2: bytes_per_pixel = 1 else: raise WebPError('unexpected array shape: ' + repr(arr.shape)) if pilmode is None: if bytes_per_pixel == 3: import_func = lib.WebPPictureImportRGB elif bytes_per_pixel == 4: import_func = lib.WebPPictureImportRGBA else: raise WebPError('cannot infer color mode from array of shape ' + repr(arr.shape)) else: if pilmode == 'RGB': import_func = lib.WebPPictureImportRGB elif pilmode == 'RGBA': import_func = lib.WebPPictureImportRGBA else: raise WebPError('unsupported image mode: ' + pilmode) ptr.height, ptr.width = arr.shape[:2] pixels = ffi.cast('uint8_t*', ffi.from_buffer(arr)) stride = ptr.width * bytes_per_pixel ptr.use_argb = 1 if import_func(ptr, pixels, stride) == 0: raise WebPError('memory error') return WebPPicture(ptr)
(arr, *, pilmode=None)
714,545
webp
from_pil
null
@staticmethod def from_pil(img): if img.mode == 'P': if 'transparency' in img.info: img = img.convert('RGBA') else: img = img.convert('RGB') return WebPPicture.from_numpy(np.asarray(img), pilmode=img.mode)
(img)
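A sketch of the PIL-to-file path through from_pil, WebPConfig.new, and save; the file names are placeholders:

from PIL import Image
import webp

img = Image.open('input.png')           # 'P'-mode images are converted to RGB/RGBA by from_pil
pic = webp.WebPPicture.from_pil(img)
config = webp.WebPConfig.new(quality=90.0)
pic.save('output.webp', config)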
714,546
webp
new
null
@staticmethod def new(width, height): ptr = ffi.new('WebPPicture*') if lib.WebPPictureInit(ptr) == 0: raise WebPError('version mismatch') ptr.width = width ptr.height = height if lib.WebPPictureAlloc(ptr) == 0: raise WebPError('memory error') return WebPPicture(ptr)
(width, height)
714,547
webp
save
null
def save(self, file_path, config=None): buf = self.encode(config).buffer() with open(file_path, 'wb') as f: f.write(buf)
(self, file_path, config=None)
714,548
webp
WebPPreset
An enumeration.
class WebPPreset(Enum): DEFAULT = lib.WEBP_PRESET_DEFAULT # Default PICTURE = lib.WEBP_PRESET_PICTURE # Indoor photo, portrait-like PHOTO = lib.WEBP_PRESET_PHOTO # Outdoor photo with natural lighting DRAWING = lib.WEBP_PRESET_DRAWING # Drawing with high-contrast details ICON = lib.WEBP_PRESET_ICON # Small-sized colourful image TEXT = lib.WEBP_PRESET_TEXT # Text-like
(value, names=None, *, module=None, qualname=None, type=None, start=1)
714,549
webp
_WebPData
null
class _WebPData: def __init__(self): self.ptr = ffi.new('WebPData*') lib.WebPDataInit(self.ptr) # Call this after the struct has been filled in def done(self, free_func=lib.WebPFree): webp_data = WebPData(self.ptr, ffi.gc(self.ptr.bytes, free_func)) self.ptr = None return webp_data
()
714,550
webp
__init__
null
def __init__(self): self.ptr = ffi.new('WebPData*') lib.WebPDataInit(self.ptr)
(self)
714,551
webp
done
null
def done(self, free_func=lib.WebPFree): webp_data = WebPData(self.ptr, ffi.gc(self.ptr.bytes, free_func)) self.ptr = None return webp_data
(self, free_func=lib.WebPFree)
714,552
webp
_mimwrite_pics
null
def _mimwrite_pics(file_path, pics, *, fps=30.0, **kwargs): enc_opts = WebPAnimEncoderOptions.new() enc = WebPAnimEncoder.new(pics[0].ptr.width, pics[0].ptr.height, enc_opts) config = WebPConfig.new(**kwargs) for i, pic in enumerate(pics): t = round((i * 1000) / fps) enc.encode_frame(pic, t, config) end_t = round((len(pics) * 1000) / fps) anim_data = enc.assemble(end_t) with open(file_path, 'wb') as f: f.write(anim_data.buffer())
(file_path, pics, *, fps=30.0, **kwargs)
714,553
webp
imread
Load from file and decode numpy array with WebP. Args: file_path (str): File to load from. pilmode (str): Image color mode (RGBA, RGBa, or RGB). Returns: np.ndarray: The decoded image data.
def imread(file_path, *, pilmode='RGBA'): """Load from file and decode numpy array with WebP. Args: file_path (str): File to load from. pilmode (str): Image color mode (RGBA, RGBa, or RGB). Returns: np.ndarray: The decoded image data. """ if pilmode == 'RGBA': color_mode = WebPColorMode.RGBA elif pilmode == 'RGBa': color_mode = WebPColorMode.rgbA elif pilmode == 'RGB': color_mode = WebPColorMode.RGB else: raise WebPError('unsupported color mode: ' + pilmode) with open(file_path, 'rb') as f: webp_data = WebPData.from_buffer(f.read()) arr = webp_data.decode(color_mode=color_mode) return arr
(file_path, *, pilmode='RGBA')
714,554
webp
imwrite
Encode numpy array image with WebP and save to file. Args: file_path (str): File to save to. arr (np.ndarray): Image data to save. pilmode (str): PIL image mode corresponding to the data in `arr`. kwargs: Keyword arguments for encoder settings (see `WebPConfig.new`).
def imwrite(file_path, arr, *, pilmode=None, **kwargs): """Encode numpy array image with WebP and save to file. Args: file_path (str): File to save to. arr (np.ndarray): Image data to save. pilmode (str): PIL image mode corresponding to the data in `arr`. kwargs: Keyword arguments for encoder settings (see `WebPConfig.new`). """ pic = WebPPicture.from_numpy(arr, pilmode=pilmode) config = WebPConfig.new(**kwargs) pic.save(file_path, config)
(file_path, arr, *, pilmode=None, **kwargs)
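A round-trip sketch for the imread/imwrite convenience functions above; file names and the quality value are placeholders:

import webp

arr = webp.imread('photo.webp', pilmode='RGB')                    # uint8 array, shape (height, width, 3)
webp.imwrite('photo_q80.webp', arr, pilmode='RGB', quality=80.0)  # extra kwargs go to WebPConfig.new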
714,556
webp
load_image
Load from file and decode PIL Image with WebP. Args: file_path (str): File to load from. mode (str): Mode for the PIL image (RGBA, RGBa, or RGB). Returns: PIL.Image: The decoded Image.
def load_image(file_path, mode='RGBA'): """Load from file and decode PIL Image with WebP. Args: file_path (str): File to load from. mode (str): Mode for the PIL image (RGBA, RGBa, or RGB). Returns: PIL.Image: The decoded Image. """ arr = imread(file_path, pilmode=mode) return Image.fromarray(arr, mode)
(file_path, mode='RGBA')
714,557
webp
load_images
Load from file and decode a sequence of PIL Images with WebP. Args: file_path (str): File to load from. mode (str): Mode for the PIL image (RGBA, RGBa, or RGB). kwargs: Keyword arguments for loading the images (see `mimread`). Returns: list of PIL.Image: The decoded Images.
def load_images(file_path, mode='RGBA', **kwargs): """Load from file and decode a sequence of PIL Images with WebP. Args: file_path (str): File to load from. mode (str): Mode for the PIL image (RGBA, RGBa, or RGB). kwargs: Keyword arguments for loading the images (see `mimread`). Returns: list of PIL.Image: The decoded Images. """ arrs = mimread(file_path, pilmode=mode, **kwargs) return [Image.fromarray(arr, mode) for arr in arrs]
(file_path, mode='RGBA', **kwargs)
714,558
webp
mimread
Load from file and decode a list of numpy arrays with WebP. Args: file_path (str): File to load from. pilmode (str): Image color mode (RGBA, RGBa, or RGB). fps (float, optional): Frames will be evenly sampled to meet this particular FPS. If `fps` is None, an ordered sequence of unique frames in the animation will be returned. use_threads (bool): Set to False to disable multi-threaded decoding. Returns: list of np.ndarray: The decoded image data.
def mimread(file_path, *, fps=None, use_threads=True, pilmode='RGBA'): """Load from file and decode a list of numpy arrays with WebP. Args: file_path (str): File to load from. pilmode (str): Image color mode (RGBA, RGBa, or RGB). fps (float, optional): Frames will be evenly sampled to meet this particular FPS. If `fps` is None, an ordered sequence of unique frames in the animation will be returned. use_threads (bool): Set to False to disable multi-threaded decoding. Returns: list of np.ndarray: The decoded image data. """ if pilmode == 'RGBA': color_mode = WebPColorMode.RGBA elif pilmode == 'RGBa': color_mode = WebPColorMode.rgbA elif pilmode == 'RGB': # NOTE: RGB decoding of animations is currently not supported by # libwebpdemux. Hence we will read RGBA and remove the alpha channel later. color_mode = WebPColorMode.RGBA else: raise WebPError('unsupported color mode: ' + pilmode) arrs = [] with open(file_path, 'rb') as f: webp_data = WebPData.from_buffer(f.read()) dec_opts = WebPAnimDecoderOptions.new( use_threads=use_threads, color_mode=color_mode) dec = WebPAnimDecoder.new(webp_data, dec_opts) eps = 1e-7 for arr, frame_end_time in dec.frames(): if pilmode == 'RGB': arr = arr[:, :, 0:3] if fps is None: arrs.append(arr) else: while len(arrs) * (1000 / fps) + eps < frame_end_time: arrs.append(arr) return arrs
(file_path, *, fps=None, use_threads=True, pilmode='RGBA')
714,559
webp
mimwrite
Encode a sequence of numpy array images as a WebP animation and save to file. Args: file_path (str): File to save to. arrs (list of np.ndarray): Image data to save. fps (float): Animation speed in frames per second. pilmode (str): PIL image mode corresponding to the data in each array. kwargs: Keyword arguments for encoder settings (see `WebPConfig.new`).
def mimwrite(file_path, arrs, *, fps=30.0, pilmode=None, **kwargs): """Encode a sequence of numpy array images as a WebP animation and save to file. Args: file_path (str): File to save to. arrs (list of np.ndarray): Image data to save. fps (float): Animation speed in frames per second. pilmode (str): PIL image mode corresponding to the data in each array. kwargs: Keyword arguments for encoder settings (see `WebPConfig.new`). """ pics = [WebPPicture.from_numpy(arr, pilmode=pilmode) for arr in arrs] _mimwrite_pics(file_path, pics, fps=fps, **kwargs)
(file_path, arrs, *, fps=30.0, pilmode=None, **kwargs)
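An animation round trip through mimwrite/mimread, using synthetic frames (frame size and fps are arbitrary):

import numpy as np
import webp

# Ten random 64x64 RGBA frames written as a 10 fps animation, then sampled back at the same rate.
frames = [np.random.randint(0, 256, (64, 64, 4), dtype=np.uint8) for _ in range(10)]
webp.mimwrite('anim.webp', frames, fps=10.0, pilmode='RGBA')
decoded = webp.mimread('anim.webp', fps=10.0)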
714,561
webp
save_image
Encode PIL Image with WebP and save to file. Args: img (PIL.Image): Image to save. file_path (str): File to save to. kwargs: Keyword arguments for encoder settings (see `WebPConfig.new`).
def save_image(img, file_path, **kwargs): """Encode PIL Image with WebP and save to file. Args: img (PIL.Image): Image to save. file_path (str): File to save to. kwargs: Keyword arguments for encoder settings (see `WebPConfig.new`). """ pic = WebPPicture.from_pil(img) config = WebPConfig.new(**kwargs) pic.save(file_path, config)
(img, file_path, **kwargs)
714,562
webp
save_images
Encode a sequence of PIL Images with WebP and save to file. Args: imgs (list of PIL.Image): Images to save. file_path (str): File to save to. kwargs: Keyword arguments for saving the images (see `mimwrite`).
def save_images(imgs, file_path, **kwargs): """Encode a sequence of PIL Images with WebP and save to file. Args: imgs (list of PIL.Image): Images to save. file_path (str): File to save to. kwargs: Keyword arguments for saving the images (see `mimwrite`). """ pics = [WebPPicture.from_pil(img) for img in imgs] _mimwrite_pics(file_path, pics, **kwargs)
(imgs, file_path, **kwargs)
714,563
nwnsdk.postgres.dbmodels
JobStatus
An enumeration.
class JobStatus(Enum): REGISTERED = "registered" RUNNING = "running" FINISHED = "finished" ERROR = "error" STOPPED = "stopped"
(value, names=None, *, module=None, qualname=None, type=None, start=1)
714,564
nwnsdk.app_logging
LogLevel
Simple enum to cover log levels for logging library.
class LogLevel(Enum): """Simple enum to cover log levels for logging library.""" DEBUG = logging.DEBUG INFO = logging.INFO WARNING = logging.WARNING ERROR = logging.ERROR @staticmethod def parse(value: str) -> "LogLevel": """ Parses a given string into a LogLevel. Parameters ---------- value : str user-provided string containing the requested log level Returns ------- LogLevel LogLevel for this logger """ lowered = value.lower() if lowered == "debug": result = LogLevel.DEBUG elif lowered == "info": result = LogLevel.INFO elif lowered in ["warning", "warn"]: result = LogLevel.WARNING elif lowered in ["err", "error"]: result = LogLevel.ERROR else: raise ValueError(f"Value {value} is not a valid log level.") return result
(value, names=None, *, module=None, qualname=None, type=None, start=1)
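A small sketch of the alias handling in LogLevel.parse, assuming the import path shown in the rows above:

from nwnsdk.app_logging import LogLevel

level = LogLevel.parse("WARN")  # -> LogLevel.WARNING; parsing is case-insensitive and accepts aliases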
714,565
nwnsdk.nwn_client
NwnClient
null
class NwnClient(PostgresClient, RabbitmqClient): rabbitmq_client: RabbitmqClient postgres_client: PostgresClient logger: logging.Logger def __init__(self, postgres_config: PostgresConfig, rabbitmq_config: RabbitmqConfig): PostgresClient.__init__(self, postgres_config) RabbitmqClient.__init__(self, rabbitmq_config) def connect(self): PostgresClient._connect_postgres(self) RabbitmqClient._start_rabbitmq(self) def stop(self): PostgresClient._close_postgres(self) RabbitmqClient._stop_rabbitmq(self) def start_work_flow( self, work_flow_type: WorkFlowType, job_name: str, esdl_str: str, user_name: str, project_name: str ) -> uuid4: job_id: uuid4 = uuid4() PostgresClient._send_input( self, job_id=job_id, job_name=job_name, work_flow_type=work_flow_type, esdl_str=esdl_str, user_name=user_name, project_name=project_name, ) RabbitmqClient._send_start_work_flow(self, job_id, work_flow_type) return job_id def get_job_details(self, job_id: uuid4) -> Job: return self.get_job(job_id) def get_all_jobs(self) -> List[Job]: return self.get_jobs() def get_jobs_from_ids(self, job_ids: List[uuid4]) -> List[Job]: return self.get_jobs(job_ids)
(postgres_config: nwnsdk.config.PostgresConfig, rabbitmq_config: nwnsdk.config.RabbitmqConfig)
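A hedged end-to-end sketch of the NwnClient workflow above (connect, submit, inspect, stop); hosts, ports, credentials, and the ESDL payload are all placeholders:

from nwnsdk.config import PostgresConfig, RabbitmqConfig, WorkFlowType
from nwnsdk.nwn_client import NwnClient

client = NwnClient(
    PostgresConfig("localhost", 5432, "nwn", "user", "password"),
    RabbitmqConfig("localhost", 5672, "nwn", "user", "password"),
)
client.connect()
job_id = client.start_work_flow(
    WorkFlowType.GROW_OPTIMIZER, "my-job", "<esdl/>", "alice", "demo-project"
)
print(client.get_job_details(job_id).status)  # JobStatus.REGISTERED until a worker picks the job up
client.stop()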
714,566
nwnsdk.nwn_client
__init__
null
def __init__(self, postgres_config: PostgresConfig, rabbitmq_config: RabbitmqConfig): PostgresClient.__init__(self, postgres_config) RabbitmqClient.__init__(self, rabbitmq_config)
(self, postgres_config: nwnsdk.config.PostgresConfig, rabbitmq_config: nwnsdk.config.RabbitmqConfig)
714,570
nwnsdk.postgres.postgres_client
_close_postgres
null
def _close_postgres(self): if self.engine: self.engine.dispose()
(self)
714,571
nwnsdk.postgres.postgres_client
_connect_postgres
null
def _connect_postgres(self): self.engine = initialize_db("nwn", self.db_config)
(self)
714,572
nwnsdk.rabbitmq.rabbitmq_client
_connect_rabbitmq
null
def _connect_rabbitmq(self): # initialize rabbitmq connection LOGGER.info( "Connecting to RabbitMQ at %s:%s as user %s", self.rabbitmq_config.host, self.rabbitmq_config.port, self.rabbitmq_config.user_name, ) credentials = pika.PlainCredentials(self.rabbitmq_config.user_name, self.rabbitmq_config.password) parameters = pika.ConnectionParameters( self.rabbitmq_config.host, self.rabbitmq_config.port, "/", credentials, heartbeat=60, blocked_connection_timeout=3600, connection_attempts=10, ) if not self.rabbitmq_connection or self.rabbitmq_connection.is_closed: LOGGER.info("Setting up a new connection to RabbitMQ.") self.rabbitmq_connection = pika.BlockingConnection(parameters) if not self.rabbitmq_channel or self.rabbitmq_channel.is_closed: LOGGER.info("Setting up a new channel to RabbitMQ.") self.rabbitmq_channel = self.rabbitmq_connection.channel() self.rabbitmq_channel.basic_qos(prefetch_size=0, prefetch_count=1) self.rabbitmq_channel.exchange_declare(exchange=self.rabbitmq_exchange, exchange_type="topic") for queue_item in Queue: queue = self.rabbitmq_channel.queue_declare(queue_item.value, exclusive=False).method.queue self.rabbitmq_channel.queue_bind(queue, self.rabbitmq_exchange, routing_key=queue_item.value) for queue, callback in self.rabbitmq_callbacks.items(): self.rabbitmq_channel.basic_consume(queue=queue.value, on_message_callback=callback, auto_ack=False) LOGGER.info("Connected to RabbitMQ")
(self)
714,575
nwnsdk.postgres.postgres_client
_send_input
null
def _send_input( self, job_id: uuid4, job_name: str, work_flow_type: WorkFlowType, esdl_str: str, user_name: str, project_name: str, ) -> None: with session_scope() as session: new_job = Job( job_id=job_id, job_name=job_name, work_flow_type=work_flow_type, user_name=user_name, project_name=project_name, status=JobStatus.REGISTERED, input_esdl=esdl_str, added_at=datetime.now(), ) session.add(new_job)
(self, job_id: uuid4, job_name: str, work_flow_type: nwnsdk.config.WorkFlowType, esdl_str: str, user_name: str, project_name: str) -> NoneType
714,576
nwnsdk.rabbitmq.rabbitmq_client
_send_output
null
def _send_output(self, queue: Queue, message: str): body: bytes = message.encode("utf-8") self.rabbitmq_connection.add_callback_threadsafe( functools.partial( self.rabbitmq_channel.basic_publish, exchange=self.rabbitmq_exchange, routing_key=queue.value, body=body ) )
(self, queue: nwnsdk.rabbitmq.rabbitmq_client.Queue, message: str)
714,577
nwnsdk.rabbitmq.rabbitmq_client
_send_start_work_flow
null
def _send_start_work_flow(self, job_id: uuid4, work_flow_type: WorkFlowType): # TODO convert to protobuf # TODO job_id converted to string for json body = json.dumps({"job_id": str(job_id)}) self._send_output(Queue.from_workflow_type(work_flow_type), body)
(self, job_id: uuid4, work_flow_type: nwnsdk.config.WorkFlowType)
714,581
nwnsdk.rabbitmq.rabbitmq_client
_start_rabbitmq
null
def _start_rabbitmq(self): self._connect_rabbitmq() self.start()
(self)
714,583
nwnsdk.rabbitmq.rabbitmq_client
_stop_rabbitmq
null
def _stop_rabbitmq(self): self.rabbitmq_is_running = False if self.rabbitmq_connection: self.rabbitmq_connection.add_callback_threadsafe(self.rabbitmq_connection.close)
(self)
714,585
nwnsdk.nwn_client
connect
null
def connect(self): PostgresClient._connect_postgres(self) RabbitmqClient._start_rabbitmq(self)
(self)
714,586
nwnsdk.postgres.postgres_client
delete_job
null
def delete_job(self, job_id: uuid4) -> bool: LOGGER.debug("Deleting job with id '%s'", job_id) session: Session with session_scope() as session: stmnt = select(Job).where(Job.job_id == job_id) job = session.scalars(stmnt).all() if job: stmnt = delete(Job).where(Job.job_id == job_id) session.execute(stmnt) job_deleted = True else: job_deleted = False return job_deleted
(self, job_id: uuid4) -> bool
714,588
nwnsdk.nwn_client
get_all_jobs
null
def get_all_jobs(self) -> List[Job]: return self.get_jobs()
(self) -> List[nwnsdk.postgres.dbmodels.Job]
714,589
nwnsdk.postgres.postgres_client
get_job
null
def get_job(self, job_id: uuid4) -> Job: LOGGER.debug("Retrieving job data for job with id '%s'", job_id) session: Session with session_scope(do_expunge=True) as session: stmnt = select(Job).where(Job.job_id == job_id) job = session.scalar(stmnt) return job
(self, job_id: uuid4) -> nwnsdk.postgres.dbmodels.Job
714,590
nwnsdk.nwn_client
get_job_details
null
def get_job_details(self, job_id: uuid4) -> Job: return self.get_job(job_id)
(self, job_id: uuid4) -> nwnsdk.postgres.dbmodels.Job
714,591
nwnsdk.postgres.postgres_client
get_job_input_esdl
null
def get_job_input_esdl(self, job_id: uuid4) -> str: LOGGER.debug("Retrieving input esdl for job %s", job_id) with session_scope(do_expunge=True) as session: stmnt = select(Job.input_esdl).where(Job.job_id == (job_id)) job_input_esdl: str = session.scalar(stmnt) return job_input_esdl
(self, job_id: uuid4) -> str
714,592
nwnsdk.postgres.postgres_client
get_job_logs
null
def get_job_logs(self, job_id: uuid4) -> str: LOGGER.debug("Retrieving job log for job with id '%s'", job_id) with session_scope() as session: stmnt = select(Job.logs).where(Job.job_id == job_id) job_logs: Job = session.scalar(stmnt) return job_logs
(self, job_id: uuid4) -> str
714,593
nwnsdk.postgres.postgres_client
get_job_output_esdl
null
def get_job_output_esdl(self, job_id: uuid4) -> str: LOGGER.debug("Retrieving job output esdl for job with id '%s'", job_id) with session_scope() as session: stmnt = select(Job.output_esdl).where(Job.job_id == job_id) job_output_esdl: Job = session.scalar(stmnt) return job_output_esdl
(self, job_id: uuid4) -> str
714,594
nwnsdk.postgres.postgres_client
get_job_status
null
def get_job_status(self, job_id: uuid4) -> JobStatus: LOGGER.debug("Retrieving job status for job with id '%s'", job_id) with session_scope(do_expunge=True) as session: stmnt = select(Job.status).where(Job.job_id == job_id) job_status = session.scalar(stmnt) return job_status
(self, job_id: uuid4) -> nwnsdk.postgres.dbmodels.JobStatus
714,595
nwnsdk.postgres.postgres_client
get_jobs
null
def get_jobs(self, job_ids: List[uuid4] = None) -> List[Job]: with session_scope(do_expunge=True) as session: stmnt = ALL_JOBS_STMNT if job_ids: LOGGER.debug(f"Retrieving job data for jobs '{','.join([str(job_id) for job_id in job_ids])}'") stmnt = stmnt.where(Job.job_id.in_(job_ids)) else: LOGGER.debug(f"Retrieving job data for all jobs") jobs = session.scalars(stmnt).all() return jobs
(self, job_ids: Optional[List[uuid4]] = None) -> List[nwnsdk.postgres.dbmodels.Job]
714,596
nwnsdk.nwn_client
get_jobs_from_ids
null
def get_jobs_from_ids(self, job_ids: List[uuid4]) -> List[Job]: return self.get_jobs(job_ids)
(self, job_ids: List[uuid4]) -> List[nwnsdk.postgres.dbmodels.Job]
714,597
nwnsdk.postgres.postgres_client
get_jobs_from_project
null
def get_jobs_from_project(self, project_name: str) -> List[Job]: LOGGER.debug(f"Retrieving job data for jobs from project '{project_name}'") with session_scope(do_expunge=True) as session: stmnt = ALL_JOBS_STMNT.where(Job.project_name == project_name) jobs = session.scalars(stmnt).all() return jobs
(self, project_name: str) -> List[nwnsdk.postgres.dbmodels.Job]
714,598
nwnsdk.postgres.postgres_client
get_jobs_from_user
null
def get_jobs_from_user(self, user_name: str) -> List[Job]: LOGGER.debug(f"Retrieving job data for jobs from user '{user_name}'") with session_scope(do_expunge=True) as session: stmnt = ALL_JOBS_STMNT.where(Job.user_name == user_name) jobs = session.scalars(stmnt).all() return jobs
(self, user_name: str) -> List[nwnsdk.postgres.dbmodels.Job]
714,602
nwnsdk.rabbitmq.rabbitmq_client
run
null
def run(self): self.rabbitmq_is_running = True while self.rabbitmq_is_running: try: LOGGER.info("Waiting for input...") while self.rabbitmq_is_running: self.rabbitmq_channel._process_data_events(time_limit=1) except pika.exceptions.ConnectionClosedByBroker as exc: LOGGER.info('Connection was closed by broker. Reason: "%s". Shutting down...', exc.reply_text) except pika.exceptions.ChannelClosedByBroker as exc: LOGGER.info('Channel was closed by broker. Reason: "%s". retrying...', exc.reply_text) self._connect_rabbitmq() except pika.exceptions.AMQPConnectionError: LOGGER.info("Connection was lost, retrying...") self._connect_rabbitmq()
(self)
714,605
nwnsdk.rabbitmq.rabbitmq_client
set_callbacks
null
def set_callbacks(self, callbacks: Dict[Queue, PikaCallback]): self.rabbitmq_callbacks.update(callbacks) for queue, callback in callbacks.items(): self.rabbitmq_connection.add_callback_threadsafe( functools.partial( self.rabbitmq_channel.basic_consume, queue=queue.value, on_message_callback=callback, auto_ack=False ) )
(self, callbacks: Dict[nwnsdk.rabbitmq.rabbitmq_client.Queue, Callable[[pika.adapters.blocking_connection.BlockingChannel, pika.spec.Basic.Deliver, pika.spec.BasicProperties, bytes], NoneType]])
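A consumer-registration sketch continuing the hypothetical client above; the callback follows the PikaCallback signature shown in the type annotation:

from nwnsdk.rabbitmq.rabbitmq_client import Queue

def on_optimizer_message(channel, method, properties, body):
    print("job request received:", body)
    channel.basic_ack(delivery_tag=method.delivery_tag)  # consumers are registered with auto_ack=False

client.set_callbacks({Queue.StartWorkflowOptimizer: on_optimizer_message})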
714,606
nwnsdk.postgres.postgres_client
set_job_running
null
def set_job_running(self, job_id: uuid4) -> None: LOGGER.debug("Started job with id '%s'", job_id) with session_scope() as session: stmnt = update(Job).where(Job.job_id == job_id).values(status=JobStatus.RUNNING, running_at=datetime.now()) session.execute(stmnt)
(self, job_id: uuid4) -> NoneType
714,608
nwnsdk.nwn_client
start_work_flow
null
def start_work_flow( self, work_flow_type: WorkFlowType, job_name: str, esdl_str: str, user_name: str, project_name: str ) -> uuid4: job_id: uuid4 = uuid4() PostgresClient._send_input( self, job_id=job_id, job_name=job_name, work_flow_type=work_flow_type, esdl_str=esdl_str, user_name=user_name, project_name=project_name, ) RabbitmqClient._send_start_work_flow(self, job_id, work_flow_type) return job_id
(self, work_flow_type: nwnsdk.config.WorkFlowType, job_name: str, esdl_str: str, user_name: str, project_name: str) -> uuid4
714,609
nwnsdk.nwn_client
stop
null
def stop(self): PostgresClient._close_postgres(self) RabbitmqClient._stop_rabbitmq(self)
(self)
714,610
nwnsdk.postgres.postgres_client
store_job_result
null
def store_job_result(self, job_id: uuid4, new_logs: str, new_status: JobStatus, output_esdl: str): LOGGER.debug( "Storing job result for job %s with status %s and %s characters of log", job_id, new_status, len(new_logs), ) with session_scope() as session: stmnt = ( update(Job) .where(Job.job_id == job_id) .values(status=new_status, logs=new_logs, output_esdl=output_esdl, stopped_at=datetime.now()) ) session.execute(stmnt)
(self, job_id: uuid4, new_logs: str, new_status: nwnsdk.postgres.dbmodels.JobStatus, output_esdl: str)
714,611
nwnsdk.config
PostgresConfig
PostgresConfig(host: str, port: int, database_name: str, user_name: str, password: str)
class PostgresConfig: host: str port: int database_name: str user_name: str password: str
(host: str, port: int, database_name: str, user_name: str, password: str) -> None
714,612
nwnsdk.config
__eq__
null
from dataclasses import dataclass from enum import Enum from typing import Optional class WorkFlowType(Enum): GROW_OPTIMIZER = "grow_optimizer" GROW_SIMULATOR = "grow_simulator"
(self, other)
714,615
nwnsdk.rabbitmq.rabbitmq_client
Queue
An enumeration.
class Queue(Enum): StartWorkflowOptimizer = "start_work_flow.optimizer" StartWorkflowGrowSimulator = "start_work_flow.grow_simulator" @staticmethod def from_workflow_type(workflow_type: WorkFlowType) -> "Queue": if workflow_type == WorkFlowType.GROW_OPTIMIZER: return Queue.StartWorkflowOptimizer elif workflow_type == WorkFlowType.GROW_SIMULATOR: return Queue.StartWorkflowGrowSimulator else: raise RuntimeError(f"Unimplemented workflow type {workflow_type}. Please implement.")
(value, names=None, *, module=None, qualname=None, type=None, start=1)
714,616
nwnsdk.config
RabbitmqConfig
RabbitmqConfig(host: str, port: int, exchange_name: str, user_name: str, password: str, hipe_compile: Optional[int] = 1)
class RabbitmqConfig: host: str port: int exchange_name: str user_name: str password: str hipe_compile: Optional[int] = 1
(host: str, port: int, exchange_name: str, user_name: str, password: str, hipe_compile: Optional[int] = 1) -> None
714,620
nwnsdk.config
WorkFlowType
An enumeration.
class WorkFlowType(Enum): GROW_OPTIMIZER = "grow_optimizer" GROW_SIMULATOR = "grow_simulator"
(value, names=None, *, module=None, qualname=None, type=None, start=1)
714,627
nwnsdk.app_logging
setup_logging
Initializes logging. Parameters ---------- log_level : LogLevel The LogLevel for this logger. logger_name : Optional[str] Name for this logger.
def setup_logging(log_level: LogLevel, logger_name: Optional[str]) -> logging.Logger: """ Initializes logging. Parameters ---------- log_level : LogLevel The LogLevel for this logger. logger_name : Optional[str] Name for this logger. """ logger = logging.getLogger(logger_name) if logger_name not in CONFIGURED_LOGGERS: print(f"Will use log level {log_level} for logger {logger_name}") logger.setLevel(log_level.value) log_handler = logging.StreamHandler(sys.stdout) formatter = logging.Formatter( fmt="%(asctime)s [%(threadName)s][%(filename)s:%(lineno)d]" "[%(levelname)s]: %(message)s" ) log_handler.setFormatter(formatter) logger.addHandler(log_handler) CONFIGURED_LOGGERS[logger_name] = logger return logger
(log_level: nwnsdk.app_logging.LogLevel, logger_name: Optional[str]) -> logging.Logger
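A usage sketch for setup_logging; the logger name is arbitrary:

from nwnsdk.app_logging import LogLevel, setup_logging

logger = setup_logging(LogLevel.parse("info"), "my_app")
logger.info("handler attached once per logger name; repeated calls reuse it")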
714,630
langid.langid
classify
Convenience method using a global identifier instance with the default model included in langid.py. Identifies the language that a string is written in. @param instance a text string. Unicode strings will automatically be utf8-encoded @returns a tuple of the most likely language and the confidence score
def classify(instance): """ Convenience method using a global identifier instance with the default model included in langid.py. Identifies the language that a string is written in. @param instance a text string. Unicode strings will automatically be utf8-encoded @returns a tuple of the most likely language and the confidence score """ global identifier if identifier is None: load_model() return identifier.classify(instance)
(instance)
714,632
langid.langid
rank
Convenience method using a global identifier instance with the default model included in langid.py. Ranks all the languages in the model according to the likelihood that the string is written in each language. @param instance a text string. Unicode strings will automatically be utf8-encoded @returns a list of tuples language and the confidence score, in descending order
def rank(instance): """ Convenience method using a global identifier instance with the default model included in langid.py. Ranks all the languages in the model according to the likelihood that the string is written in each language. @param instance a text string. Unicode strings will automatically be utf8-encoded @returns a list of tuples language and the confidence score, in descending order """ global identifier if identifier is None: load_model() return identifier.rank(instance)
(instance)
714,633
langid.langid
set_languages
Set the language set used by the global identifier. @param langs a list of language codes
def set_languages(langs=None): """ Set the language set used by the global identifier. @param langs a list of language codes """ global identifier if identifier is None: load_model() return identifier.set_languages(langs)
(langs=None)
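A sketch of the three module-level wrappers above, assuming they are re-exported at package level as langid.classify and friends:

import langid

lang, score = langid.classify("Das ist ein Test.")  # e.g. ('de', <confidence score>)
langid.set_languages(['de', 'en', 'fr'])            # restrict the global identifier's candidate set
ranking = langid.rank("This is a test.")            # [(lang, score), ...] in descending order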
714,641
jsonconversion
get_all_args
Determines the names of all arguments of the given function :param func: The function/method to be inspected :return: Argument names :rtype: set(str)
def get_all_args(func): """Determines the names of all arguments of the given function :param func: The function/method to be inspected :return: Argument names :rtype: set(str) """ fullargspec = getfullargspec(func) func_args = set() for arg_type in ["args", "varargs", "varkw", "kwonlyargs", "keywords"]: arg_type_val = getattr(fullargspec, arg_type, None) if arg_type_val is not None: func_args |= set(arg_type_val) return func_args
(func)
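A quick demonstration of get_all_args across positional, variadic, and keyword-only arguments; the import path is an assumption:

from jsonconversion import get_all_args

def example(a, b=1, *args, c=None, **kwargs):
    pass

print(get_all_args(example))  # {'a', 'b', 'args', 'c', 'kwargs'}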
714,651
google_crc32c.cext
Checksum
Hashlib-alike helper for CRC32C operations. Args: initial_value (Optional[bytes]): the initial chunk of data from which the CRC32C checksum is computed. Defaults to b''.
class Checksum(CommonChecksum): """Hashlib-alike helper for CRC32C operations. Args: initial_value (Optional[bytes]): the initial chunk of data from which the CRC32C checksum is computed. Defaults to b''. """ __slots__ = ("_crc",) def __init__(self, initial_value=b""): self._crc = value(initial_value) def update(self, chunk): """Update the checksum with a new chunk of data. Args: chunk (Optional[bytes]): a chunk of data used to extend the CRC32C checksum. """ self._crc = extend(self._crc, chunk)
(initial_value=b'')
714,652
google_crc32c.cext
__init__
null
def __init__(self, initial_value=b""): self._crc = value(initial_value)
(self, initial_value=b'')
714,653
google_crc32c._checksum
consume
Consume chunks from a stream, extending our CRC32 checksum. Args: stream (BinaryIO): the stream to consume. chunksize (int): the size of the read to perform Returns: Generator[bytes, None, None]: Iterable of the chunks read from the stream.
def consume(self, stream, chunksize): """Consume chunks from a stream, extending our CRC32 checksum. Args: stream (BinaryIO): the stream to consume. chunksize (int): the size of the read to perform Returns: Generator[bytes, None, None]: Iterable of the chunks read from the stream. """ while True: chunk = stream.read(chunksize) if not chunk: break self.update(chunk) yield chunk
(self, stream, chunksize)
714,654
google_crc32c._checksum
copy
Create another checksum with the same CRC32C value. Returns: Checksum: the new instance.
def copy(self): """Create another checksum with the same CRC32C value. Returns: Checksum: the new instance. """ clone = self.__class__() clone._crc = self._crc return clone
(self)
714,655
google_crc32c._checksum
digest
Big-endian order, per RFC 4960. See: https://cloud.google.com/storage/docs/json_api/v1/objects#crc32c Returns: bytes: A four-byte digest string.
def digest(self): """Big-endian order, per RFC 4960. See: https://cloud.google.com/storage/docs/json_api/v1/objects#crc32c Returns: bytes: A four-byte digest string. """ return struct.pack(">L", self._crc)
(self)
714,656
google_crc32c._checksum
hexdigest
Like :meth:`digest` except returns as a bytestring of double length. Returns: bytes: An eight-byte bytestring containing only hex digits.
def hexdigest(self): """Like :meth:`digest` except returns as a bytestring of double length. Returns: bytes: An eight-byte bytestring containing only hex digits. """ return "{:08x}".format(self._crc).encode("ascii")
(self)
714,657
google_crc32c.cext
update
Update the checksum with a new chunk of data. Args: chunk (Optional[bytes]): a chunk of data used to extend the CRC32C checksum.
def update(self, chunk): """Update the checksum with a new chunk of data. Args: chunk (Optional[bytes]): a chunk of data used to extend the CRC32C checksum. """ self._crc = extend(self._crc, chunk)
(self, chunk)
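A usage sketch for the CRC32C helper, combining update, digest/hexdigest, and the streaming consume generator:

import io
import google_crc32c

checksum = google_crc32c.Checksum(b"hello ")
checksum.update(b"world")
print(checksum.digest())     # four big-endian bytes, per RFC 4960
print(checksum.hexdigest())  # eight ASCII hex digits

# consume() re-yields each chunk so callers can forward data while checksumming it.
clone = checksum.copy()
for chunk in clone.consume(io.BytesIO(b"more data"), chunksize=4):
    pass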
714,666
pandas_redshift.core
close_up_shop
null
def close_up_shop(): global connect, cursor, s3, s3_bucket_var, s3_subdirectory_var, aws_1, aws_2, aws_token cursor.close() connect.commit() connect.close() try: del connect, cursor except NameError: pass try: del s3, s3_bucket_var, s3_subdirectory_var, aws_1, aws_2, aws_token except NameError: pass
()
714,667
pandas_redshift.core
connect_to_redshift
null
def connect_to_redshift(dbname, host, user, port=5439, **kwargs): global connect, cursor connect = psycopg2.connect(dbname=dbname, host=host, port=port, user=user, **kwargs) cursor = connect.cursor()
(dbname, host, user, port=5439, **kwargs)
714,668
pandas_redshift.core
connect_to_s3
null
def connect_to_s3(aws_access_key_id, aws_secret_access_key, bucket, subdirectory=None, aws_iam_role=None, **kwargs): global s3, s3_bucket_var, s3_subdirectory_var, aws_1, aws_2, aws_token, aws_role s3 = boto3.resource('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, **kwargs) s3_bucket_var = bucket if subdirectory is None: s3_subdirectory_var = '' else: s3_subdirectory_var = subdirectory + '/' aws_1 = aws_access_key_id aws_2 = aws_secret_access_key aws_role = aws_iam_role if kwargs.get('aws_session_token'): aws_token = kwargs.get('aws_session_token') else: aws_token = ''
(aws_access_key_id, aws_secret_access_key, bucket, subdirectory=None, aws_iam_role=None, **kwargs)
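A connection sketch for pandas_redshift; every credential and name below is a placeholder, and the password is forwarded to psycopg2 through **kwargs:

import pandas_redshift as pr

pr.connect_to_redshift(dbname='mydb', host='cluster.example.redshift.amazonaws.com',
                       user='analyst', password='***')
pr.connect_to_s3(aws_access_key_id='***', aws_secret_access_key='***',
                 bucket='my-bucket', subdirectory='staging')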
714,670
pandas_redshift.core
create_redshift_table
Create an empty RedShift Table
def create_redshift_table(data_frame, redshift_table_name, column_data_types=None, index=False, append=False, diststyle='even', distkey='', sort_interleaved=False, sortkey='', verbose=True): """Create an empty RedShift Table """ if index: columns = list(data_frame.columns) if data_frame.index.name: columns.insert(0, data_frame.index.name) else: columns.insert(0, "index") else: columns = list(data_frame.columns) if column_data_types is None: column_data_types = get_column_data_types(data_frame, index) columns_and_data_type = ', '.join( ['{0} {1}'.format(x, y) for x, y in zip(columns, column_data_types)]) create_table_query = 'create table {0} ({1})'.format( redshift_table_name, columns_and_data_type) if not distkey: # Without a distkey, we can set a diststyle if diststyle not in ['even', 'all']: raise ValueError("diststyle must be either 'even' or 'all'") else: create_table_query += ' diststyle {0}'.format(diststyle) else: # otherwise, override diststyle with distkey create_table_query += ' distkey({0})'.format(distkey) if len(sortkey) > 0: if sort_interleaved: create_table_query += ' interleaved' create_table_query += ' sortkey({0})'.format(sortkey) if verbose: logger.info(create_table_query) logger.info('CREATING A TABLE IN REDSHIFT') cursor.execute('drop table if exists {0}'.format(redshift_table_name)) cursor.execute(create_table_query) connect.commit()
(data_frame, redshift_table_name, column_data_types=None, index=False, append=False, diststyle='even', distkey='', sort_interleaved=False, sortkey='', verbose=True)
714,671
pandas_redshift.core
df_to_s3
Write a dataframe to S3 Arguments: data_frame (pd.DataFrame) -- data to upload csv_name (str) -- name of the file to upload index (bool) -- write the dataframe index as a column save_local (bool) -- save a local copy delimiter (str) -- delimiter for csv file
def df_to_s3(data_frame, csv_name, index, save_local, delimiter, verbose=True, **kwargs): """Write a dataframe to S3 Arguments: data_frame (pd.DataFrame) -- data to upload csv_name (str) -- name of the file to upload index (bool) -- write the dataframe index as a column save_local (bool) -- save a local copy delimiter (str) -- delimiter for csv file """ extra_kwargs = {k: v for k, v in kwargs.items() if k in S3_ACCEPTED_KWARGS and v is not None} # create local backup if save_local: data_frame.to_csv(csv_name, index=index, sep=delimiter) if verbose: logger.info('saved file {0} in {1}'.format(csv_name, os.getcwd())) csv_buffer = StringIO() data_frame.to_csv(csv_buffer, index=index, sep=delimiter) s3.Bucket(s3_bucket_var).put_object( Key=s3_subdirectory_var + csv_name, Body=csv_buffer.getvalue(), **extra_kwargs) if verbose: logger.info('saved file {0} in bucket {1}'.format( csv_name, s3_subdirectory_var + csv_name))
(data_frame, csv_name, index, save_local, delimiter, verbose=True, **kwargs)
714,672
pandas_redshift.core
exec_commit
null
def exec_commit(sql_query): cursor.execute(sql_query) connect.commit()
(sql_query)
714,673
pandas_redshift.core
get_column_data_types
null
def get_column_data_types(data_frame, index=False): column_data_types = [pd_dtype_to_redshift_dtype(dtype.name) for dtype in data_frame.dtypes.values] if index: column_data_types.insert( 0, pd_dtype_to_redshift_dtype(data_frame.index.dtype.name)) return column_data_types
(data_frame, index=False)
714,675
pandas_redshift.core
mask_aws_credentials
null
def mask_aws_credentials(s): if logging_config['mask_secrets']: import re s = re.sub('(?<=access_key_id \')(.*)(?=\')', '*'*8, s) s = re.sub('(?<=secret_access_key \')(.*)(?=\')', '*'*8, s) return s
(s)
714,677
pandas_redshift.core
pandas_to_redshift
null
def pandas_to_redshift(data_frame, redshift_table_name, column_data_types=None, index=False, save_local=False, delimiter=',', quotechar='"', dateformat='auto', timeformat='auto', region='', append=False, diststyle='even', distkey='', sort_interleaved=False, sortkey='', parameters='', verbose=True, **kwargs): # Validate column names. data_frame = validate_column_names(data_frame) # Send data to S3 csv_name = '{}-{}.csv'.format(redshift_table_name, uuid.uuid4()) s3_kwargs = {k: v for k, v in kwargs.items() if k in S3_ACCEPTED_KWARGS and v is not None} df_to_s3(data_frame, csv_name, index, save_local, delimiter, verbose=verbose, **s3_kwargs) # CREATE AN EMPTY TABLE IN REDSHIFT if not append: create_redshift_table(data_frame, redshift_table_name, column_data_types, index, append, diststyle, distkey, sort_interleaved, sortkey, verbose=verbose) # CREATE THE COPY STATEMENT TO SEND FROM S3 TO THE TABLE IN REDSHIFT s3_to_redshift(redshift_table_name, csv_name, delimiter, quotechar, dateformat, timeformat, region, parameters, verbose=verbose)
(data_frame, redshift_table_name, column_data_types=None, index=False, save_local=False, delimiter=',', quotechar='"', dateformat='auto', timeformat='auto', region='', append=False, diststyle='even', distkey='', sort_interleaved=False, sortkey='', parameters='', verbose=True, **kwargs)
714,679
pandas_redshift.core
pd_dtype_to_redshift_dtype
null
def pd_dtype_to_redshift_dtype(dtype): if dtype.startswith('int64'): return 'BIGINT' elif dtype.startswith('int'): return 'INTEGER' elif dtype.startswith('float'): return 'REAL' elif dtype.startswith('datetime'): return 'TIMESTAMP' elif dtype == 'bool': return 'BOOLEAN' else: return 'VARCHAR(256)'
(dtype)
714,682
pandas_redshift.core
redshift_to_pandas
null
def redshift_to_pandas(sql_query, query_params=None): # pass a sql query and return a pandas dataframe cursor.execute(sql_query, query_params) columns_list = [desc[0] for desc in cursor.description] data = pd.DataFrame(cursor.fetchall(), columns=columns_list) return data
(sql_query, query_params=None)
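A round-trip sketch continuing the connections above: upload a dataframe (staged as a CSV in S3, then COPYed into Redshift), query it back with a parameterized statement, and clean up. The table name is a placeholder:

import pandas as pd

df = pd.DataFrame({'id': [1, 2], 'name': ['a', 'b']})
pr.pandas_to_redshift(df, 'public.example_table')
back = pr.redshift_to_pandas('select * from public.example_table where id = %s', (1,))
pr.close_up_shop()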