repo | file | code | file_length | avg_line_length | max_line_length | extension_type
---|---|---|---|---|---|---
cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/container.py |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import matplotlib.cbook as cbook
import matplotlib.artist as martist
class Container(tuple):
"""
Base class for containers.
Containers are classes that collect semantically related Artists such as
the bars of a bar plot.
"""
def __repr__(self):
return ("<{} object of {} artists>"
.format(type(self).__name__, len(self)))
def __new__(cls, *kl, **kwargs):
return tuple.__new__(cls, kl[0])
def __init__(self, kl, label=None):
self.eventson = False # fire events only if eventson
self._oid = 0 # an observer id
self._propobservers = {} # a dict from oids to funcs
self._remove_method = None
self.set_label(label)
def set_remove_method(self, f):
self._remove_method = f
def remove(self):
for c in cbook.flatten(
self, scalarp=lambda x: isinstance(x, martist.Artist)):
if c is not None:
c.remove()
if self._remove_method:
self._remove_method(self)
def __getstate__(self):
d = self.__dict__.copy()
# remove the unpicklable remove method, this will get re-added on load
# (by the axes) if the artist lives on an axes.
d['_remove_method'] = None
return d
def get_label(self):
"""
Get the label used for this artist in the legend.
"""
return self._label
def set_label(self, s):
"""
Set the label to *s* for auto legend.
ACCEPTS: string or anything printable with '%s' conversion.
"""
if s is not None:
self._label = '%s' % (s, )
else:
self._label = None
self.pchanged()
def add_callback(self, func):
"""
Adds a callback function that will be called whenever one of
the :class:`Artist`'s properties changes.
Returns an *id* that is useful for removing the callback with
:meth:`remove_callback` later.
"""
oid = self._oid
self._propobservers[oid] = func
self._oid += 1
return oid
def remove_callback(self, oid):
"""
Remove a callback based on its *id*.
.. seealso::
:meth:`add_callback`
For adding callbacks
"""
try:
del self._propobservers[oid]
except KeyError:
pass
def pchanged(self):
"""
Fire an event when property changed, calling all of the
registered callbacks.
"""
for oid, func in list(six.iteritems(self._propobservers)):
func(self)
def get_children(self):
return [child for child in cbook.flatten(self) if child is not None]
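# Hedged usage sketch (not part of the original module): plotting methods
# normally build containers for you, but the label and callback machinery
# above can be exercised directly on a hand-made one.
from matplotlib.lines import Line2D

_demo_lines = (Line2D([0, 1], [0, 1]), Line2D([0, 1], [1, 0]))
_demo = Container(_demo_lines, label='demo')
_oid = _demo.add_callback(lambda c: print('label is now', c.get_label()))
_demo.set_label('renamed')          # set_label() -> pchanged() -> callback
_demo.remove_callback(_oid)
print(len(_demo), _demo.get_label())   # -> 2 renamed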
class BarContainer(Container):
"""
Container for the artists of bar plots (e.g. created by `.Axes.bar`).
The container can be treated as a tuple of the *patches* themselves.
Additionally, you can access these and further parameters by the
attributes.
Attributes
----------
patches : list of :class:`~matplotlib.patches.Rectangle`
The artists of the bars.
errorbar : None or :class:`~matplotlib.container.ErrorbarContainer`
A container for the error bar artists if error bars are present.
*None* otherwise.
"""
def __init__(self, patches, errorbar=None, **kwargs):
self.patches = patches
self.errorbar = errorbar
Container.__init__(self, patches, **kwargs)
class ErrorbarContainer(Container):
"""
Container for the artists of error bars (e.g. created by `.Axes.errorbar`).
The container can be treated as the *lines* tuple itself.
Additionally, you can access these and further parameters by the
attributes.
Attributes
----------
lines : tuple
Tuple of ``(data_line, caplines, barlinecols)``.
- data_line : :class:`~matplotlib.lines.Line2D` instance of
x, y plot markers and/or line.
- caplines : tuple of :class:`~matplotlib.lines.Line2D` instances of
the error bar caps.
- barlinecols : list of :class:`~matplotlib.collections.LineCollection`
with the horizontal and vertical error ranges.
has_xerr, has_yerr : bool
``True`` if the errorbar has x/y errors.
"""
def __init__(self, lines, has_xerr=False, has_yerr=False, **kwargs):
self.lines = lines
self.has_xerr = has_xerr
self.has_yerr = has_yerr
Container.__init__(self, lines, **kwargs)
class StemContainer(Container):
"""
Container for the artists created in a :meth:`.Axes.stem` plot.
The container can be treated like a namedtuple ``(markerline, stemlines,
baseline)``.
Attributes
----------
markerline : :class:`~matplotlib.lines.Line2D`
The artist of the markers at the stem heads.
stemlines : list of :class:`~matplotlib.lines.Line2D`
The artists of the vertical lines for all stems.
baseline : :class:`~matplotlib.lines.Line2D`
The artist of the horizontal baseline.
"""
def __init__(self, markerline_stemlines_baseline, **kwargs):
markerline, stemlines, baseline = markerline_stemlines_baseline
self.markerline = markerline
self.stemlines = stemlines
self.baseline = baseline
Container.__init__(self, markerline_stemlines_baseline, **kwargs)
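# Hedged usage sketch (not part of the original module): Axes.bar returns a
# BarContainer; here one is built by hand from Rectangle patches just to show
# the tuple behaviour and the attributes documented above.
from matplotlib.patches import Rectangle

_demo_patches = [Rectangle((i, 0), 0.8, h) for i, h in enumerate([4, 1, 2])]
_demo_bars = BarContainer(_demo_patches, label='counts')
print(len(_demo_bars), _demo_bars.patches is _demo_patches,
      _demo_bars.get_label())
# -> 3 True counts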
| 5,561 | 27.523077 | 79 | py |
cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/dviread.py |
"""
A module for reading dvi files output by TeX. Several limitations make
this not (currently) useful as a general-purpose dvi preprocessor, but
it is currently used by the pdf backend for processing usetex text.
Interface::
with Dvi(filename, 72) as dvi:
# iterate over pages:
for page in dvi:
w, h, d = page.width, page.height, page.descent
for x,y,font,glyph,width in page.text:
fontname = font.texname
pointsize = font.size
...
for x,y,height,width in page.boxes:
...
"""
from __future__ import absolute_import, division, print_function
import six
from six.moves import xrange
from collections import namedtuple
from functools import partial, wraps
import logging
import numpy as np
import os
import re
import struct
import sys
import textwrap
from matplotlib import cbook, rcParams
from matplotlib.compat import subprocess
try:
from functools import lru_cache
except ImportError: # Py2
from backports.functools_lru_cache import lru_cache
if six.PY3:
def ord(x):
return x
_log = logging.getLogger(__name__)
# Dvi is a bytecode format documented in
# http://mirrors.ctan.org/systems/knuth/dist/texware/dvitype.web
# http://texdoc.net/texmf-dist/doc/generic/knuth/texware/dvitype.pdf
#
# The file consists of a preamble, some number of pages, a postamble,
# and a finale. Different opcodes are allowed in different contexts,
# so the Dvi object has a parser state:
#
# pre: expecting the preamble
# outer: between pages (followed by a page or the postamble,
# also e.g. font definitions are allowed)
# page: processing a page
# post_post: state after the postamble (our current implementation
# just stops reading)
# finale: the finale (unimplemented in our current implementation)
_dvistate = cbook.Bunch(pre=0, outer=1, inpage=2, post_post=3, finale=4)
# The marks on a page consist of text and boxes. A page also has dimensions.
Page = namedtuple('Page', 'text boxes height width descent')
Text = namedtuple('Text', 'x y font glyph width')
Box = namedtuple('Box', 'x y height width')
# Opcode argument parsing
#
# Each of the following functions takes a Dvi object and delta,
# which is the difference between the opcode and the minimum opcode
# with the same meaning. Dvi opcodes often encode the number of
# argument bytes in this delta.
def _arg_raw(dvi, delta):
"""Return *delta* without reading anything more from the dvi file"""
return delta
def _arg(bytes, signed, dvi, _):
"""Read *bytes* bytes, returning the bytes interpreted as a
signed integer if *signed* is true, unsigned otherwise."""
return dvi._arg(bytes, signed)
def _arg_slen(dvi, delta):
"""Signed, length *delta*
Read *delta* bytes, returning None if *delta* is zero, and
the bytes interpreted as a signed integer otherwise."""
if delta == 0:
return None
return dvi._arg(delta, True)
def _arg_slen1(dvi, delta):
"""Signed, length *delta*+1
Read *delta*+1 bytes, returning the bytes interpreted as signed."""
return dvi._arg(delta+1, True)
def _arg_ulen1(dvi, delta):
"""Unsigned length *delta*+1
Read *delta*+1 bytes, returning the bytes interpreted as unsigned."""
return dvi._arg(delta+1, False)
def _arg_olen1(dvi, delta):
"""Optionally signed, length *delta*+1
Read *delta*+1 bytes, returning the bytes interpreted as
unsigned integer for 0<=*delta*<3 and signed if *delta*==3."""
return dvi._arg(delta + 1, delta == 3)
_arg_mapping = dict(raw=_arg_raw,
u1=partial(_arg, 1, False),
u4=partial(_arg, 4, False),
s4=partial(_arg, 4, True),
slen=_arg_slen,
olen1=_arg_olen1,
slen1=_arg_slen1,
ulen1=_arg_ulen1)
def _dispatch(table, min, max=None, state=None, args=('raw',)):
"""Decorator for dispatch by opcode. Sets the values in *table*
from *min* to *max* to this method, adds a check that the Dvi state
matches *state* if not None, reads arguments from the file according
to *args*.
*table*
the dispatch table to be filled in
*min*
minimum opcode for calling this function
*max*
maximum opcode for calling this function, None if only *min* is allowed
*state*
state of the Dvi object in which these opcodes are allowed
*args*
sequence of argument specifications:
``'raw'``: opcode minus minimum
``'u1'``: read one unsigned byte
``'u4'``: read four bytes, treat as an unsigned number
``'s4'``: read four bytes, treat as a signed number
``'slen'``: read (opcode - minimum) bytes, treat as signed
``'slen1'``: read (opcode - minimum + 1) bytes, treat as signed
``'ulen1'``: read (opcode - minimum + 1) bytes, treat as unsigned
``'olen1'``: read (opcode - minimum + 1) bytes, treat as unsigned
if under four bytes, signed if four bytes
"""
def decorate(method):
get_args = [_arg_mapping[x] for x in args]
@wraps(method)
def wrapper(self, byte):
if state is not None and self.state != state:
raise ValueError("state precondition failed")
return method(self, *[f(self, byte-min) for f in get_args])
if max is None:
table[min] = wrapper
else:
for i in xrange(min, max+1):
assert table[i] is None
table[i] = wrapper
return wrapper
return decorate
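# Hedged illustration (toy names, not part of the original module): how the
# _dispatch decorator fills a dispatch table. The fake reader below only
# supplies what the wrapper touches, a ``_arg`` method (no state precondition
# is given, so ``state`` is never checked); the real opcode handlers are
# registered on Dvi._dtable in the class below.
_toy_table = [None] * 256

class _ToyReader(object):
    def _arg(self, nbytes, signed):
        # pretend every argument decodes to 7, regardless of length
        return 7

    @_dispatch(_toy_table, min=10, max=13, args=('olen1',))
    def _op(self, value):
        return value

# opcode 12 reads (12 - 10 + 1) == 3 unsigned bytes through the fake _arg,
# exactly as Dvi._read would do via ``self._dtable[byte](self, byte)``
assert _toy_table[12](_ToyReader(), 12) == 7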
class Dvi(object):
"""
A reader for a dvi ("device-independent") file, as produced by TeX.
The current implementation can only iterate through pages in order,
and does not even attempt to verify the postamble.
This class can be used as a context manager to close the underlying
file upon exit. Pages can be read via iteration. Here is an overly
simple way to extract text without trying to detect whitespace::
>>> with matplotlib.dviread.Dvi('input.dvi', 72) as dvi:
>>> for page in dvi:
>>> print(''.join(unichr(t.glyph) for t in page.text))
"""
# dispatch table
_dtable = [None for _ in xrange(256)]
_dispatch = partial(_dispatch, _dtable)
def __init__(self, filename, dpi):
"""
Read the data from the file named *filename* and convert
TeX's internal units to units of *dpi* per inch.
*dpi* only sets the units and does not limit the resolution.
Use None to return TeX's internal units.
"""
_log.debug('Dvi: %s', filename)
self.file = open(filename, 'rb')
self.dpi = dpi
self.fonts = {}
self.state = _dvistate.pre
self.baseline = self._get_baseline(filename)
def _get_baseline(self, filename):
if rcParams['text.latex.preview']:
base, ext = os.path.splitext(filename)
baseline_filename = base + ".baseline"
if os.path.exists(baseline_filename):
with open(baseline_filename, 'rb') as fd:
l = fd.read().split()
height, depth, width = l
return float(depth)
return None
def __enter__(self):
"""
Context manager enter method, does nothing.
"""
return self
def __exit__(self, etype, evalue, etrace):
"""
Context manager exit method, closes the underlying file if it is open.
"""
self.close()
def __iter__(self):
"""
Iterate through the pages of the file.
Yields
------
Page
Details of all the text and box objects on the page.
The Page tuple contains lists of Text and Box tuples and
the page dimensions, and the Text and Box tuples contain
coordinates transformed into a standard Cartesian
coordinate system at the dpi value given when initializing.
The coordinates are floating point numbers, but otherwise
precision is not lost and coordinate values are not clipped to
integers.
"""
while True:
have_page = self._read()
if have_page:
yield self._output()
else:
break
def close(self):
"""
Close the underlying file if it is open.
"""
if not self.file.closed:
self.file.close()
def _output(self):
"""
Output the text and boxes belonging to the most recent page.
page = dvi._output()
"""
minx, miny, maxx, maxy = np.inf, np.inf, -np.inf, -np.inf
maxy_pure = -np.inf
for elt in self.text + self.boxes:
if isinstance(elt, Box):
x, y, h, w = elt
e = 0 # zero depth
else: # glyph
x, y, font, g, w = elt
h, e = font._height_depth_of(g)
minx = min(minx, x)
miny = min(miny, y - h)
maxx = max(maxx, x + w)
maxy = max(maxy, y + e)
maxy_pure = max(maxy_pure, y)
if self.dpi is None:
# special case for ease of debugging: output raw dvi coordinates
return Page(text=self.text, boxes=self.boxes,
width=maxx-minx, height=maxy_pure-miny,
descent=maxy-maxy_pure)
# convert from TeX's "scaled points" to dpi units
d = self.dpi / (72.27 * 2**16)
if self.baseline is None:
descent = (maxy - maxy_pure) * d
else:
descent = self.baseline
text = [Text((x-minx)*d, (maxy-y)*d - descent, f, g, w*d)
for (x, y, f, g, w) in self.text]
boxes = [Box((x-minx)*d, (maxy-y)*d - descent, h*d, w*d)
for (x, y, h, w) in self.boxes]
return Page(text=text, boxes=boxes, width=(maxx-minx)*d,
height=(maxy_pure-miny)*d, descent=descent)
def _read(self):
"""
Read one page from the file. Return True if successful,
False if there were no more pages.
"""
while True:
byte = ord(self.file.read(1)[0])
self._dtable[byte](self, byte)
if byte == 140: # end of page
return True
if self.state == _dvistate.post_post: # end of file
self.close()
return False
def _arg(self, nbytes, signed=False):
"""
Read and return an integer argument *nbytes* long.
Signedness is determined by the *signed* keyword.
"""
str = self.file.read(nbytes)
value = ord(str[0])
if signed and value >= 0x80:
value = value - 0x100
for i in range(1, nbytes):
value = 0x100*value + ord(str[i])
return value
@_dispatch(min=0, max=127, state=_dvistate.inpage)
def _set_char_immediate(self, char):
self._put_char_real(char)
self.h += self.fonts[self.f]._width_of(char)
@_dispatch(min=128, max=131, state=_dvistate.inpage, args=('olen1',))
def _set_char(self, char):
self._put_char_real(char)
self.h += self.fonts[self.f]._width_of(char)
@_dispatch(132, state=_dvistate.inpage, args=('s4', 's4'))
def _set_rule(self, a, b):
self._put_rule_real(a, b)
self.h += b
@_dispatch(min=133, max=136, state=_dvistate.inpage, args=('olen1',))
def _put_char(self, char):
self._put_char_real(char)
def _put_char_real(self, char):
font = self.fonts[self.f]
if font._vf is None:
self.text.append(Text(self.h, self.v, font, char,
font._width_of(char)))
else:
scale = font._scale
for x, y, f, g, w in font._vf[char].text:
newf = DviFont(scale=_mul2012(scale, f._scale),
tfm=f._tfm, texname=f.texname, vf=f._vf)
self.text.append(Text(self.h + _mul2012(x, scale),
self.v + _mul2012(y, scale),
newf, g, newf._width_of(g)))
self.boxes.extend([Box(self.h + _mul2012(x, scale),
self.v + _mul2012(y, scale),
_mul2012(a, scale), _mul2012(b, scale))
for x, y, a, b in font._vf[char].boxes])
@_dispatch(137, state=_dvistate.inpage, args=('s4', 's4'))
def _put_rule(self, a, b):
self._put_rule_real(a, b)
def _put_rule_real(self, a, b):
if a > 0 and b > 0:
self.boxes.append(Box(self.h, self.v, a, b))
@_dispatch(138)
def _nop(self, _):
pass
@_dispatch(139, state=_dvistate.outer, args=('s4',)*11)
def _bop(self, c0, c1, c2, c3, c4, c5, c6, c7, c8, c9, p):
self.state = _dvistate.inpage
self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0
self.stack = []
self.text = [] # list of Text objects
self.boxes = [] # list of Box objects
@_dispatch(140, state=_dvistate.inpage)
def _eop(self, _):
self.state = _dvistate.outer
del self.h, self.v, self.w, self.x, self.y, self.z, self.stack
@_dispatch(141, state=_dvistate.inpage)
def _push(self, _):
self.stack.append((self.h, self.v, self.w, self.x, self.y, self.z))
@_dispatch(142, state=_dvistate.inpage)
def _pop(self, _):
self.h, self.v, self.w, self.x, self.y, self.z = self.stack.pop()
@_dispatch(min=143, max=146, state=_dvistate.inpage, args=('slen1',))
def _right(self, b):
self.h += b
@_dispatch(min=147, max=151, state=_dvistate.inpage, args=('slen',))
def _right_w(self, new_w):
if new_w is not None:
self.w = new_w
self.h += self.w
@_dispatch(min=152, max=156, state=_dvistate.inpage, args=('slen',))
def _right_x(self, new_x):
if new_x is not None:
self.x = new_x
self.h += self.x
@_dispatch(min=157, max=160, state=_dvistate.inpage, args=('slen1',))
def _down(self, a):
self.v += a
@_dispatch(min=161, max=165, state=_dvistate.inpage, args=('slen',))
def _down_y(self, new_y):
if new_y is not None:
self.y = new_y
self.v += self.y
@_dispatch(min=166, max=170, state=_dvistate.inpage, args=('slen',))
def _down_z(self, new_z):
if new_z is not None:
self.z = new_z
self.v += self.z
@_dispatch(min=171, max=234, state=_dvistate.inpage)
def _fnt_num_immediate(self, k):
self.f = k
@_dispatch(min=235, max=238, state=_dvistate.inpage, args=('olen1',))
def _fnt_num(self, new_f):
self.f = new_f
@_dispatch(min=239, max=242, args=('ulen1',))
def _xxx(self, datalen):
special = self.file.read(datalen)
if six.PY3:
chr_ = chr
else:
def chr_(x):
return x
_log.debug(
'Dvi._xxx: encountered special: %s',
''.join([chr_(ch) if 32 <= ord(ch) < 127 else '<%02x>' % ord(ch)
for ch in special]))
@_dispatch(min=243, max=246, args=('olen1', 'u4', 'u4', 'u4', 'u1', 'u1'))
def _fnt_def(self, k, c, s, d, a, l):
self._fnt_def_real(k, c, s, d, a, l)
def _fnt_def_real(self, k, c, s, d, a, l):
n = self.file.read(a + l)
fontname = n[-l:].decode('ascii')
tfm = _tfmfile(fontname)
if tfm is None:
if six.PY2:
error_class = OSError
else:
error_class = FileNotFoundError
raise error_class("missing font metrics file: %s" % fontname)
if c != 0 and tfm.checksum != 0 and c != tfm.checksum:
raise ValueError('tfm checksum mismatch: %s' % n)
vf = _vffile(fontname)
self.fonts[k] = DviFont(scale=s, tfm=tfm, texname=n, vf=vf)
@_dispatch(247, state=_dvistate.pre, args=('u1', 'u4', 'u4', 'u4', 'u1'))
def _pre(self, i, num, den, mag, k):
comment = self.file.read(k)
if i != 2:
raise ValueError("Unknown dvi format %d" % i)
if num != 25400000 or den != 7227 * 2**16:
raise ValueError("nonstandard units in dvi file")
# meaning: TeX always uses those exact values, so it
# should be enough for us to support those
# (There are 72.27 pt to an inch so 7227 pt =
# 7227 * 2**16 sp to 100 in. The numerator is multiplied
# by 10^5 to get units of 10**-7 meters.)
if mag != 1000:
raise ValueError("nonstandard magnification in dvi file")
# meaning: LaTeX seems to frown on setting \mag, so
# I think we can assume this is constant
self.state = _dvistate.outer
@_dispatch(248, state=_dvistate.outer)
def _post(self, _):
self.state = _dvistate.post_post
# TODO: actually read the postamble and finale?
# currently post_post just triggers closing the file
@_dispatch(249)
def _post_post(self, _):
raise NotImplementedError
@_dispatch(min=250, max=255)
def _malformed(self, offset):
raise ValueError("unknown command: byte %d", 250 + offset)
class DviFont(object):
"""
Encapsulation of a font that a DVI file can refer to.
This class holds a font's texname and size, supports comparison,
and knows the widths of glyphs in the same units as the AFM file.
There are also internal attributes (for use by dviread.py) that
are *not* used for comparison.
The size is in Adobe points (converted from TeX points).
Parameters
----------
scale : float
Factor by which the font is scaled from its natural size.
tfm : Tfm
TeX font metrics for this font
texname : bytes
Name of the font as used internally by TeX and friends, as an
ASCII bytestring. This is usually very different from any external
font names, and :class:`dviread.PsfontsMap` can be used to find
the external name of the font.
vf : Vf
A TeX "virtual font" file, or None if this font is not virtual.
Attributes
----------
texname : bytes
size : float
Size of the font in Adobe points, converted from the slightly
smaller TeX points.
widths : list
Widths of glyphs in glyph-space units, typically 1/1000ths of
the point size.
"""
__slots__ = ('texname', 'size', 'widths', '_scale', '_vf', '_tfm')
def __init__(self, scale, tfm, texname, vf):
if not isinstance(texname, bytes):
raise ValueError("texname must be a bytestring, got %s"
% type(texname))
self._scale, self._tfm, self.texname, self._vf = \
scale, tfm, texname, vf
self.size = scale * (72.0 / (72.27 * 2**16))
try:
nchars = max(tfm.width) + 1
except ValueError:
nchars = 0
self.widths = [(1000*tfm.width.get(char, 0)) >> 20
for char in xrange(nchars)]
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.texname == other.texname and self.size == other.size
def __ne__(self, other):
return not self.__eq__(other)
def _width_of(self, char):
"""
Width of char in dvi units. For internal use by dviread.py.
"""
width = self._tfm.width.get(char, None)
if width is not None:
return _mul2012(width, self._scale)
_log.debug('No width for char %d in font %s.', char, self.texname)
return 0
def _height_depth_of(self, char):
"""
Height and depth of char in dvi units. For internal use by dviread.py.
"""
result = []
for metric, name in ((self._tfm.height, "height"),
(self._tfm.depth, "depth")):
value = metric.get(char, None)
if value is None:
_log.debug('No %s for char %d in font %s',
name, char, self.texname)
result.append(0)
else:
result.append(_mul2012(value, self._scale))
return result
class Vf(Dvi):
"""
A virtual font (\\*.vf file) containing subroutines for dvi files.
Usage::
vf = Vf(filename)
glyph = vf[code]
glyph.text, glyph.boxes, glyph.width
Parameters
----------
filename : string or bytestring
Notes
-----
The virtual font format is a derivative of dvi:
http://mirrors.ctan.org/info/knuth/virtual-fonts
This class reuses some of the machinery of `Dvi`
but replaces the `_read` loop and dispatch mechanism.
"""
def __init__(self, filename):
Dvi.__init__(self, filename, 0)
try:
self._first_font = None
self._chars = {}
self._read()
finally:
self.close()
def __getitem__(self, code):
return self._chars[code]
def _read(self):
"""
Read one page from the file. Return True if successful,
False if there were no more pages.
"""
packet_char, packet_ends = None, None
packet_len, packet_width = None, None
while True:
byte = ord(self.file.read(1)[0])
# If we are in a packet, execute the dvi instructions
if self.state == _dvistate.inpage:
byte_at = self.file.tell()-1
if byte_at == packet_ends:
self._finalize_packet(packet_char, packet_width)
packet_len, packet_char, packet_width = None, None, None
# fall through to out-of-packet code
elif byte_at > packet_ends:
raise ValueError("Packet length mismatch in vf file")
else:
if byte in (139, 140) or byte >= 243:
raise ValueError(
"Inappropriate opcode %d in vf file" % byte)
Dvi._dtable[byte](self, byte)
continue
# We are outside a packet
if byte < 242: # a short packet (length given by byte)
packet_len = byte
packet_char, packet_width = self._arg(1), self._arg(3)
packet_ends = self._init_packet(byte)
self.state = _dvistate.inpage
elif byte == 242: # a long packet
packet_len, packet_char, packet_width = \
[self._arg(x) for x in (4, 4, 4)]
self._init_packet(packet_len)
elif 243 <= byte <= 246:
k = self._arg(byte - 242, byte == 246)
c, s, d, a, l = [self._arg(x) for x in (4, 4, 4, 1, 1)]
self._fnt_def_real(k, c, s, d, a, l)
if self._first_font is None:
self._first_font = k
elif byte == 247: # preamble
i, k = self._arg(1), self._arg(1)
x = self.file.read(k)
cs, ds = self._arg(4), self._arg(4)
self._pre(i, x, cs, ds)
elif byte == 248: # postamble (just some number of 248s)
break
else:
raise ValueError("unknown vf opcode %d" % byte)
def _init_packet(self, pl):
if self.state != _dvistate.outer:
raise ValueError("Misplaced packet in vf file")
self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0
self.stack, self.text, self.boxes = [], [], []
self.f = self._first_font
return self.file.tell() + pl
def _finalize_packet(self, packet_char, packet_width):
self._chars[packet_char] = Page(
text=self.text, boxes=self.boxes, width=packet_width,
height=None, descent=None)
self.state = _dvistate.outer
def _pre(self, i, x, cs, ds):
if self.state != _dvistate.pre:
raise ValueError("pre command in middle of vf file")
if i != 202:
raise ValueError("Unknown vf format %d" % i)
if len(x):
_log.debug('vf file comment: %s', x)
self.state = _dvistate.outer
# cs = checksum, ds = design size
def _fix2comp(num):
"""
Convert from two's complement to negative.
"""
assert 0 <= num < 2**32
if num & 2**31:
return num - 2**32
else:
return num
def _mul2012(num1, num2):
"""
Multiply two numbers in 20.12 fixed point format.
"""
# Separated into a function because >> has surprising precedence
return (num1*num2) >> 20
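# Hedged worked example (not part of the original module): with the >> 20
# shift above, the integer 2**20 plays the role of 1.0, so the encodings of
# 3.0 and 0.5 multiply to the encoding of 1.5, and _fix2comp maps the 32-bit
# pattern 0xffffffff back to -1.
assert _mul2012(3 << 20, 1 << 19) == 3 << 19   # 3.0 * 0.5 == 1.5
assert _fix2comp(0xffffffff) == -1
assert _fix2comp(5) == 5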
class Tfm(object):
"""
A TeX Font Metric file.
This implementation covers only the bare minimum needed by the Dvi class.
Parameters
----------
filename : string or bytestring
Attributes
----------
checksum : int
Used for verifying against the dvi file.
design_size : int
Design size of the font (unknown units)
width, height, depth : dict
Dimensions of each character, need to be scaled by the factor
specified in the dvi file. These are dicts because indexing may
not start from 0.
"""
__slots__ = ('checksum', 'design_size', 'width', 'height', 'depth')
def __init__(self, filename):
_log.debug('opening tfm file %s', filename)
with open(filename, 'rb') as file:
header1 = file.read(24)
lh, bc, ec, nw, nh, nd = \
struct.unpack('!6H', header1[2:14])
_log.debug('lh=%d, bc=%d, ec=%d, nw=%d, nh=%d, nd=%d',
lh, bc, ec, nw, nh, nd)
header2 = file.read(4*lh)
self.checksum, self.design_size = \
struct.unpack('!2I', header2[:8])
# there is also encoding information etc.
char_info = file.read(4*(ec-bc+1))
widths = file.read(4*nw)
heights = file.read(4*nh)
depths = file.read(4*nd)
self.width, self.height, self.depth = {}, {}, {}
widths, heights, depths = \
[struct.unpack('!%dI' % (len(x)/4), x)
for x in (widths, heights, depths)]
for idx, char in enumerate(xrange(bc, ec+1)):
byte0 = ord(char_info[4*idx])
byte1 = ord(char_info[4*idx+1])
self.width[char] = _fix2comp(widths[byte0])
self.height[char] = _fix2comp(heights[byte1 >> 4])
self.depth[char] = _fix2comp(depths[byte1 & 0xf])
PsFont = namedtuple('Font', 'texname psname effects encoding filename')
class PsfontsMap(object):
"""
A psfonts.map formatted file, mapping TeX fonts to PS fonts.
Usage::
>>> map = PsfontsMap(find_tex_file('pdftex.map'))
>>> entry = map[b'ptmbo8r']
>>> entry.texname
b'ptmbo8r'
>>> entry.psname
b'Times-Bold'
>>> entry.encoding
'/usr/local/texlive/2008/texmf-dist/fonts/enc/dvips/base/8r.enc'
>>> entry.effects
{'slant': 0.16700000000000001}
>>> entry.filename
Parameters
----------
filename : string or bytestring
Notes
-----
For historical reasons, TeX knows many Type-1 fonts by different
names than the outside world. (For one thing, the names have to
fit in eight characters.) Also, TeX's native fonts are not Type-1
but Metafont, which is nontrivial to convert to PostScript except
as a bitmap. While high-quality conversions to Type-1 format exist
and are shipped with modern TeX distributions, we need to know
which Type-1 fonts are the counterparts of which native fonts. For
these reasons a mapping is needed from internal font names to font
file names.
A texmf tree typically includes mapping files called e.g.
:file:`psfonts.map`, :file:`pdftex.map`, or :file:`dvipdfm.map`.
The file :file:`psfonts.map` is used by :program:`dvips`,
:file:`pdftex.map` by :program:`pdfTeX`, and :file:`dvipdfm.map`
by :program:`dvipdfm`. :file:`psfonts.map` might avoid embedding
the 35 PostScript fonts (i.e., have no filename for them, as in
the Times-Bold example above), while the pdf-related files perhaps
only avoid the "Base 14" pdf fonts. But the user may have
configured these files differently.
"""
__slots__ = ('_font', '_filename')
def __init__(self, filename):
self._font = {}
self._filename = filename
if six.PY3 and isinstance(filename, bytes):
encoding = sys.getfilesystemencoding() or 'utf-8'
self._filename = filename.decode(encoding, errors='replace')
with open(filename, 'rb') as file:
self._parse(file)
def __getitem__(self, texname):
assert isinstance(texname, bytes)
try:
result = self._font[texname]
except KeyError:
fmt = ('A PostScript file for the font whose TeX name is "{0}" '
'could not be found in the file "{1}". The dviread module '
'can only handle fonts that have an associated PostScript '
'font file. '
'This problem can often be solved by installing '
'a suitable PostScript font package in your (TeX) '
'package manager.')
msg = fmt.format(texname.decode('ascii'), self._filename)
msg = textwrap.fill(msg, break_on_hyphens=False,
break_long_words=False)
_log.info(msg)
raise
fn, enc = result.filename, result.encoding
if fn is not None and not fn.startswith(b'/'):
fn = find_tex_file(fn)
if enc is not None and not enc.startswith(b'/'):
enc = find_tex_file(result.encoding)
return result._replace(filename=fn, encoding=enc)
def _parse(self, file):
"""
Parse the font mapping file.
The format is, AFAIK: texname fontname [effects and filenames]
Effects are PostScript snippets like ".177 SlantFont",
filenames begin with one or two less-than signs. A filename
ending in enc is an encoding file, other filenames are font
files. This can be overridden with a left bracket: <[foobar
indicates an encoding file named foobar.
There is some difference between <foo.pfb and <<bar.pfb in
subsetting, but I have no example of << in my TeX installation.
"""
# If the map file specifies multiple encodings for a font, we
# follow pdfTeX in choosing the last one specified. Such
# entries are probably mistakes but they have occurred.
# http://tex.stackexchange.com/questions/10826/
# http://article.gmane.org/gmane.comp.tex.pdftex/4914
empty_re = re.compile(br'%|\s*$')
word_re = re.compile(
br'''(?x) (?:
"<\[ (?P<enc1> [^"]+ )" | # quoted encoding marked by [
"< (?P<enc2> [^"]+.enc)" | # quoted encoding, ends in .enc
"<<? (?P<file1> [^"]+ )" | # quoted font file name
" (?P<eff1> [^"]+ )" | # quoted effects or font name
<\[ (?P<enc3> \S+ ) | # encoding marked by [
< (?P<enc4> \S+ .enc) | # encoding, ends in .enc
<<? (?P<file2> \S+ ) | # font file name
(?P<eff2> \S+ ) # effects or font name
)''')
effects_re = re.compile(
br'''(?x) (?P<slant> -?[0-9]*(?:\.[0-9]+)) \s* SlantFont
| (?P<extend>-?[0-9]*(?:\.[0-9]+)) \s* ExtendFont''')
lines = (line.strip()
for line in file
if not empty_re.match(line))
for line in lines:
effects, encoding, filename = b'', None, None
words = word_re.finditer(line)
# The named groups are mutually exclusive and are
# referenced below at an estimated order of probability of
# occurrence based on looking at my copy of pdftex.map.
# The font names are probably unquoted:
w = next(words)
texname = w.group('eff2') or w.group('eff1')
w = next(words)
psname = w.group('eff2') or w.group('eff1')
for w in words:
# Any effects are almost always quoted:
eff = w.group('eff1') or w.group('eff2')
if eff:
effects = eff
continue
# Encoding files usually have the .enc suffix
# and almost never need quoting:
enc = (w.group('enc4') or w.group('enc3') or
w.group('enc2') or w.group('enc1'))
if enc:
if encoding is not None:
_log.debug('Multiple encodings for %s = %s',
texname, psname)
encoding = enc
continue
# File names are probably unquoted:
filename = w.group('file2') or w.group('file1')
effects_dict = {}
for match in effects_re.finditer(effects):
slant = match.group('slant')
if slant:
effects_dict['slant'] = float(slant)
else:
effects_dict['extend'] = float(match.group('extend'))
self._font[texname] = PsFont(
texname=texname, psname=psname, effects=effects_dict,
encoding=encoding, filename=filename)
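# Hedged illustration (the map line below is hypothetical, loosely modelled on
# the ptmbo8r example in the class docstring, and the demo instance bypasses
# __init__ only to avoid touching the filesystem): feeding one psfonts.map
# style line through _parse shows how the regular expressions above split it
# into TeX name, PS name, effects and encoding.
from io import BytesIO

_demo_map = PsfontsMap.__new__(PsfontsMap)
_demo_map._font = {}
_demo_map._filename = '<demo>'
_demo_map._parse(BytesIO(b'ptmbo8r Times-Bold ".167 SlantFont" <8r.enc\n'))
_demo_entry = _demo_map._font[b'ptmbo8r']
print(_demo_entry.psname, _demo_entry.effects, _demo_entry.encoding)
# -> b'Times-Bold' {'slant': 0.167} b'8r.enc'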
class Encoding(object):
"""
Parses a \\*.enc file referenced from a psfonts.map style file.
The format this class understands is a very limited subset of
PostScript.
Usage (subject to change)::
for name in Encoding(filename):
whatever(name)
Parameters
----------
filename : string or bytestring
Attributes
----------
encoding : list
List of character names
"""
__slots__ = ('encoding',)
def __init__(self, filename):
with open(filename, 'rb') as file:
_log.debug('Parsing TeX encoding %s', filename)
self.encoding = self._parse(file)
_log.debug('Result: %s', self.encoding)
def __iter__(self):
for name in self.encoding:
yield name
def _parse(self, file):
result = []
lines = (line.split(b'%', 1)[0].strip() for line in file)
data = b''.join(lines)
beginning = data.find(b'[')
if beginning < 0:
raise ValueError("Cannot locate beginning of encoding in {}"
.format(file))
data = data[beginning:]
end = data.find(b']')
if end < 0:
raise ValueError("Cannot locate end of encoding in {}"
.format(file))
data = data[:end]
return re.findall(br'/([^][{}<>\s]+)', data)
def find_tex_file(filename, format=None):
"""
Find a file in the texmf tree.
Calls :program:`kpsewhich` which is an interface to the kpathsea
library [1]_. Most existing TeX distributions on Unix-like systems use
kpathsea. It is also available as part of MikTeX, a popular
distribution on Windows.
Parameters
----------
filename : string or bytestring
format : string or bytestring
Used as the value of the `--format` option to :program:`kpsewhich`.
Could be e.g. 'tfm' or 'vf' to limit the search to that type of files.
References
----------
.. [1] `Kpathsea documentation <http://www.tug.org/kpathsea/>`_
The library that :program:`kpsewhich` is part of.
"""
if six.PY3:
# we expect these to always be ascii encoded, but use utf-8
# out of caution
if isinstance(filename, bytes):
filename = filename.decode('utf-8', errors='replace')
if isinstance(format, bytes):
format = format.decode('utf-8', errors='replace')
cmd = ['kpsewhich']
if format is not None:
cmd += ['--format=' + format]
cmd += [filename]
_log.debug('find_tex_file(%s): %s', filename, cmd)
# stderr is unused, but reading it avoids a subprocess optimization
# that breaks EINTR handling in some Python versions:
# http://bugs.python.org/issue12493
# https://github.com/matplotlib/matplotlib/issues/633
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
result = pipe.communicate()[0].rstrip()
_log.debug('find_tex_file result: %s', result)
return result.decode('ascii')
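# Hedged usage sketch (assumes a TeX installation with kpsewhich on the PATH;
# 'cmr10' is only a commonly shipped Computer Modern font used for
# illustration): resolve a metrics file and load it with the Tfm class above.
_demo_tfm_path = find_tex_file('cmr10.tfm', format='tfm')
if _demo_tfm_path:
    _demo_tfm = Tfm(_demo_tfm_path)
    print('cmr10: checksum=%#x, design_size=%d'
          % (_demo_tfm.checksum, _demo_tfm.design_size))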
# With multiple text objects per figure (e.g., tick labels) we may end
# up reading the same tfm and vf files many times, so we implement a
# simple cache. TODO: is this worth making persistent?
@lru_cache()
def _fontfile(cls, suffix, texname):
filename = find_tex_file(texname + suffix)
return cls(filename) if filename else None
_tfmfile = partial(_fontfile, Tfm, ".tfm")
_vffile = partial(_fontfile, Vf, ".vf")
if __name__ == '__main__':
import sys
fname = sys.argv[1]
try:
dpi = float(sys.argv[2])
except IndexError:
dpi = None
with Dvi(fname, dpi) as dvi:
fontmap = PsfontsMap(find_tex_file('pdftex.map'))
for page in dvi:
print('=== new page ===')
fPrev = None
for x, y, f, c, w in page.text:
if f != fPrev:
print('font', f.texname, 'scaled', f._scale/pow(2.0, 20))
fPrev = f
print(x, y, c, 32 <= c < 128 and chr(c) or '.', w)
for x, y, w, h in page.boxes:
print(x, y, 'BOX', w, h)
| 38,206 | 34.24631 | 79 | py |
cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/legend.py |
"""
The legend module defines the Legend class, which is responsible for
drawing legends associated with axes and/or figures.
.. important::
It is unlikely that you would ever create a Legend instance manually.
Most users would normally create a legend via the
:meth:`~matplotlib.axes.Axes.legend` function. For more details on legends
there is also a :ref:`legend guide
<sphx_glr_tutorials_intermediate_legend_guide.py>`.
The Legend class can be considered as a container of legend handles
and legend texts. Creation of corresponding legend handles from the
plot elements in the axes or figures (e.g., lines, patches, etc.) are
specified by the handler map, which defines the mapping between the
plot elements and the legend handlers to be used (the default legend
handlers are defined in the :mod:`~matplotlib.legend_handler` module).
Note that not all kinds of artists are supported by the legend by default,
but it is possible to extend the legend handler's capabilities to support
arbitrary objects. See the :ref:`legend guide
<sphx_glr_tutorials_intermediate_legend_guide.py>` for more information.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import logging
import warnings
import numpy as np
from matplotlib import rcParams
from matplotlib import docstring
from matplotlib.artist import Artist, allow_rasterization
from matplotlib.cbook import silent_list, is_hashable
import matplotlib.colors as colors
from matplotlib.font_manager import FontProperties
from matplotlib.lines import Line2D
from matplotlib.patches import Patch, Rectangle, Shadow, FancyBboxPatch
from matplotlib.collections import (LineCollection, RegularPolyCollection,
CircleCollection, PathCollection,
PolyCollection)
from matplotlib.transforms import Bbox, BboxBase, TransformedBbox
from matplotlib.transforms import BboxTransformTo, BboxTransformFrom
from matplotlib.offsetbox import HPacker, VPacker, TextArea, DrawingArea
from matplotlib.offsetbox import DraggableOffsetBox
from matplotlib.container import ErrorbarContainer, BarContainer, StemContainer
from . import legend_handler
class DraggableLegend(DraggableOffsetBox):
def __init__(self, legend, use_blit=False, update="loc"):
"""
Parameters
----------
update : string
If "loc", update *loc* parameter of legend upon finalizing.
If "bbox", update *bbox_to_anchor* parameter.
"""
self.legend = legend
if update in ["loc", "bbox"]:
self._update = update
else:
raise ValueError("update parameter '%s' is not supported." %
update)
DraggableOffsetBox.__init__(self, legend, legend._legend_box,
use_blit=use_blit)
def artist_picker(self, legend, evt):
return self.legend.contains(evt)
def finalize_offset(self):
loc_in_canvas = self.get_loc_in_canvas()
if self._update == "loc":
self._update_loc(loc_in_canvas)
elif self._update == "bbox":
self._update_bbox_to_anchor(loc_in_canvas)
else:
raise RuntimeError("update parameter '%s' is not supported." %
self._update)
def _update_loc(self, loc_in_canvas):
bbox = self.legend.get_bbox_to_anchor()
# if bbox has zero width or height, the transformation is
# ill-defined. Fall back to the default bbox_to_anchor.
if bbox.width == 0 or bbox.height == 0:
self.legend.set_bbox_to_anchor(None)
bbox = self.legend.get_bbox_to_anchor()
_bbox_transform = BboxTransformFrom(bbox)
self.legend._loc = tuple(
_bbox_transform.transform_point(loc_in_canvas)
)
def _update_bbox_to_anchor(self, loc_in_canvas):
tr = self.legend.axes.transAxes
loc_in_bbox = tr.transform_point(loc_in_canvas)
self.legend.set_bbox_to_anchor(loc_in_bbox)
_legend_kw_doc = '''
loc : int or string or pair of floats, default: 'upper right'
The location of the legend. Possible codes are:
=============== =============
Location String Location Code
=============== =============
'best' 0
'upper right' 1
'upper left' 2
'lower left' 3
'lower right' 4
'right' 5
'center left' 6
'center right' 7
'lower center' 8
'upper center' 9
'center' 10
=============== =============
Alternatively can be a 2-tuple giving ``x, y`` of the lower-left
corner of the legend in axes coordinates (in which case
``bbox_to_anchor`` will be ignored).
bbox_to_anchor : `.BboxBase` or pair of floats
Specify any arbitrary location for the legend in `bbox_transform`
coordinates (default Axes coordinates).
For example, to put the legend's upper right hand corner in the
center of the axes the following keywords can be used::
loc='upper right', bbox_to_anchor=(0.5, 0.5)
ncol : integer
The number of columns that the legend has. Default is 1.
prop : None or :class:`matplotlib.font_manager.FontProperties` or dict
The font properties of the legend. If None (default), the current
:data:`matplotlib.rcParams` will be used.
fontsize : int or float or {'xx-small', 'x-small', 'small', 'medium', \
'large', 'x-large', 'xx-large'}
Controls the font size of the legend. If the value is numeric the
size will be the absolute font size in points. String values are
relative to the current default font size. This argument is only
used if `prop` is not specified.
numpoints : None or int
The number of marker points in the legend when creating a legend
entry for a `.Line2D` (line).
Default is ``None``, which will take the value from
:rc:`legend.numpoints`.
scatterpoints : None or int
The number of marker points in the legend when creating
a legend entry for a `.PathCollection` (scatter plot).
Default is ``None``, which will take the value from
:rc:`legend.scatterpoints`.
scatteryoffsets : iterable of floats
The vertical offset (relative to the font size) for the markers
created for a scatter plot legend entry. 0.0 is at the base of the
legend text, and 1.0 is at the top. To draw all markers at the
same height, set to ``[0.5]``. Default is ``[0.375, 0.5, 0.3125]``.
markerscale : None or int or float
The relative size of legend markers compared with the originally
drawn ones.
Default is ``None``, which will take the value from
:rc:`legend.markerscale`.
markerfirst : bool
If *True*, legend marker is placed to the left of the legend label.
If *False*, legend marker is placed to the right of the legend
label.
Default is *True*.
frameon : None or bool
Control whether the legend should be drawn on a patch
(frame).
Default is ``None``, which will take the value from
:rc:`legend.frameon`.
fancybox : None or bool
Control whether round edges should be enabled around the
:class:`~matplotlib.patches.FancyBboxPatch` which makes up the
legend's background.
Default is ``None``, which will take the value from
:rc:`legend.fancybox`.
shadow : None or bool
Control whether to draw a shadow behind the legend.
Default is ``None``, which will take the value from
:rc:`legend.shadow`.
framealpha : None or float
Control the alpha transparency of the legend's background.
Default is ``None``, which will take the value from
:rc:`legend.framealpha`. If shadow is activated and
*framealpha* is ``None``, the default value is ignored.
facecolor : None or "inherit" or a color spec
Control the legend's background color.
Default is ``None``, which will take the value from
:rc:`legend.facecolor`. If ``"inherit"``, it will take
:rc:`axes.facecolor`.
edgecolor : None or "inherit" or a color spec
Control the legend's background patch edge color.
Default is ``None``, which will take the value from
:rc:`legend.edgecolor` If ``"inherit"``, it will take
:rc:`axes.edgecolor`.
mode : {"expand", None}
If `mode` is set to ``"expand"`` the legend will be horizontally
expanded to fill the axes area (or `bbox_to_anchor` if it defines
the legend's size).
bbox_transform : None or :class:`matplotlib.transforms.Transform`
The transform for the bounding box (`bbox_to_anchor`). For a value
of ``None`` (default) the Axes'
:data:`~matplotlib.axes.Axes.transAxes` transform will be used.
title : str or None
The legend's title. Default is no title (``None``).
borderpad : float or None
The fractional whitespace inside the legend border.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.borderpad`.
labelspacing : float or None
The vertical space between the legend entries.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.labelspacing`.
handlelength : float or None
The length of the legend handles.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.handlelength`.
handletextpad : float or None
The pad between the legend handle and text.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.handletextpad`.
borderaxespad : float or None
The pad between the axes and legend border.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.borderaxespad`.
columnspacing : float or None
The spacing between columns.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.columnspacing`.
handler_map : dict or None
The custom dictionary mapping instances or types to a legend
handler. This `handler_map` updates the default handler map
found at :func:`matplotlib.legend.Legend.get_legend_handler_map`.
'''
docstring.interpd.update(_legend_kw_doc=_legend_kw_doc)
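# Hedged usage sketch (assumes the standard pyplot API and is meant as a
# stand-alone script): the keyword arguments documented in _legend_kw_doc
# above are normally passed through Axes.legend rather than by constructing a
# Legend instance directly.
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1], label='rising')
ax.plot([0, 1], [1, 0], label='falling')
ax.legend(loc='lower center', ncol=2, frameon=True,
          bbox_to_anchor=(0.5, 1.02), borderaxespad=0.)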
class Legend(Artist):
"""
Place a legend on the axes at location loc.
"""
codes = {'best': 0, # only implemented for axes legends
'upper right': 1,
'upper left': 2,
'lower left': 3,
'lower right': 4,
'right': 5,
'center left': 6,
'center right': 7,
'lower center': 8,
'upper center': 9,
'center': 10,
}
zorder = 5
def __str__(self):
return "Legend"
@docstring.dedent_interpd
def __init__(self, parent, handles, labels,
loc=None,
numpoints=None, # the number of points in the legend line
markerscale=None, # the relative size of legend markers
# vs. original
markerfirst=True, # controls ordering (left-to-right) of
# legend marker and label
scatterpoints=None, # number of scatter points
scatteryoffsets=None,
prop=None, # properties for the legend texts
fontsize=None, # keyword to set font size directly
# spacing & pad defined as a fraction of the font-size
borderpad=None, # the whitespace inside the legend border
labelspacing=None, # the vertical space between the legend
# entries
handlelength=None, # the length of the legend handles
handleheight=None, # the height of the legend handles
handletextpad=None, # the pad between the legend handle
# and text
borderaxespad=None, # the pad between the axes and legend
# border
columnspacing=None, # spacing between columns
ncol=1, # number of columns
mode=None, # mode for horizontal distribution of columns.
# None, "expand"
fancybox=None, # True use a fancy box, false use a rounded
# box, none use rc
shadow=None,
title=None, # set a title for the legend
framealpha=None, # set frame alpha
edgecolor=None, # frame patch edgecolor
facecolor=None, # frame patch facecolor
bbox_to_anchor=None, # bbox that the legend will be anchored.
bbox_transform=None, # transform for the bbox
frameon=None, # draw frame
handler_map=None,
):
"""
Parameters
----------
parent : `.Axes` or `.Figure`
The artist that contains the legend.
handles : sequence of `.Artist`
A list of Artists (lines, patches) to be added to the legend.
labels : sequence of strings
A list of labels to show next to the artists. The length of handles
and labels should be the same. If they are not, they are truncated
to the smaller of both lengths.
Other Parameters
----------------
loc : int or string or pair of floats, default: 'upper right'
The location of the legend. Possible codes are:
=============== =============
Location String Location Code
=============== =============
'best' 0
'upper right' 1
'upper left' 2
'lower left' 3
'lower right' 4
'right' 5
'center left' 6
'center right' 7
'lower center' 8
'upper center' 9
'center' 10
=============== =============
Alternatively can be a 2-tuple giving ``x, y`` of the lower-left
corner of the legend in axes coordinates (in which case
``bbox_to_anchor`` will be ignored).
bbox_to_anchor : `.BboxBase` or pair of floats
Specify any arbitrary location for the legend in `bbox_transform`
coordinates (default Axes coordinates).
For example, to put the legend's upper right hand corner in the
center of the axes the following keywords can be used::
loc='upper right', bbox_to_anchor=(0.5, 0.5)
ncol : integer
The number of columns that the legend has. Default is 1.
prop : None or :class:`matplotlib.font_manager.FontProperties` or dict
The font properties of the legend. If None (default), the current
:data:`matplotlib.rcParams` will be used.
fontsize : int or float or {'xx-small', 'x-small', 'small', 'medium', \
'large', 'x-large', 'xx-large'}
Controls the font size of the legend. If the value is numeric the
size will be the absolute font size in points. String values are
relative to the current default font size. This argument is only
used if `prop` is not specified.
numpoints : None or int
The number of marker points in the legend when creating a legend
entry for a `.Line2D` (line).
Default is ``None``, which will take the value from
:rc:`legend.numpoints`.
scatterpoints : None or int
The number of marker points in the legend when creating
a legend entry for a `.PathCollection` (scatter plot).
Default is ``None``, which will take the value from
:rc:`legend.scatterpoints`.
scatteryoffsets : iterable of floats
The vertical offset (relative to the font size) for the markers
created for a scatter plot legend entry. 0.0 is at the base of the
legend text, and 1.0 is at the top. To draw all markers at the
same height, set to ``[0.5]``. Default is ``[0.375, 0.5, 0.3125]``.
markerscale : None or int or float
The relative size of legend markers compared with the originally
drawn ones.
Default is ``None``, which will take the value from
:rc:`legend.markerscale`.
markerfirst : bool
If *True*, legend marker is placed to the left of the legend label.
If *False*, legend marker is placed to the right of the legend
label.
Default is *True*.
frameon : None or bool
Control whether the legend should be drawn on a patch
(frame).
Default is ``None``, which will take the value from
:rc:`legend.frameon`.
fancybox : None or bool
Control whether round edges should be enabled around the
:class:`~matplotlib.patches.FancyBboxPatch` which makes up the
legend's background.
Default is ``None``, which will take the value from
:rc:`legend.fancybox`.
shadow : None or bool
Control whether to draw a shadow behind the legend.
Default is ``None``, which will take the value from
:rc:`legend.shadow`.
framealpha : None or float
Control the alpha transparency of the legend's background.
Default is ``None``, which will take the value from
:rc:`legend.framealpha`. If shadow is activated and
*framealpha* is ``None``, the default value is ignored.
facecolor : None or "inherit" or a color spec
Control the legend's background color.
Default is ``None``, which will take the value from
:rc:`legend.facecolor`. If ``"inherit"``, it will take
:rc:`axes.facecolor`.
edgecolor : None or "inherit" or a color spec
Control the legend's background patch edge color.
Default is ``None``, which will take the value from
:rc:`legend.edgecolor` If ``"inherit"``, it will take
:rc:`axes.edgecolor`.
mode : {"expand", None}
If `mode` is set to ``"expand"`` the legend will be horizontally
expanded to fill the axes area (or `bbox_to_anchor` if it defines
the legend's size).
bbox_transform : None or :class:`matplotlib.transforms.Transform`
The transform for the bounding box (`bbox_to_anchor`). For a value
of ``None`` (default) the Axes'
:data:`~matplotlib.axes.Axes.transAxes` transform will be used.
title : str or None
The legend's title. Default is no title (``None``).
borderpad : float or None
The fractional whitespace inside the legend border.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.borderpad`.
labelspacing : float or None
The vertical space between the legend entries.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.labelspacing`.
handlelength : float or None
The length of the legend handles.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.handlelength`.
handletextpad : float or None
The pad between the legend handle and text.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.handletextpad`.
borderaxespad : float or None
The pad between the axes and legend border.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.borderaxespad`.
columnspacing : float or None
The spacing between columns.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.columnspacing`.
handler_map : dict or None
The custom dictionary mapping instances or types to a legend
handler. This `handler_map` updates the default handler map
found at :func:`matplotlib.legend.Legend.get_legend_handler_map`.
Notes
-----
Users can specify any arbitrary location for the legend using the
*bbox_to_anchor* keyword argument. bbox_to_anchor can be an instance
of BboxBase (or its derivatives) or a tuple of 2 or 4 floats.
See :meth:`set_bbox_to_anchor` for more detail.
The legend location can be specified by setting *loc* with a tuple of
2 floats, which is interpreted as the lower-left corner of the legend
in the normalized axes coordinate.
"""
# local import only to avoid circularity
from matplotlib.axes import Axes
from matplotlib.figure import Figure
Artist.__init__(self)
if prop is None:
if fontsize is not None:
self.prop = FontProperties(size=fontsize)
else:
self.prop = FontProperties(size=rcParams["legend.fontsize"])
elif isinstance(prop, dict):
self.prop = FontProperties(**prop)
if "size" not in prop:
self.prop.set_size(rcParams["legend.fontsize"])
else:
self.prop = prop
self._fontsize = self.prop.get_size_in_points()
self.texts = []
self.legendHandles = []
self._legend_title_box = None
#: A dictionary with the extra handler mappings for this Legend
#: instance.
self._custom_handler_map = handler_map
locals_view = locals()
for name in ["numpoints", "markerscale", "shadow", "columnspacing",
"scatterpoints", "handleheight", 'borderpad',
'labelspacing', 'handlelength', 'handletextpad',
'borderaxespad']:
if locals_view[name] is None:
value = rcParams["legend." + name]
else:
value = locals_view[name]
setattr(self, name, value)
del locals_view
# trim handles and labels if illegal label...
_lab, _hand = [], []
for label, handle in zip(labels, handles):
if (isinstance(label, six.string_types) and
label.startswith('_')):
warnings.warn('The handle {!r} has a label of {!r} which '
'cannot be automatically added to the '
'legend.'.format(handle, label))
else:
_lab.append(label)
_hand.append(handle)
labels, handles = _lab, _hand
handles = list(handles)
if len(handles) < 2:
ncol = 1
self._ncol = ncol
if self.numpoints <= 0:
raise ValueError("numpoints must be > 0; it was %d" % numpoints)
# introduce y-offset for handles of the scatter plot
if scatteryoffsets is None:
self._scatteryoffsets = np.array([3. / 8., 4. / 8., 2.5 / 8.])
else:
self._scatteryoffsets = np.asarray(scatteryoffsets)
reps = self.scatterpoints // len(self._scatteryoffsets) + 1
self._scatteryoffsets = np.tile(self._scatteryoffsets,
reps)[:self.scatterpoints]
# _legend_box is an OffsetBox instance that contains all
# legend items and will be initialized from _init_legend_box()
# method.
self._legend_box = None
if isinstance(parent, Axes):
self.isaxes = True
self.axes = parent
self.set_figure(parent.figure)
elif isinstance(parent, Figure):
self.isaxes = False
self.set_figure(parent)
else:
raise TypeError("Legend needs either Axes or Figure as parent")
self.parent = parent
if loc is None:
loc = rcParams["legend.loc"]
if not self.isaxes and loc in [0, 'best']:
loc = 'upper right'
if isinstance(loc, six.string_types):
if loc not in self.codes:
if self.isaxes:
warnings.warn('Unrecognized location "%s". Falling back '
'on "best"; valid locations are\n\t%s\n'
% (loc, '\n\t'.join(self.codes)))
loc = 0
else:
warnings.warn('Unrecognized location "%s". Falling back '
'on "upper right"; '
'valid locations are\n\t%s\n'
% (loc, '\n\t'.join(self.codes)))
loc = 1
else:
loc = self.codes[loc]
if not self.isaxes and loc == 0:
warnings.warn('Automatic legend placement (loc="best") not '
'implemented for figure legend. '
'Falling back on "upper right".')
loc = 1
self._mode = mode
self.set_bbox_to_anchor(bbox_to_anchor, bbox_transform)
# We use FancyBboxPatch to draw a legend frame. The location
# and size of the box will be updated during the drawing time.
if facecolor is None:
facecolor = rcParams["legend.facecolor"]
if facecolor == 'inherit':
facecolor = rcParams["axes.facecolor"]
if edgecolor is None:
edgecolor = rcParams["legend.edgecolor"]
if edgecolor == 'inherit':
edgecolor = rcParams["axes.edgecolor"]
self.legendPatch = FancyBboxPatch(
xy=(0.0, 0.0), width=1., height=1.,
facecolor=facecolor,
edgecolor=edgecolor,
mutation_scale=self._fontsize,
snap=True
)
# The width and height of the legendPatch will be set (in the
# draw()) to the length that includes the padding. Thus we set
# pad=0 here.
if fancybox is None:
fancybox = rcParams["legend.fancybox"]
if fancybox:
self.legendPatch.set_boxstyle("round", pad=0,
rounding_size=0.2)
else:
self.legendPatch.set_boxstyle("square", pad=0)
self._set_artist_props(self.legendPatch)
self._drawFrame = frameon
if frameon is None:
self._drawFrame = rcParams["legend.frameon"]
# init with null renderer
self._init_legend_box(handles, labels, markerfirst)
# If shadow is activated use framealpha if not
# explicitly passed. See Issue 8943
if framealpha is None:
if shadow:
self.get_frame().set_alpha(1)
else:
self.get_frame().set_alpha(rcParams["legend.framealpha"])
else:
self.get_frame().set_alpha(framealpha)
self._loc = loc
self.set_title(title)
self._last_fontsize_points = self._fontsize
self._draggable = None
def _set_artist_props(self, a):
"""
Set the boilerplate props for artists added to axes.
"""
a.set_figure(self.figure)
if self.isaxes:
# a.set_axes(self.axes)
a.axes = self.axes
a.set_transform(self.get_transform())
def _set_loc(self, loc):
# find_offset function will be provided to _legend_box and
# _legend_box will draw itself at the location of the return
# value of the find_offset.
self._loc_real = loc
self.stale = True
self._legend_box.set_offset(self._findoffset)
def _get_loc(self):
return self._loc_real
_loc = property(_get_loc, _set_loc)
def _findoffset(self, width, height, xdescent, ydescent, renderer):
"Helper function to locate the legend."
if self._loc == 0: # "best".
x, y = self._find_best_position(width, height, renderer)
elif self._loc in Legend.codes.values(): # Fixed location.
bbox = Bbox.from_bounds(0, 0, width, height)
x, y = self._get_anchored_bbox(self._loc, bbox,
self.get_bbox_to_anchor(),
renderer)
else: # Axes or figure coordinates.
fx, fy = self._loc
bbox = self.get_bbox_to_anchor()
x, y = bbox.x0 + bbox.width * fx, bbox.y0 + bbox.height * fy
return x + xdescent, y + ydescent
@allow_rasterization
def draw(self, renderer):
"Draw everything that belongs to the legend."
if not self.get_visible():
return
renderer.open_group('legend')
fontsize = renderer.points_to_pixels(self._fontsize)
# if mode == fill, set the width of the legend_box to the
        # width of the parent (minus pads)
if self._mode in ["expand"]:
pad = 2 * (self.borderaxespad + self.borderpad) * fontsize
self._legend_box.set_width(self.get_bbox_to_anchor().width - pad)
# update the location and size of the legend. This needs to
# be done in any case to clip the figure right.
bbox = self._legend_box.get_window_extent(renderer)
self.legendPatch.set_bounds(bbox.x0, bbox.y0,
bbox.width, bbox.height)
self.legendPatch.set_mutation_scale(fontsize)
if self._drawFrame:
if self.shadow:
shadow = Shadow(self.legendPatch, 2, -2)
shadow.draw(renderer)
self.legendPatch.draw(renderer)
self._legend_box.draw(renderer)
renderer.close_group('legend')
self.stale = False
def _approx_text_height(self, renderer=None):
"""
Return the approximate height of the text. This is used to place
the legend handle.
"""
if renderer is None:
return self._fontsize
else:
return renderer.points_to_pixels(self._fontsize)
# _default_handler_map defines the default mapping between plot
# elements and the legend handlers.
_default_handler_map = {
StemContainer: legend_handler.HandlerStem(),
ErrorbarContainer: legend_handler.HandlerErrorbar(),
Line2D: legend_handler.HandlerLine2D(),
Patch: legend_handler.HandlerPatch(),
LineCollection: legend_handler.HandlerLineCollection(),
RegularPolyCollection: legend_handler.HandlerRegularPolyCollection(),
CircleCollection: legend_handler.HandlerCircleCollection(),
BarContainer: legend_handler.HandlerPatch(
update_func=legend_handler.update_from_first_child),
tuple: legend_handler.HandlerTuple(),
PathCollection: legend_handler.HandlerPathCollection(),
PolyCollection: legend_handler.HandlerPolyCollection()
}
# (get|set|update)_default_handler_maps are public interfaces to
# modify the default handler map.
@classmethod
def get_default_handler_map(cls):
"""
A class method that returns the default handler map.
"""
return cls._default_handler_map
@classmethod
def set_default_handler_map(cls, handler_map):
"""
A class method to set the default handler map.
"""
cls._default_handler_map = handler_map
@classmethod
def update_default_handler_map(cls, handler_map):
"""
A class method to update the default handler map.
"""
cls._default_handler_map.update(handler_map)
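    # A minimal usage sketch (not part of the original source): registering a
    # default handler once, class-wide, for a custom artist type.  ``MyBox``
    # is a hypothetical user-defined artist class; the stock HandlerPatch is
    # reused for it::
    #
    #     from matplotlib.legend_handler import HandlerPatch
    #     Legend.update_default_handler_map({MyBox: HandlerPatch()})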
def get_legend_handler_map(self):
"""
Return the handler map.
"""
default_handler_map = self.get_default_handler_map()
if self._custom_handler_map:
hm = default_handler_map.copy()
hm.update(self._custom_handler_map)
return hm
else:
return default_handler_map
@staticmethod
def get_legend_handler(legend_handler_map, orig_handle):
"""
Return a legend handler from *legend_handler_map* that
        corresponds to *orig_handle*.
*legend_handler_map* should be a dictionary object (that is
returned by the get_legend_handler_map method).
It first checks if the *orig_handle* itself is a key in the
        *legend_handler_map* and returns the associated value.
Otherwise, it checks for each of the classes in its
method-resolution-order. If no matching key is found, it
returns ``None``.
"""
if is_hashable(orig_handle):
try:
return legend_handler_map[orig_handle]
except KeyError:
pass
for handle_type in type(orig_handle).mro():
try:
return legend_handler_map[handle_type]
except KeyError:
pass
return None
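    # Illustration of the lookup order above (not part of the original
    # source): for an instance of a hypothetical ``class FancyLine(Line2D)``
    # that has no entry of its own, ``get_legend_handler(handler_map, h)``
    # walks ``FancyLine.mro()``, i.e. (FancyLine, Line2D, Artist, object),
    # and returns the ``HandlerLine2D`` registered for ``Line2D``.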
def _init_legend_box(self, handles, labels, markerfirst=True):
"""
Initialize the legend_box. The legend_box is an instance of
the OffsetBox, which is packed with legend handles and
texts. Once packed, their location is calculated during the
drawing time.
"""
fontsize = self._fontsize
# legend_box is a HPacker, horizontally packed with
# columns. Each column is a VPacker, vertically packed with
# legend items. Each legend item is HPacker packed with
# legend handleBox and labelBox. handleBox is an instance of
# offsetbox.DrawingArea which contains legend handle. labelBox
# is an instance of offsetbox.TextArea which contains legend
# text.
text_list = [] # the list of text instances
        handle_list = []  # the list of handle instances
handles_and_labels = []
label_prop = dict(verticalalignment='baseline',
horizontalalignment='left',
fontproperties=self.prop,
)
# The approximate height and descent of text. These values are
# only used for plotting the legend handle.
descent = 0.35 * self._approx_text_height() * (self.handleheight - 0.7)
# 0.35 and 0.7 are just heuristic numbers and may need to be improved.
height = self._approx_text_height() * self.handleheight - descent
# each handle needs to be drawn inside a box of (x, y, w, h) =
# (0, -descent, width, height). And their coordinates should
# be given in the display coordinates.
# The transformation of each handle will be automatically set
        # to self.get_transform(). If the artist does not use its
        # default transform (e.g., Collections), you need to
        # manually set their transform to self.get_transform().
legend_handler_map = self.get_legend_handler_map()
for orig_handle, lab in zip(handles, labels):
handler = self.get_legend_handler(legend_handler_map, orig_handle)
if handler is None:
warnings.warn(
"Legend does not support {!r} instances.\nA proxy artist "
"may be used instead.\nSee: "
"http://matplotlib.org/users/legend_guide.html"
"#creating-artists-specifically-for-adding-to-the-legend-"
"aka-proxy-artists".format(orig_handle)
)
# We don't have a handle for this artist, so we just defer
# to None.
handle_list.append(None)
else:
textbox = TextArea(lab, textprops=label_prop,
multilinebaseline=True,
minimumdescent=True)
handlebox = DrawingArea(width=self.handlelength * fontsize,
height=height,
xdescent=0., ydescent=descent)
text_list.append(textbox._text)
# Create the artist for the legend which represents the
# original artist/handle.
handle_list.append(handler.legend_artist(self, orig_handle,
fontsize, handlebox))
handles_and_labels.append((handlebox, textbox))
if handles_and_labels:
# We calculate number of rows in each column. The first
# (num_largecol) columns will have (nrows+1) rows, and remaining
# (num_smallcol) columns will have (nrows) rows.
ncol = min(self._ncol, len(handles_and_labels))
nrows, num_largecol = divmod(len(handles_and_labels), ncol)
num_smallcol = ncol - num_largecol
# starting index of each column and number of rows in it.
rows_per_col = [nrows + 1] * num_largecol + [nrows] * num_smallcol
start_idxs = np.concatenate([[0], np.cumsum(rows_per_col)[:-1]])
cols = zip(start_idxs, rows_per_col)
else:
cols = []
columnbox = []
for i0, di in cols:
# pack handleBox and labelBox into itemBox
itemBoxes = [HPacker(pad=0,
sep=self.handletextpad * fontsize,
children=[h, t] if markerfirst else [t, h],
align="baseline")
for h, t in handles_and_labels[i0:i0 + di]]
# minimumdescent=False for the text of the last row of the column
if markerfirst:
itemBoxes[-1].get_children()[1].set_minimumdescent(False)
else:
itemBoxes[-1].get_children()[0].set_minimumdescent(False)
# pack columnBox
alignment = "baseline" if markerfirst else "right"
columnbox.append(VPacker(pad=0,
sep=self.labelspacing * fontsize,
align=alignment,
children=itemBoxes))
mode = "expand" if self._mode == "expand" else "fixed"
sep = self.columnspacing * fontsize
self._legend_handle_box = HPacker(pad=0,
sep=sep, align="baseline",
mode=mode,
children=columnbox)
self._legend_title_box = TextArea("")
self._legend_box = VPacker(pad=self.borderpad * fontsize,
sep=self.labelspacing * fontsize,
align="center",
children=[self._legend_title_box,
self._legend_handle_box])
self._legend_box.set_figure(self.figure)
self.texts = text_list
self.legendHandles = handle_list
def _auto_legend_data(self):
"""
        Return the vertices and extents covered by the artists of the
        parent axes, as a four-element list:
        - an array of (x, y) vertices (in display coordinates) of all the
          lines,
        - a list of bounding boxes of all the patches,
        - the list of transformed line paths themselves,
        - a list of offsets of the collections.
"""
# should always hold because function is only called internally
assert self.isaxes
ax = self.parent
bboxes = []
lines = []
offsets = []
for handle in ax.lines:
assert isinstance(handle, Line2D)
path = handle.get_path()
trans = handle.get_transform()
tpath = trans.transform_path(path)
lines.append(tpath)
for handle in ax.patches:
assert isinstance(handle, Patch)
if isinstance(handle, Rectangle):
transform = handle.get_data_transform()
bboxes.append(handle.get_bbox().transformed(transform))
else:
transform = handle.get_transform()
bboxes.append(handle.get_path().get_extents(transform))
for handle in ax.collections:
transform, transOffset, hoffsets, paths = handle._prepare_points()
if len(hoffsets):
for offset in transOffset.transform(hoffsets):
offsets.append(offset)
try:
vertices = np.concatenate([l.vertices for l in lines])
except ValueError:
vertices = np.array([])
return [vertices, bboxes, lines, offsets]
def draw_frame(self, b):
'''
Set draw frame to b.
Parameters
----------
b : bool
'''
self.set_frame_on(b)
def get_children(self):
'Return a list of child artists.'
children = []
if self._legend_box:
children.append(self._legend_box)
children.append(self.get_frame())
return children
def get_frame(self):
'''
        Return the `~.patches.FancyBboxPatch` used to frame the legend.
'''
return self.legendPatch
def get_lines(self):
'Return a list of `~.lines.Line2D` instances in the legend.'
return [h for h in self.legendHandles if isinstance(h, Line2D)]
def get_patches(self):
'Return a list of `~.patches.Patch` instances in the legend.'
return silent_list('Patch',
[h for h in self.legendHandles
if isinstance(h, Patch)])
def get_texts(self):
'Return a list of `~.text.Text` instances in the legend.'
return silent_list('Text', self.texts)
def set_title(self, title, prop=None):
"""
Set the legend title. Fontproperties can be optionally set
with *prop* parameter.
"""
self._legend_title_box._text.set_text(title)
if prop is not None:
if isinstance(prop, dict):
prop = FontProperties(**prop)
self._legend_title_box._text.set_fontproperties(prop)
if title:
self._legend_title_box.set_visible(True)
else:
self._legend_title_box.set_visible(False)
self.stale = True
def get_title(self):
'Return the `.Text` instance for the legend title.'
return self._legend_title_box._text
def get_window_extent(self, *args, **kwargs):
'Return extent of the legend.'
return self.legendPatch.get_window_extent(*args, **kwargs)
def get_frame_on(self):
"""Get whether the legend box patch is drawn."""
return self._drawFrame
def set_frame_on(self, b):
"""
Set whether the legend box patch is drawn.
Parameters
----------
b : bool
.. ACCEPTS: bool
"""
self._drawFrame = b
self.stale = True
def get_bbox_to_anchor(self):
"""Return the bbox that the legend will be anchored to."""
if self._bbox_to_anchor is None:
return self.parent.bbox
else:
return self._bbox_to_anchor
def set_bbox_to_anchor(self, bbox, transform=None):
"""
Set the bbox that the legend will be anchored to.
*bbox* can be
- A `.BboxBase` instance
- A tuple of ``(left, bottom, width, height)`` in the given transform
(normalized axes coordinate if None)
- A tuple of ``(left, bottom)`` where the width and height will be
assumed to be zero.
"""
if bbox is None:
self._bbox_to_anchor = None
return
elif isinstance(bbox, BboxBase):
self._bbox_to_anchor = bbox
else:
try:
l = len(bbox)
except TypeError:
raise ValueError("Invalid argument for bbox : %s" % str(bbox))
if l == 2:
bbox = [bbox[0], bbox[1], 0, 0]
self._bbox_to_anchor = Bbox.from_bounds(*bbox)
if transform is None:
transform = BboxTransformTo(self.parent.bbox)
self._bbox_to_anchor = TransformedBbox(self._bbox_to_anchor,
transform)
self.stale = True
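    # A minimal usage sketch (not part of the original source): anchoring a
    # legend just outside the axes.  With a 2-tuple the width and height are
    # taken as zero, so *loc* names the corner of the legend pinned to that
    # point::
    #
    #     ax.legend(loc='upper left', bbox_to_anchor=(1.02, 1.0),
    #               borderaxespad=0.)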
def _get_anchored_bbox(self, loc, bbox, parentbbox, renderer):
"""
Place the *bbox* inside the *parentbbox* according to a given
location code. Return the (x,y) coordinate of the bbox.
- loc: a location code in range(1, 11).
This corresponds to the possible values for self._loc, excluding
"best".
- bbox: bbox to be placed, display coordinate units.
- parentbbox: a parent box which will contain the bbox. In
display coordinates.
"""
assert loc in range(1, 11) # called only internally
BEST, UR, UL, LL, LR, R, CL, CR, LC, UC, C = range(11)
anchor_coefs = {UR: "NE",
UL: "NW",
LL: "SW",
LR: "SE",
R: "E",
CL: "W",
CR: "E",
LC: "S",
UC: "N",
C: "C"}
c = anchor_coefs[loc]
fontsize = renderer.points_to_pixels(self._fontsize)
container = parentbbox.padded(-(self.borderaxespad) * fontsize)
anchored_box = bbox.anchored(c, container=container)
return anchored_box.x0, anchored_box.y0
def _find_best_position(self, width, height, renderer, consider=None):
"""
Determine the best location to place the legend.
*consider* is a list of ``(x, y)`` pairs to consider as a potential
lower-left corner of the legend. All are display coords.
"""
# should always hold because function is only called internally
assert self.isaxes
verts, bboxes, lines, offsets = self._auto_legend_data()
bbox = Bbox.from_bounds(0, 0, width, height)
if consider is None:
consider = [self._get_anchored_bbox(x, bbox,
self.get_bbox_to_anchor(),
renderer)
for x in range(1, len(self.codes))]
candidates = []
for idx, (l, b) in enumerate(consider):
legendBox = Bbox.from_bounds(l, b, width, height)
badness = 0
# XXX TODO: If markers are present, it would be good to
# take them into account when checking vertex overlaps in
# the next line.
badness = (legendBox.count_contains(verts)
+ legendBox.count_contains(offsets)
+ legendBox.count_overlaps(bboxes)
+ sum(line.intersects_bbox(legendBox, filled=False)
for line in lines))
if badness == 0:
return l, b
# Include the index to favor lower codes in case of a tie.
candidates.append((badness, idx, (l, b)))
_, _, (l, b) = min(candidates)
return l, b
def contains(self, event):
return self.legendPatch.contains(event)
def draggable(self, state=None, use_blit=False, update="loc"):
"""
Set the draggable state -- if state is
* None : toggle the current state
* True : turn draggable on
* False : turn draggable off
If draggable is on, you can drag the legend on the canvas with
the mouse. The `.DraggableLegend` helper instance is returned if
draggable is on.
        The *update* parameter controls which parameter of the legend changes
when dragged. If update is "loc", the *loc* parameter of the legend
is changed. If "bbox", the *bbox_to_anchor* parameter is changed.
"""
is_draggable = self._draggable is not None
# if state is None we'll toggle
if state is None:
state = not is_draggable
if state:
if self._draggable is None:
self._draggable = DraggableLegend(self,
use_blit,
update=update)
else:
if self._draggable is not None:
self._draggable.disconnect()
self._draggable = None
return self._draggable
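# A minimal usage sketch (not part of the original source) of the draggable
# interface above, on an existing Axes ``ax`` with labeled artists::
#
#     leg = ax.legend()
#     leg.draggable(True, update='loc')   # drag with the mouse, updates *loc*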
# Helper functions to parse legend arguments for both `figure.legend` and
# `axes.legend`:
def _get_legend_handles(axs, legend_handler_map=None):
"""
Return a generator of artists that can be used as handles in
a legend.
"""
handles_original = []
for ax in axs:
handles_original += (ax.lines + ax.patches +
ax.collections + ax.containers)
# support parasite axes:
if hasattr(ax, 'parasites'):
for axx in ax.parasites:
handles_original += (axx.lines + axx.patches +
axx.collections + axx.containers)
handler_map = Legend.get_default_handler_map()
if legend_handler_map is not None:
handler_map = handler_map.copy()
handler_map.update(legend_handler_map)
has_handler = Legend.get_legend_handler
for handle in handles_original:
label = handle.get_label()
if label != '_nolegend_' and has_handler(handler_map, handle):
yield handle
def _get_legend_handles_labels(axs, legend_handler_map=None):
"""
Return handles and labels for legend, internal method.
"""
handles = []
labels = []
for handle in _get_legend_handles(axs, legend_handler_map):
label = handle.get_label()
if (label and not label.startswith('_')):
handles.append(handle)
labels.append(label)
return handles, labels
def _parse_legend_args(axs, *args, **kwargs):
"""
Get the handles and labels from the calls to either ``figure.legend``
or ``axes.legend``.
``axs`` is a list of axes (to get legend artists from)
"""
log = logging.getLogger(__name__)
handlers = kwargs.get('handler_map', {}) or {}
# Support handles and labels being passed as keywords.
handles = kwargs.pop('handles', None)
labels = kwargs.pop('labels', None)
extra_args = ()
if (handles is not None or labels is not None) and len(args):
warnings.warn("You have mixed positional and keyword "
"arguments, some input may be "
"discarded.")
# if got both handles and labels as kwargs, make same length
if handles and labels:
handles, labels = zip(*zip(handles, labels))
elif handles is not None and labels is None:
labels = [handle.get_label() for handle in handles]
elif labels is not None and handles is None:
# Get as many handles as there are labels.
handles = [handle for handle, label
in zip(_get_legend_handles(axs, handlers), labels)]
# No arguments - automatically detect labels and handles.
elif len(args) == 0:
handles, labels = _get_legend_handles_labels(axs, handlers)
if not handles:
log.warning('No handles with labels found to put in legend.')
# One argument. User defined labels - automatic handle detection.
elif len(args) == 1:
labels, = args
# Get as many handles as there are labels.
handles = [handle for handle, label
in zip(_get_legend_handles(axs, handlers), labels)]
# Two arguments:
# * user defined handles and labels
elif len(args) >= 2:
handles, labels = args[:2]
extra_args = args[2:]
else:
raise TypeError('Invalid arguments to legend.')
return handles, labels, extra_args, kwargs
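# Illustrative sketch only (not part of the original module): the three call
# styles that _parse_legend_args accepts, exercised through Axes.legend.
def _legend_call_styles_example():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    line, = ax.plot([0, 1], [0, 1], label='rising')
    ax.legend()                    # no args: handles and labels auto-detected
    ax.legend(['rising'])          # labels only: handles auto-detected
    ax.legend([line], ['rising'])  # explicit handles and labels
    return fig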
| 52,691 | 36.556664 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/blocking_input.py |
"""
This provides several classes used for blocking interaction with figure
windows:
:class:`BlockingInput`
creates a callable object to retrieve events in a blocking way for
interactive sessions
:class:`BlockingKeyMouseInput`
creates a callable object to retrieve key or mouse clicks in a blocking
way for interactive sessions.
Note: Subclass of BlockingInput. Used by waitforbuttonpress
:class:`BlockingMouseInput`
creates a callable object to retrieve mouse clicks in a blocking way for
interactive sessions.
Note: Subclass of BlockingInput. Used by ginput
:class:`BlockingContourLabeler`
creates a callable object to retrieve mouse clicks in a blocking way that
will then be used to place labels on a ContourSet
Note: Subclass of BlockingMouseInput. Used by clabel
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import matplotlib.lines as mlines
import logging
_log = logging.getLogger(__name__)
class BlockingInput(object):
"""
Class that creates a callable object to retrieve events in a
blocking way.
"""
def __init__(self, fig, eventslist=()):
self.fig = fig
self.eventslist = eventslist
def on_event(self, event):
"""
Event handler that will be passed to the current figure to
retrieve events.
"""
# Add a new event to list - using a separate function is
# overkill for the base class, but this is consistent with
# subclasses
self.add_event(event)
_log.info("Event %i", len(self.events))
# This will extract info from events
self.post_event()
# Check if we have enough events already
if len(self.events) >= self.n and self.n > 0:
self.fig.canvas.stop_event_loop()
def post_event(self):
"""For baseclass, do nothing but collect events"""
pass
def cleanup(self):
"""Disconnect all callbacks"""
for cb in self.callbacks:
self.fig.canvas.mpl_disconnect(cb)
self.callbacks = []
def add_event(self, event):
"""For base class, this just appends an event to events."""
self.events.append(event)
def pop_event(self, index=-1):
"""
        This removes an event from the event list. Defaults to
        removing the last event, but an index can be supplied. Note that
        this does not check that there are events, much like the
        normal pop method. If no events exist, this will throw an
        exception.
"""
self.events.pop(index)
def pop(self, index=-1):
self.pop_event(index)
pop.__doc__ = pop_event.__doc__
def __call__(self, n=1, timeout=30):
"""
Blocking call to retrieve n events
"""
if not isinstance(n, int):
raise ValueError("Requires an integer argument")
self.n = n
self.events = []
self.callbacks = []
if hasattr(self.fig.canvas, "manager"):
# Ensure that the figure is shown, if we are managing it.
self.fig.show()
# connect the events to the on_event function call
for n in self.eventslist:
self.callbacks.append(
self.fig.canvas.mpl_connect(n, self.on_event))
try:
# Start event loop
self.fig.canvas.start_event_loop(timeout=timeout)
finally: # Run even on exception like ctrl-c
# Disconnect the callbacks
self.cleanup()
# Return the events in this case
return self.events
class BlockingMouseInput(BlockingInput):
"""
Class that creates a callable object to retrieve mouse clicks in a
blocking way.
This class will also retrieve keyboard clicks and treat them like
appropriate mouse clicks (delete and backspace are like mouse button 3,
enter is like mouse button 2 and all others are like mouse button 1).
"""
button_add = 1
button_pop = 3
button_stop = 2
def __init__(self, fig, mouse_add=1, mouse_pop=3, mouse_stop=2):
BlockingInput.__init__(self, fig=fig,
eventslist=('button_press_event',
'key_press_event'))
self.button_add = mouse_add
self.button_pop = mouse_pop
self.button_stop = mouse_stop
def post_event(self):
"""
This will be called to process events
"""
if len(self.events) == 0:
_log.warning("No events yet")
elif self.events[-1].name == 'key_press_event':
self.key_event()
else:
self.mouse_event()
def mouse_event(self):
'''Process a mouse click event'''
event = self.events[-1]
button = event.button
if button == self.button_pop:
self.mouse_event_pop(event)
elif button == self.button_stop:
self.mouse_event_stop(event)
else:
self.mouse_event_add(event)
def key_event(self):
'''
Process a key click event. This maps certain keys to appropriate
mouse click events.
'''
event = self.events[-1]
if event.key is None:
# at least in mac os X gtk backend some key returns None.
return
key = event.key.lower()
if key in ['backspace', 'delete']:
self.mouse_event_pop(event)
elif key in ['escape', 'enter']:
# on windows XP and wxAgg, the enter key doesn't seem to register
self.mouse_event_stop(event)
else:
self.mouse_event_add(event)
def mouse_event_add(self, event):
"""
Will be called for any event involving a button other than
button 2 or 3. This will add a click if it is inside axes.
"""
if event.inaxes:
self.add_click(event)
else: # If not a valid click, remove from event list
BlockingInput.pop(self, -1)
def mouse_event_stop(self, event):
"""
Will be called for any event involving button 2.
Button 2 ends blocking input.
"""
# Remove last event just for cleanliness
BlockingInput.pop(self, -1)
# This will exit even if not in infinite mode. This is
# consistent with MATLAB and sometimes quite useful, but will
# require the user to test how many points were actually
# returned before using data.
self.fig.canvas.stop_event_loop()
def mouse_event_pop(self, event):
"""
Will be called for any event involving button 3.
Button 3 removes the last click.
"""
# Remove this last event
BlockingInput.pop(self, -1)
# Now remove any existing clicks if possible
if len(self.events) > 0:
self.pop(event, -1)
def add_click(self, event):
"""
        This adds the coordinates of an event to the list of clicks
"""
self.clicks.append((event.xdata, event.ydata))
_log.info("input %i: %f,%f" %
(len(self.clicks), event.xdata, event.ydata))
# If desired plot up click
if self.show_clicks:
line = mlines.Line2D([event.xdata], [event.ydata],
marker='+', color='r')
event.inaxes.add_line(line)
self.marks.append(line)
self.fig.canvas.draw()
def pop_click(self, event, index=-1):
"""
This removes a click from the list of clicks. Defaults to
removing the last click.
"""
self.clicks.pop(index)
if self.show_clicks:
mark = self.marks.pop(index)
mark.remove()
self.fig.canvas.draw()
        # NOTE: I do NOT understand why the above 3 lines do not work
# for the keyboard backspace event on windows XP wxAgg.
# maybe event.inaxes here is a COPY of the actual axes?
def pop(self, event, index=-1):
"""
This removes a click and the associated event from the object.
Defaults to removing the last click, but any index can be
supplied.
"""
self.pop_click(event, index)
BlockingInput.pop(self, index)
def cleanup(self, event=None):
# clean the figure
if self.show_clicks:
for mark in self.marks:
mark.remove()
self.marks = []
self.fig.canvas.draw()
# Call base class to remove callbacks
BlockingInput.cleanup(self)
def __call__(self, n=1, timeout=30, show_clicks=True):
"""
Blocking call to retrieve n coordinate pairs through mouse
clicks.
"""
self.show_clicks = show_clicks
self.clicks = []
self.marks = []
BlockingInput.__call__(self, n=n, timeout=timeout)
return self.clicks
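# BlockingMouseInput is the machinery behind ``Figure.ginput``.  A minimal
# usage sketch (illustration only)::
#
#     import matplotlib.pyplot as plt
#     plt.plot([0, 1])
#     points = plt.ginput(n=3, timeout=30)   # left-click three points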
class BlockingContourLabeler(BlockingMouseInput):
"""
Class that creates a callable object that uses mouse clicks or key
clicks on a figure window to place contour labels.
"""
def __init__(self, cs):
self.cs = cs
BlockingMouseInput.__init__(self, fig=cs.ax.figure)
def add_click(self, event):
self.button1(event)
def pop_click(self, event, index=-1):
self.button3(event)
def button1(self, event):
"""
This will be called if an event involving a button other than
        2 or 3 occurs. This will add a label to a contour.
"""
# Shorthand
if event.inaxes == self.cs.ax:
self.cs.add_label_near(event.x, event.y, self.inline,
inline_spacing=self.inline_spacing,
transform=False)
self.fig.canvas.draw()
else: # Remove event if not valid
BlockingInput.pop(self)
def button3(self, event):
"""
This will be called if button 3 is clicked. This will remove
a label if not in inline mode. Unfortunately, if one is doing
inline labels, then there is currently no way to fix the
broken contour - once humpty-dumpty is broken, he can't be put
back together. In inline mode, this does nothing.
"""
if self.inline:
pass
else:
self.cs.pop_label()
self.cs.ax.figure.canvas.draw()
def __call__(self, inline, inline_spacing=5, n=-1, timeout=-1):
self.inline = inline
self.inline_spacing = inline_spacing
BlockingMouseInput.__call__(self, n=n, timeout=timeout,
show_clicks=False)
class BlockingKeyMouseInput(BlockingInput):
"""
Class that creates a callable object to retrieve a single mouse or
keyboard click
"""
def __init__(self, fig):
BlockingInput.__init__(self, fig=fig, eventslist=(
'button_press_event', 'key_press_event'))
def post_event(self):
"""
Determines if it is a key event
"""
if len(self.events) == 0:
_log.warning("No events yet")
else:
self.keyormouse = self.events[-1].name == 'key_press_event'
def __call__(self, timeout=30):
"""
Blocking call to retrieve a single mouse or key click
Returns True if key click, False if mouse, or None if timeout
"""
self.keyormouse = None
BlockingInput.__call__(self, n=1, timeout=timeout)
return self.keyormouse
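# BlockingKeyMouseInput is the machinery behind ``waitforbuttonpress``.  A
# minimal usage sketch (illustration only)::
#
#     import matplotlib.pyplot as plt
#     plt.plot([0, 1])
#     pressed_key = plt.waitforbuttonpress(timeout=10)
#     # True -> key press, False -> mouse click, None -> timed out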
| 11,658 | 30.007979 | 77 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/spines.py |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import matplotlib
from matplotlib.artist import allow_rasterization
from matplotlib import docstring
import matplotlib.transforms as mtransforms
import matplotlib.patches as mpatches
import matplotlib.path as mpath
import numpy as np
import warnings
rcParams = matplotlib.rcParams
class Spine(mpatches.Patch):
"""an axis spine -- the line noting the data area boundaries
Spines are the lines connecting the axis tick marks and noting the
boundaries of the data area. They can be placed at arbitrary
    positions. See :meth:`~matplotlib.spines.Spine.set_position`
    for more information.
    The default position is ``('outward', 0)``.
    Spines are subclasses of :class:`~matplotlib.patches.Patch`, and
    inherit much of their behavior.
    Spines draw a line, a circle, or an arc depending on whether
    :meth:`~matplotlib.spines.Spine.set_patch_line`,
    :meth:`~matplotlib.spines.Spine.set_patch_circle`, or
    :meth:`~matplotlib.spines.Spine.set_patch_arc` has been called.
Line-like is the default.
"""
def __str__(self):
return "Spine"
@docstring.dedent_interpd
def __init__(self, axes, spine_type, path, **kwargs):
"""
- *axes* : the Axes instance containing the spine
- *spine_type* : a string specifying the spine type
- *path* : the path instance used to draw the spine
Valid kwargs are:
%(Patch)s
"""
super(Spine, self).__init__(**kwargs)
self.axes = axes
self.set_figure(self.axes.figure)
self.spine_type = spine_type
self.set_facecolor('none')
self.set_edgecolor(rcParams['axes.edgecolor'])
self.set_linewidth(rcParams['axes.linewidth'])
self.set_capstyle('projecting')
self.axis = None
self.set_zorder(2.5)
self.set_transform(self.axes.transData) # default transform
self._bounds = None # default bounds
self._smart_bounds = False
# Defer initial position determination. (Not much support for
# non-rectangular axes is currently implemented, and this lets
# them pass through the spines machinery without errors.)
self._position = None
if not isinstance(path, matplotlib.path.Path):
raise ValueError(
"'path' must be an instance of 'matplotlib.path.Path'")
self._path = path
# To support drawing both linear and circular spines, this
# class implements Patch behavior three ways. If
# self._patch_type == 'line', behave like a mpatches.PathPatch
# instance. If self._patch_type == 'circle', behave like a
# mpatches.Ellipse instance. If self._patch_type == 'arc', behave like
# a mpatches.Arc instance.
self._patch_type = 'line'
# Behavior copied from mpatches.Ellipse:
# Note: This cannot be calculated until this is added to an Axes
self._patch_transform = mtransforms.IdentityTransform()
def set_smart_bounds(self, value):
"""set the spine and associated axis to have smart bounds"""
self._smart_bounds = value
# also set the axis if possible
if self.spine_type in ('left', 'right'):
self.axes.yaxis.set_smart_bounds(value)
elif self.spine_type in ('top', 'bottom'):
self.axes.xaxis.set_smart_bounds(value)
self.stale = True
def get_smart_bounds(self):
"""get whether the spine has smart bounds"""
return self._smart_bounds
def set_patch_arc(self, center, radius, theta1, theta2):
"""set the spine to be arc-like"""
self._patch_type = 'arc'
self._center = center
self._width = radius * 2
self._height = radius * 2
self._theta1 = theta1
self._theta2 = theta2
self._path = mpath.Path.arc(theta1, theta2)
# arc drawn on axes transform
self.set_transform(self.axes.transAxes)
self.stale = True
def set_patch_circle(self, center, radius):
"""set the spine to be circular"""
self._patch_type = 'circle'
self._center = center
self._width = radius * 2
self._height = radius * 2
# circle drawn on axes transform
self.set_transform(self.axes.transAxes)
self.stale = True
def set_patch_line(self):
"""set the spine to be linear"""
self._patch_type = 'line'
self.stale = True
# Behavior copied from mpatches.Ellipse:
def _recompute_transform(self):
"""NOTE: This cannot be called until after this has been added
to an Axes, otherwise unit conversion will fail. This
makes it very important to call the accessor method and
not directly access the transformation member variable.
"""
assert self._patch_type in ('arc', 'circle')
center = (self.convert_xunits(self._center[0]),
self.convert_yunits(self._center[1]))
width = self.convert_xunits(self._width)
height = self.convert_yunits(self._height)
self._patch_transform = mtransforms.Affine2D() \
.scale(width * 0.5, height * 0.5) \
.translate(*center)
def get_patch_transform(self):
if self._patch_type in ('arc', 'circle'):
self._recompute_transform()
return self._patch_transform
else:
return super(Spine, self).get_patch_transform()
def get_path(self):
return self._path
def _ensure_position_is_set(self):
if self._position is None:
# default position
self._position = ('outward', 0.0) # in points
self.set_position(self._position)
def register_axis(self, axis):
"""register an axis
An axis should be registered with its corresponding spine from
the Axes instance. This allows the spine to clear any axis
properties when needed.
"""
self.axis = axis
if self.axis is not None:
self.axis.cla()
self.stale = True
def cla(self):
"""Clear the current spine"""
self._position = None # clear position
if self.axis is not None:
self.axis.cla()
def is_frame_like(self):
"""return True if directly on axes frame
This is useful for determining if a spine is the edge of an
old style MPL plot. If so, this function will return True.
"""
self._ensure_position_is_set()
position = self._position
if isinstance(position, six.string_types):
if position == 'center':
position = ('axes', 0.5)
elif position == 'zero':
position = ('data', 0)
if len(position) != 2:
raise ValueError("position should be 2-tuple")
position_type, amount = position
if position_type == 'outward' and amount == 0:
return True
else:
return False
def _adjust_location(self):
"""automatically set spine bounds to the view interval"""
if self.spine_type == 'circle':
return
if self._bounds is None:
if self.spine_type in ('left', 'right'):
low, high = self.axes.viewLim.intervaly
elif self.spine_type in ('top', 'bottom'):
low, high = self.axes.viewLim.intervalx
else:
raise ValueError('unknown spine spine_type: %s' %
self.spine_type)
if self._smart_bounds:
# attempt to set bounds in sophisticated way
# handle inverted limits
viewlim_low, viewlim_high = sorted([low, high])
if self.spine_type in ('left', 'right'):
datalim_low, datalim_high = self.axes.dataLim.intervaly
ticks = self.axes.get_yticks()
elif self.spine_type in ('top', 'bottom'):
datalim_low, datalim_high = self.axes.dataLim.intervalx
ticks = self.axes.get_xticks()
# handle inverted limits
ticks = np.sort(ticks)
datalim_low, datalim_high = sorted([datalim_low, datalim_high])
if datalim_low < viewlim_low:
# Data extends past view. Clip line to view.
low = viewlim_low
else:
# Data ends before view ends.
cond = (ticks <= datalim_low) & (ticks >= viewlim_low)
tickvals = ticks[cond]
if len(tickvals):
# A tick is less than or equal to lowest data point.
low = tickvals[-1]
else:
# No tick is available
low = datalim_low
low = max(low, viewlim_low)
if datalim_high > viewlim_high:
# Data extends past view. Clip line to view.
high = viewlim_high
else:
# Data ends before view ends.
cond = (ticks >= datalim_high) & (ticks <= viewlim_high)
tickvals = ticks[cond]
if len(tickvals):
# A tick is greater than or equal to highest data
# point.
high = tickvals[0]
else:
# No tick is available
high = datalim_high
high = min(high, viewlim_high)
else:
low, high = self._bounds
if self._patch_type == 'arc':
if self.spine_type in ('bottom', 'top'):
try:
direction = self.axes.get_theta_direction()
except AttributeError:
direction = 1
try:
offset = self.axes.get_theta_offset()
except AttributeError:
offset = 0
low = low * direction + offset
high = high * direction + offset
if low > high:
low, high = high, low
self._path = mpath.Path.arc(np.rad2deg(low), np.rad2deg(high))
if self.spine_type == 'bottom':
rmin, rmax = self.axes.viewLim.intervaly
try:
rorigin = self.axes.get_rorigin()
except AttributeError:
rorigin = rmin
scaled_diameter = (rmin - rorigin) / (rmax - rorigin)
self._height = scaled_diameter
self._width = scaled_diameter
else:
raise ValueError('unable to set bounds for spine "%s"' %
self.spine_type)
else:
v1 = self._path.vertices
assert v1.shape == (2, 2), 'unexpected vertices shape'
if self.spine_type in ['left', 'right']:
v1[0, 1] = low
v1[1, 1] = high
elif self.spine_type in ['bottom', 'top']:
v1[0, 0] = low
v1[1, 0] = high
else:
raise ValueError('unable to set bounds for spine "%s"' %
self.spine_type)
@allow_rasterization
def draw(self, renderer):
self._adjust_location()
ret = super(Spine, self).draw(renderer)
self.stale = False
return ret
def _calc_offset_transform(self):
"""calculate the offset transform performed by the spine"""
self._ensure_position_is_set()
position = self._position
if isinstance(position, six.string_types):
if position == 'center':
position = ('axes', 0.5)
elif position == 'zero':
position = ('data', 0)
assert len(position) == 2, "position should be 2-tuple"
position_type, amount = position
assert position_type in ('axes', 'outward', 'data')
if position_type == 'outward':
if amount == 0:
# short circuit commonest case
self._spine_transform = ('identity',
mtransforms.IdentityTransform())
elif self.spine_type in ['left', 'right', 'top', 'bottom']:
offset_vec = {'left': (-1, 0),
'right': (1, 0),
'bottom': (0, -1),
'top': (0, 1),
}[self.spine_type]
# calculate x and y offset in dots
offset_x = amount * offset_vec[0] / 72.0
offset_y = amount * offset_vec[1] / 72.0
self._spine_transform = ('post',
mtransforms.ScaledTranslation(
offset_x,
offset_y,
self.figure.dpi_scale_trans))
else:
warnings.warn('unknown spine type "%s": no spine '
'offset performed' % self.spine_type)
self._spine_transform = ('identity',
mtransforms.IdentityTransform())
elif position_type == 'axes':
if self.spine_type in ('left', 'right'):
self._spine_transform = ('pre',
mtransforms.Affine2D.from_values(
# keep y unchanged, fix x at
# amount
0, 0, 0, 1, amount, 0))
elif self.spine_type in ('bottom', 'top'):
self._spine_transform = ('pre',
mtransforms.Affine2D.from_values(
# keep x unchanged, fix y at
# amount
1, 0, 0, 0, 0, amount))
else:
warnings.warn('unknown spine type "%s": no spine '
'offset performed' % self.spine_type)
self._spine_transform = ('identity',
mtransforms.IdentityTransform())
elif position_type == 'data':
if self.spine_type in ('right', 'top'):
# The right and top spines have a default position of 1 in
# axes coordinates. When specifying the position in data
# coordinates, we need to calculate the position relative to 0.
amount -= 1
if self.spine_type in ('left', 'right'):
self._spine_transform = ('data',
mtransforms.Affine2D().translate(
amount, 0))
elif self.spine_type in ('bottom', 'top'):
self._spine_transform = ('data',
mtransforms.Affine2D().translate(
0, amount))
else:
warnings.warn('unknown spine type "%s": no spine '
'offset performed' % self.spine_type)
self._spine_transform = ('identity',
mtransforms.IdentityTransform())
def set_position(self, position):
"""set the position of the spine
Spine position is specified by a 2 tuple of (position type,
amount). The position types are:
* 'outward' : place the spine out from the data area by the
specified number of points. (Negative values specify placing the
spine inward.)
* 'axes' : place the spine at the specified Axes coordinate (from
0.0-1.0).
* 'data' : place the spine at the specified data coordinate.
        Additionally, shorthand notations define special positions:
* 'center' -> ('axes',0.5)
* 'zero' -> ('data', 0.0)
"""
if position in ('center', 'zero'):
# special positions
pass
else:
if len(position) != 2:
raise ValueError("position should be 'center' or 2-tuple")
if position[0] not in ['outward', 'axes', 'data']:
raise ValueError("position[0] should be one of 'outward', "
"'axes', or 'data' ")
self._position = position
self._calc_offset_transform()
self.set_transform(self.get_spine_transform())
if self.axis is not None:
self.axis.reset_ticks()
self.stale = True
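    # A minimal usage sketch (not part of the original source): the common
    # ways to reposition the spines of an existing Axes ``ax``::
    #
    #     ax.spines['left'].set_position(('outward', 10))  # 10 points outward
    #     ax.spines['bottom'].set_position('zero')         # through y == 0
    #     ax.spines['right'].set_position(('axes', 0.5))   # at axes fraction 0.5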
def get_position(self):
"""get the spine position"""
self._ensure_position_is_set()
return self._position
def get_spine_transform(self):
"""get the spine transform"""
self._ensure_position_is_set()
what, how = self._spine_transform
if what == 'data':
# special case data based spine locations
data_xform = self.axes.transScale + \
(how + self.axes.transLimits + self.axes.transAxes)
if self.spine_type in ['left', 'right']:
result = mtransforms.blended_transform_factory(
data_xform, self.axes.transData)
elif self.spine_type in ['top', 'bottom']:
result = mtransforms.blended_transform_factory(
self.axes.transData, data_xform)
else:
raise ValueError('unknown spine spine_type: %s' %
self.spine_type)
return result
if self.spine_type in ['left', 'right']:
base_transform = self.axes.get_yaxis_transform(which='grid')
elif self.spine_type in ['top', 'bottom']:
base_transform = self.axes.get_xaxis_transform(which='grid')
else:
raise ValueError('unknown spine spine_type: %s' %
self.spine_type)
if what == 'identity':
return base_transform
elif what == 'post':
return base_transform + how
elif what == 'pre':
return how + base_transform
else:
raise ValueError("unknown spine_transform type: %s" % what)
def set_bounds(self, low, high):
"""Set the bounds of the spine."""
if self.spine_type == 'circle':
raise ValueError(
'set_bounds() method incompatible with circular spines')
self._bounds = (low, high)
self.stale = True
def get_bounds(self):
"""Get the bounds of the spine."""
return self._bounds
@classmethod
def linear_spine(cls, axes, spine_type, **kwargs):
"""
        (classmethod) Returns a linear :class:`Spine`.
"""
# all values of 13 get replaced upon call to set_bounds()
if spine_type == 'left':
path = mpath.Path([(0.0, 13), (0.0, 13)])
elif spine_type == 'right':
path = mpath.Path([(1.0, 13), (1.0, 13)])
elif spine_type == 'bottom':
path = mpath.Path([(13, 0.0), (13, 0.0)])
elif spine_type == 'top':
path = mpath.Path([(13, 1.0), (13, 1.0)])
else:
raise ValueError('unable to make path for spine "%s"' % spine_type)
result = cls(axes, spine_type, path, **kwargs)
result.set_visible(rcParams['axes.spines.{0}'.format(spine_type)])
return result
@classmethod
def arc_spine(cls, axes, spine_type, center, radius, theta1, theta2,
**kwargs):
"""
(classmethod) Returns an arc :class:`Spine`.
"""
path = mpath.Path.arc(theta1, theta2)
result = cls(axes, spine_type, path, **kwargs)
result.set_patch_arc(center, radius, theta1, theta2)
return result
@classmethod
def circular_spine(cls, axes, center, radius, **kwargs):
"""
        (classmethod) Returns a circular :class:`Spine`.
"""
path = mpath.Path.unit_circle()
spine_type = 'circle'
result = cls(axes, spine_type, path, **kwargs)
result.set_patch_circle(center, radius)
return result
def set_color(self, c):
"""
Set the edgecolor.
ACCEPTS: matplotlib color arg or sequence of rgba tuples
.. seealso::
:meth:`set_facecolor`, :meth:`set_edgecolor`
For setting the edge or face color individually.
"""
# The facecolor of a spine is always 'none' by default -- let
# the user change it manually if desired.
self.set_edgecolor(c)
self.stale = True
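# Illustrative sketch only (not part of the original module): spines are
# normally reached through ``Axes.spines``.
def _spine_styling_example():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.spines['top'].set_visible(False)              # hide the top spine
    ax.spines['right'].set_color('none')             # 'none' also hides it
    ax.spines['left'].set_position(('outward', 10))  # detach from the frame
    ax.spines['bottom'].set_bounds(0, 1)             # limit its extent
    return fig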
| 21,143 | 37.939227 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/bezier.py |
"""
A module providing some utility functions regarding bezier path manipulation.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
from matplotlib.path import Path
from operator import xor
import warnings
class NonIntersectingPathException(ValueError):
pass
# some functions
def get_intersection(cx1, cy1, cos_t1, sin_t1,
cx2, cy2, cos_t2, sin_t2):
""" return a intersecting point between a line through (cx1, cy1)
and having angle t1 and a line through (cx2, cy2) and angle t2.
"""
# line1 => sin_t1 * (x - cx1) - cos_t1 * (y - cy1) = 0.
# line1 => sin_t1 * x + cos_t1 * y = sin_t1*cx1 - cos_t1*cy1
line1_rhs = sin_t1 * cx1 - cos_t1 * cy1
line2_rhs = sin_t2 * cx2 - cos_t2 * cy2
# rhs matrix
a, b = sin_t1, -cos_t1
c, d = sin_t2, -cos_t2
ad_bc = a * d - b * c
if ad_bc == 0.:
raise ValueError("Given lines do not intersect")
# rhs_inverse
a_, b_ = d, -b
c_, d_ = -c, a
a_, b_, c_, d_ = [k / ad_bc for k in [a_, b_, c_, d_]]
x = a_ * line1_rhs + b_ * line2_rhs
y = c_ * line1_rhs + d_ * line2_rhs
return x, y
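# Worked example (illustration only): the x-axis (through (0, 0) with
# cos_t=1, sin_t=0) and a vertical line through (1, 1) (cos_t=0, sin_t=1)
# intersect at (1, 0)::
#
#     get_intersection(0, 0, 1, 0, 1, 1, 0, 1)   # -> (1.0, 0.0)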
def get_normal_points(cx, cy, cos_t, sin_t, length):
"""
    For a line passing through (*cx*, *cy*) and having an angle *t*, return
locations of the two points located along its perpendicular line at the
distance of *length*.
"""
if length == 0.:
return cx, cy, cx, cy
cos_t1, sin_t1 = sin_t, -cos_t
cos_t2, sin_t2 = -sin_t, cos_t
x1, y1 = length * cos_t1 + cx, length * sin_t1 + cy
x2, y2 = length * cos_t2 + cx, length * sin_t2 + cy
return x1, y1, x2, y2
# BEZIER routines
# subdividing bezier curve
# http://www.cs.mtu.edu/~shene/COURSES/cs3621/NOTES/spline/Bezier/bezier-sub.html
def _de_casteljau1(beta, t):
next_beta = beta[:-1] * (1 - t) + beta[1:] * t
return next_beta
def split_de_casteljau(beta, t):
"""split a bezier segment defined by its controlpoints *beta*
into two separate segment divided at *t* and return their control points.
"""
beta = np.asarray(beta)
beta_list = [beta]
while True:
beta = _de_casteljau1(beta, t)
beta_list.append(beta)
if len(beta) == 1:
break
left_beta = [beta[0] for beta in beta_list]
right_beta = [beta[-1] for beta in reversed(beta_list)]
return left_beta, right_beta
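# Illustrative sketch only (not part of the original module): splitting a
# quadratic segment in half with split_de_casteljau.
def _split_de_casteljau_example():
    control_points = [(0., 0.), (1., 2.), (2., 0.)]
    left, right = split_de_casteljau(control_points, 0.5)
    # left  is numerically [(0, 0), (0.5, 1), (1, 1)]: the first half
    # right is numerically [(1, 1), (1.5, 1), (2, 0)]: the second half
    return left, right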
# FIXME spelling mistake in the name of the parameter ``tolerence``
def find_bezier_t_intersecting_with_closedpath(bezier_point_at_t,
inside_closedpath,
t0=0., t1=1., tolerence=0.01):
""" Find a parameter t0 and t1 of the given bezier path which
bounds the intersecting points with a provided closed
path(*inside_closedpath*). Search starts from *t0* and *t1* and it
uses a simple bisecting algorithm therefore one of the end point
must be inside the path while the orther doesn't. The search stop
when |t0-t1| gets smaller than the given tolerence.
value for
- bezier_point_at_t : a function which returns x, y coordinates at *t*
- inside_closedpath : return True if the point is inside the path
"""
# inside_closedpath : function
start = bezier_point_at_t(t0)
end = bezier_point_at_t(t1)
start_inside = inside_closedpath(start)
end_inside = inside_closedpath(end)
if start_inside == end_inside and start != end:
raise NonIntersectingPathException(
"Both points are on the same side of the closed path")
while True:
# return if the distance is smaller than the tolerence
if np.hypot(start[0] - end[0], start[1] - end[1]) < tolerence:
return t0, t1
# calculate the middle point
middle_t = 0.5 * (t0 + t1)
middle = bezier_point_at_t(middle_t)
middle_inside = inside_closedpath(middle)
if xor(start_inside, middle_inside):
t1 = middle_t
end = middle
end_inside = middle_inside
else:
t0 = middle_t
start = middle
start_inside = middle_inside
class BezierSegment(object):
"""
A simple class of a 2-dimensional bezier segment
"""
    # Higher order bezier lines can be supported by simply adding
# corresponding values.
_binom_coeff = {1: np.array([1., 1.]),
2: np.array([1., 2., 1.]),
3: np.array([1., 3., 3., 1.])}
def __init__(self, control_points):
"""
        *control_points* : location of control points. It needs to have a
        shape of n * 2, where n is the order of the bezier line. 1 <=
        n <= 3 is supported.
"""
_o = len(control_points)
self._orders = np.arange(_o)
_coeff = BezierSegment._binom_coeff[_o - 1]
xx, yy = np.asarray(control_points).T
self._px = xx * _coeff
self._py = yy * _coeff
def point_at_t(self, t):
"evaluate a point at t"
tt = ((1 - t) ** self._orders)[::-1] * t ** self._orders
_x = np.dot(tt, self._px)
_y = np.dot(tt, self._py)
return _x, _y
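# Illustration only: evaluating a quadratic segment at its midpoint::
#
#     bz = BezierSegment([(0., 0.), (1., 2.), (2., 0.)])
#     bz.point_at_t(0.5)   # -> (1.0, 1.0)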
def split_bezier_intersecting_with_closedpath(bezier,
inside_closedpath,
tolerence=0.01):
"""
bezier : control points of the bezier segment
inside_closedpath : a function which returns true if the point is inside
the path
"""
bz = BezierSegment(bezier)
bezier_point_at_t = bz.point_at_t
t0, t1 = find_bezier_t_intersecting_with_closedpath(bezier_point_at_t,
inside_closedpath,
tolerence=tolerence)
_left, _right = split_de_casteljau(bezier, (t0 + t1) / 2.)
return _left, _right
def find_r_to_boundary_of_closedpath(inside_closedpath, xy,
cos_t, sin_t,
rmin=0., rmax=1., tolerence=0.01):
"""
Find a radius r (centered at *xy*) between *rmin* and *rmax* at
    which it intersects with the path.
inside_closedpath : function
cx, cy : center
cos_t, sin_t : cosine and sine for the angle
rmin, rmax :
"""
cx, cy = xy
def _f(r):
return cos_t * r + cx, sin_t * r + cy
find_bezier_t_intersecting_with_closedpath(_f, inside_closedpath,
t0=rmin, t1=rmax,
tolerence=tolerence)
# matplotlib specific
def split_path_inout(path, inside, tolerence=0.01, reorder_inout=False):
""" divide a path into two segment at the point where inside(x, y)
becomes False.
"""
path_iter = path.iter_segments()
ctl_points, command = next(path_iter)
begin_inside = inside(ctl_points[-2:]) # true if begin point is inside
ctl_points_old = ctl_points
concat = np.concatenate
iold = 0
i = 1
for ctl_points, command in path_iter:
iold = i
i += len(ctl_points) // 2
if inside(ctl_points[-2:]) != begin_inside:
bezier_path = concat([ctl_points_old[-2:], ctl_points])
break
ctl_points_old = ctl_points
else:
raise ValueError("The path does not intersect with the patch")
bp = bezier_path.reshape((-1, 2))
left, right = split_bezier_intersecting_with_closedpath(
bp, inside, tolerence)
if len(left) == 2:
codes_left = [Path.LINETO]
codes_right = [Path.MOVETO, Path.LINETO]
elif len(left) == 3:
codes_left = [Path.CURVE3, Path.CURVE3]
codes_right = [Path.MOVETO, Path.CURVE3, Path.CURVE3]
elif len(left) == 4:
codes_left = [Path.CURVE4, Path.CURVE4, Path.CURVE4]
codes_right = [Path.MOVETO, Path.CURVE4, Path.CURVE4, Path.CURVE4]
else:
raise AssertionError("This should never be reached")
verts_left = left[1:]
verts_right = right[:]
if path.codes is None:
path_in = Path(concat([path.vertices[:i], verts_left]))
path_out = Path(concat([verts_right, path.vertices[i:]]))
else:
path_in = Path(concat([path.vertices[:iold], verts_left]),
concat([path.codes[:iold], codes_left]))
path_out = Path(concat([verts_right, path.vertices[i:]]),
concat([codes_right, path.codes[i:]]))
if reorder_inout and begin_inside is False:
path_in, path_out = path_out, path_in
return path_in, path_out
def inside_circle(cx, cy, r):
r2 = r ** 2
def _f(xy):
x, y = xy
return (x - cx) ** 2 + (y - cy) ** 2 < r2
return _f
# quadratic bezier lines
def get_cos_sin(x0, y0, x1, y1):
dx, dy = x1 - x0, y1 - y0
d = (dx * dx + dy * dy) ** .5
# Account for divide by zero
if d == 0:
return 0.0, 0.0
return dx / d, dy / d
def check_if_parallel(dx1, dy1, dx2, dy2, tolerence=1.e-5):
""" returns
    * 1 if the two lines are parallel in the same direction
    * -1 if the two lines are parallel in opposite directions
    * False otherwise
"""
theta1 = np.arctan2(dx1, dy1)
theta2 = np.arctan2(dx2, dy2)
dtheta = np.abs(theta1 - theta2)
if dtheta < tolerence:
return 1
elif np.abs(dtheta - np.pi) < tolerence:
return -1
else:
return False
def get_parallels(bezier2, width):
"""
Given the quadratic bezier control points *bezier2*, returns
    control points of quadratic bezier lines roughly parallel to the given
    one, separated by *width*.
"""
    # The parallel bezier lines are constructed in the following way.
# c1 and c2 are control points representing the begin and end of the
# bezier line.
# cm is the middle point
c1x, c1y = bezier2[0]
cmx, cmy = bezier2[1]
c2x, c2y = bezier2[2]
parallel_test = check_if_parallel(c1x - cmx, c1y - cmy,
cmx - c2x, cmy - c2y)
if parallel_test == -1:
warnings.warn(
"Lines do not intersect. A straight line is used instead.")
cos_t1, sin_t1 = get_cos_sin(c1x, c1y, c2x, c2y)
cos_t2, sin_t2 = cos_t1, sin_t1
else:
        # t1 and t2 are the angles between (c1, cm) and (cm, c2). They are
        # also the angles of the tangent lines of the path at c1 and c2
cos_t1, sin_t1 = get_cos_sin(c1x, c1y, cmx, cmy)
cos_t2, sin_t2 = get_cos_sin(cmx, cmy, c2x, c2y)
# find c1_left, c1_right which are located along the lines
# through c1 and perpendicular to the tangential lines of the
# bezier path at a distance of width. Same thing for c2_left and
# c2_right with respect to c2.
c1x_left, c1y_left, c1x_right, c1y_right = (
get_normal_points(c1x, c1y, cos_t1, sin_t1, width)
)
c2x_left, c2y_left, c2x_right, c2y_right = (
get_normal_points(c2x, c2y, cos_t2, sin_t2, width)
)
    # find cm_left which is the intersecting point of a line through
# c1_left with angle t1 and a line through c2_left with angle
# t2. Same with cm_right.
if parallel_test != 0:
        # a special case for a straight line, i.e., the angle between the
        # two lines is smaller than some (arbitrary) value.
cmx_left, cmy_left = (
0.5 * (c1x_left + c2x_left), 0.5 * (c1y_left + c2y_left)
)
cmx_right, cmy_right = (
0.5 * (c1x_right + c2x_right), 0.5 * (c1y_right + c2y_right)
)
else:
cmx_left, cmy_left = get_intersection(c1x_left, c1y_left, cos_t1,
sin_t1, c2x_left, c2y_left,
cos_t2, sin_t2)
cmx_right, cmy_right = get_intersection(c1x_right, c1y_right, cos_t1,
sin_t1, c2x_right, c2y_right,
cos_t2, sin_t2)
# the parallel bezier lines are created with control points of
# [c1_left, cm_left, c2_left] and [c1_right, cm_right, c2_right]
path_left = [(c1x_left, c1y_left),
(cmx_left, cmy_left),
(c2x_left, c2y_left)]
path_right = [(c1x_right, c1y_right),
(cmx_right, cmy_right),
(c2x_right, c2y_right)]
return path_left, path_right
def find_control_points(c1x, c1y, mmx, mmy, c2x, c2y):
""" Find control points of the bezier line through c1, mm, c2. We
    simply assume that c1, mm, c2 have parametric values 0, 0.5, and 1.
"""
cmx = .5 * (4 * mmx - (c1x + c2x))
cmy = .5 * (4 * mmy - (c1y + c2y))
return [(c1x, c1y), (cmx, cmy), (c2x, c2y)]
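# Derivation (for reference): a quadratic bezier B(t) with control points
# c1, cm, c2 satisfies B(0.5) = 0.25*c1 + 0.5*cm + 0.25*c2.  Requiring the
# curve to pass through mm at t = 0.5 and solving for cm gives
# cm = 0.5 * (4*mm - (c1 + c2)), which is the formula used above.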
def make_wedged_bezier2(bezier2, width, w1=1., wm=0.5, w2=0.):
"""
    Being similar to get_parallels, returns control points of two quadratic
    bezier lines having a width roughly parallel to the given one, separated by
*width*.
"""
# c1, cm, c2
c1x, c1y = bezier2[0]
cmx, cmy = bezier2[1]
c3x, c3y = bezier2[2]
    # t1 and t2 are the angles between (c1, cm) and (cm, c3).
    # They are also the angles of the tangent lines of the path at c1 and c3
cos_t1, sin_t1 = get_cos_sin(c1x, c1y, cmx, cmy)
cos_t2, sin_t2 = get_cos_sin(cmx, cmy, c3x, c3y)
# find c1_left, c1_right which are located along the lines
# through c1 and perpendicular to the tangential lines of the
# bezier path at a distance of width. Same thing for c3_left and
# c3_right with respect to c3.
c1x_left, c1y_left, c1x_right, c1y_right = (
get_normal_points(c1x, c1y, cos_t1, sin_t1, width * w1)
)
c3x_left, c3y_left, c3x_right, c3y_right = (
get_normal_points(c3x, c3y, cos_t2, sin_t2, width * w2)
)
# find c12, c23 and c123 which are middle points of c1-cm, cm-c3 and
# c12-c23
c12x, c12y = (c1x + cmx) * .5, (c1y + cmy) * .5
c23x, c23y = (cmx + c3x) * .5, (cmy + c3y) * .5
c123x, c123y = (c12x + c23x) * .5, (c12y + c23y) * .5
# tangential angle of c123 (angle between c12 and c23)
cos_t123, sin_t123 = get_cos_sin(c12x, c12y, c23x, c23y)
c123x_left, c123y_left, c123x_right, c123y_right = (
get_normal_points(c123x, c123y, cos_t123, sin_t123, width * wm)
)
path_left = find_control_points(c1x_left, c1y_left,
c123x_left, c123y_left,
c3x_left, c3y_left)
path_right = find_control_points(c1x_right, c1y_right,
c123x_right, c123y_right,
c3x_right, c3y_right)
return path_left, path_right
def make_path_regular(p):
"""
fill in the codes if None.
"""
c = p.codes
if c is None:
c = np.empty(p.vertices.shape[:1], "i")
c.fill(Path.LINETO)
c[0] = Path.MOVETO
return Path(p.vertices, c)
else:
return p
def concatenate_paths(paths):
"""
concatenate list of paths into a single path.
"""
vertices = []
codes = []
for p in paths:
p = make_path_regular(p)
vertices.append(p.vertices)
codes.append(p.codes)
_path = Path(np.concatenate(vertices),
np.concatenate(codes))
return _path
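# Illustration only: gluing two open paths into one compound path::
#
#     p1 = Path([(0, 0), (1, 0)])   # codes are None; make_path_regular()
#     p2 = Path([(1, 1), (2, 1)])   # fills in MOVETO/LINETO
#     compound = concatenate_paths([p1, p2])
#     # compound.vertices.shape == (4, 2); each sub-path starts with MOVETO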
| 15,455 | 30.16129 | 81 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/textpath.py |
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import six
from six.moves import zip
import warnings
import numpy as np
from matplotlib.path import Path
from matplotlib import rcParams
import matplotlib.font_manager as font_manager
from matplotlib.ft2font import KERNING_DEFAULT, LOAD_NO_HINTING
from matplotlib.ft2font import LOAD_TARGET_LIGHT
from matplotlib.mathtext import MathTextParser
import matplotlib.dviread as dviread
from matplotlib.font_manager import FontProperties, get_font
from matplotlib.transforms import Affine2D
from six.moves.urllib.parse import quote as urllib_quote
class TextToPath(object):
"""
    A class that converts a given text to a path using ttf fonts.
"""
FONT_SCALE = 100.
DPI = 72
def __init__(self):
"""
Initialization
"""
self.mathtext_parser = MathTextParser('path')
self.tex_font_map = None
from matplotlib.cbook import maxdict
self._ps_fontd = maxdict(50)
self._texmanager = None
self._adobe_standard_encoding = None
def _get_adobe_standard_encoding(self):
enc_name = dviread.find_tex_file('8a.enc')
enc = dviread.Encoding(enc_name)
return {c: i for i, c in enumerate(enc.encoding)}
def _get_font(self, prop):
"""
find a ttf font.
"""
fname = font_manager.findfont(prop)
font = get_font(fname)
font.set_size(self.FONT_SCALE, self.DPI)
return font
def _get_hinting_flag(self):
return LOAD_NO_HINTING
def _get_char_id(self, font, ccode):
"""
Return a unique id for the given font and character-code set.
"""
sfnt = font.get_sfnt()
try:
ps_name = sfnt[1, 0, 0, 6].decode('mac_roman')
except KeyError:
ps_name = sfnt[3, 1, 0x0409, 6].decode('utf-16be')
char_id = urllib_quote('%s-%x' % (ps_name, ccode))
return char_id
def _get_char_id_ps(self, font, ccode):
"""
Return a unique id for the given font and character-code set (for tex).
"""
ps_name = font.get_ps_font_info()[2]
char_id = urllib_quote('%s-%d' % (ps_name, ccode))
return char_id
def glyph_to_path(self, font, currx=0.):
"""
convert the ft2font glyph to vertices and codes.
"""
verts, codes = font.get_path()
if currx != 0.0:
verts[:, 0] += currx
return verts, codes
def get_text_width_height_descent(self, s, prop, ismath):
if rcParams['text.usetex']:
texmanager = self.get_texmanager()
fontsize = prop.get_size_in_points()
w, h, d = texmanager.get_text_width_height_descent(s, fontsize,
renderer=None)
return w, h, d
fontsize = prop.get_size_in_points()
scale = fontsize / self.FONT_SCALE
if ismath:
prop = prop.copy()
prop.set_size(self.FONT_SCALE)
width, height, descent, trash, used_characters = \
self.mathtext_parser.parse(s, 72, prop)
return width * scale, height * scale, descent * scale
font = self._get_font(prop)
font.set_text(s, 0.0, flags=LOAD_NO_HINTING)
w, h = font.get_width_height()
w /= 64.0 # convert from subpixels
h /= 64.0
d = font.get_descent()
d /= 64.0
return w * scale, h * scale, d * scale
def get_text_path(self, prop, s, ismath=False, usetex=False):
"""
convert text *s* to path (a tuple of vertices and codes for
matplotlib.path.Path).
*prop*
font property
*s*
text to be converted
*usetex*
If True, use matplotlib usetex mode.
*ismath*
If True, use mathtext parser. Effective only if usetex == False.
"""
if not usetex:
if not ismath:
font = self._get_font(prop)
glyph_info, glyph_map, rects = self.get_glyphs_with_font(
font, s)
else:
glyph_info, glyph_map, rects = self.get_glyphs_mathtext(
prop, s)
else:
glyph_info, glyph_map, rects = self.get_glyphs_tex(prop, s)
verts, codes = [], []
for glyph_id, xposition, yposition, scale in glyph_info:
verts1, codes1 = glyph_map[glyph_id]
if len(verts1):
verts1 = np.array(verts1) * scale + [xposition, yposition]
verts.extend(verts1)
codes.extend(codes1)
for verts1, codes1 in rects:
verts.extend(verts1)
codes.extend(codes1)
return verts, codes
def get_glyphs_with_font(self, font, s, glyph_map=None,
return_new_glyphs_only=False):
"""
convert the string *s* to vertices and codes using the
provided ttf font.
"""
# Mostly copied from backend_svg.py.
lastgind = None
currx = 0
xpositions = []
glyph_ids = []
if glyph_map is None:
glyph_map = OrderedDict()
if return_new_glyphs_only:
glyph_map_new = OrderedDict()
else:
glyph_map_new = glyph_map
# I'm not sure if I get kernings right. Needs to be verified. -JJL
for c in s:
ccode = ord(c)
gind = font.get_char_index(ccode)
if gind is None:
ccode = ord('?')
gind = 0
if lastgind is not None:
kern = font.get_kerning(lastgind, gind, KERNING_DEFAULT)
else:
kern = 0
glyph = font.load_char(ccode, flags=LOAD_NO_HINTING)
horiz_advance = (glyph.linearHoriAdvance / 65536.0)
char_id = self._get_char_id(font, ccode)
if char_id not in glyph_map:
glyph_map_new[char_id] = self.glyph_to_path(font)
currx += (kern / 64.0)
xpositions.append(currx)
glyph_ids.append(char_id)
currx += horiz_advance
lastgind = gind
ypositions = [0] * len(xpositions)
sizes = [1.] * len(xpositions)
rects = []
return (list(zip(glyph_ids, xpositions, ypositions, sizes)),
glyph_map_new, rects)
def get_glyphs_mathtext(self, prop, s, glyph_map=None,
return_new_glyphs_only=False):
"""
convert the string *s* to vertices and codes by parsing it with
mathtext.
"""
prop = prop.copy()
prop.set_size(self.FONT_SCALE)
width, height, descent, glyphs, rects = self.mathtext_parser.parse(
s, self.DPI, prop)
if not glyph_map:
glyph_map = OrderedDict()
if return_new_glyphs_only:
glyph_map_new = OrderedDict()
else:
glyph_map_new = glyph_map
xpositions = []
ypositions = []
glyph_ids = []
sizes = []
currx, curry = 0, 0
for font, fontsize, ccode, ox, oy in glyphs:
char_id = self._get_char_id(font, ccode)
if char_id not in glyph_map:
font.clear()
font.set_size(self.FONT_SCALE, self.DPI)
glyph = font.load_char(ccode, flags=LOAD_NO_HINTING)
glyph_map_new[char_id] = self.glyph_to_path(font)
xpositions.append(ox)
ypositions.append(oy)
glyph_ids.append(char_id)
size = fontsize / self.FONT_SCALE
sizes.append(size)
myrects = []
for ox, oy, w, h in rects:
vert1 = [(ox, oy), (ox, oy + h), (ox + w, oy + h),
(ox + w, oy), (ox, oy), (0, 0)]
code1 = [Path.MOVETO,
Path.LINETO, Path.LINETO, Path.LINETO, Path.LINETO,
Path.CLOSEPOLY]
myrects.append((vert1, code1))
return (list(zip(glyph_ids, xpositions, ypositions, sizes)),
glyph_map_new, myrects)
def get_texmanager(self):
"""
return the :class:`matplotlib.texmanager.TexManager` instance
"""
if self._texmanager is None:
from matplotlib.texmanager import TexManager
self._texmanager = TexManager()
return self._texmanager
def get_glyphs_tex(self, prop, s, glyph_map=None,
return_new_glyphs_only=False):
"""
convert the string *s* to vertices and codes using matplotlib's usetex
mode.
"""
        # codes are mostly borrowed from the pdf backend.
texmanager = self.get_texmanager()
if self.tex_font_map is None:
self.tex_font_map = dviread.PsfontsMap(
dviread.find_tex_file('pdftex.map'))
if self._adobe_standard_encoding is None:
self._adobe_standard_encoding = self._get_adobe_standard_encoding()
fontsize = prop.get_size_in_points()
if hasattr(texmanager, "get_dvi"):
dvifilelike = texmanager.get_dvi(s, self.FONT_SCALE)
dvi = dviread.DviFromFileLike(dvifilelike, self.DPI)
else:
dvifile = texmanager.make_dvi(s, self.FONT_SCALE)
dvi = dviread.Dvi(dvifile, self.DPI)
with dvi:
page = next(iter(dvi))
if glyph_map is None:
glyph_map = OrderedDict()
if return_new_glyphs_only:
glyph_map_new = OrderedDict()
else:
glyph_map_new = glyph_map
glyph_ids, xpositions, ypositions, sizes = [], [], [], []
# Gather font information and do some setup for combining
# characters into strings.
# oldfont, seq = None, []
for x1, y1, dvifont, glyph, width in page.text:
font_and_encoding = self._ps_fontd.get(dvifont.texname)
font_bunch = self.tex_font_map[dvifont.texname]
if font_and_encoding is None:
if font_bunch.filename is None:
raise ValueError(
("No usable font file found for %s (%s). "
"The font may lack a Type-1 version.")
% (font_bunch.psname, dvifont.texname))
font = get_font(font_bunch.filename)
for charmap_name, charmap_code in [("ADOBE_CUSTOM",
1094992451),
("ADOBE_STANDARD",
1094995778)]:
try:
font.select_charmap(charmap_code)
except (ValueError, RuntimeError):
pass
else:
break
else:
charmap_name = ""
warnings.warn("No supported encoding in font (%s)." %
font_bunch.filename)
if charmap_name == "ADOBE_STANDARD" and font_bunch.encoding:
enc0 = dviread.Encoding(font_bunch.encoding)
enc = {i: self._adobe_standard_encoding.get(c, None)
for i, c in enumerate(enc0.encoding)}
else:
enc = {}
self._ps_fontd[dvifont.texname] = font, enc
else:
font, enc = font_and_encoding
ft2font_flag = LOAD_TARGET_LIGHT
char_id = self._get_char_id_ps(font, glyph)
if char_id not in glyph_map:
font.clear()
font.set_size(self.FONT_SCALE, self.DPI)
if enc:
charcode = enc.get(glyph, None)
else:
charcode = glyph
if charcode is not None:
glyph0 = font.load_char(charcode, flags=ft2font_flag)
else:
warnings.warn("The glyph (%d) of font (%s) cannot be "
"converted with the encoding. Glyph may "
"be wrong" % (glyph, font_bunch.filename))
glyph0 = font.load_char(glyph, flags=ft2font_flag)
glyph_map_new[char_id] = self.glyph_to_path(font)
glyph_ids.append(char_id)
xpositions.append(x1)
ypositions.append(y1)
sizes.append(dvifont.size / self.FONT_SCALE)
myrects = []
for ox, oy, h, w in page.boxes:
vert1 = [(ox, oy), (ox + w, oy), (ox + w, oy + h),
(ox, oy + h), (ox, oy), (0, 0)]
code1 = [Path.MOVETO,
Path.LINETO, Path.LINETO, Path.LINETO, Path.LINETO,
Path.CLOSEPOLY]
myrects.append((vert1, code1))
return (list(zip(glyph_ids, xpositions, ypositions, sizes)),
glyph_map_new, myrects)
text_to_path = TextToPath()
class TextPath(Path):
"""
Create a path from the text.
"""
def __init__(self, xy, s, size=None, prop=None,
_interpolation_steps=1, usetex=False,
*kl, **kwargs):
"""
        Create a path from the text. Note that it simply is a path,
        not an artist. You need to use the PathPatch (or other
        artists) to draw this path onto the canvas. Pass *usetex=True*
        to render the string with LaTeX.
xy : position of the text.
s : text
size : font size
prop : font property
"""
if prop is None:
prop = FontProperties()
if size is None:
size = prop.get_size_in_points()
self._xy = xy
self.set_size(size)
self._cached_vertices = None
self._vertices, self._codes = self.text_get_vertices_codes(
prop, s,
usetex=usetex)
self._should_simplify = False
self._simplify_threshold = rcParams['path.simplify_threshold']
self._has_nonfinite = False
self._interpolation_steps = _interpolation_steps
def set_size(self, size):
"""
set the size of the text
"""
self._size = size
self._invalid = True
def get_size(self):
"""
get the size of the text
"""
return self._size
def _get_vertices(self):
"""
Return the cached path after updating it if necessary.
"""
self._revalidate_path()
return self._cached_vertices
def _get_codes(self):
"""
Return the codes
"""
return self._codes
vertices = property(_get_vertices)
codes = property(_get_codes)
def _revalidate_path(self):
"""
update the path if necessary.
        The path for the text is initially created with the font size
        of FONT_SCALE, and this path is rescaled to the requested size
        when necessary.
"""
if (self._invalid or
(self._cached_vertices is None)):
tr = Affine2D().scale(
self._size / text_to_path.FONT_SCALE,
self._size / text_to_path.FONT_SCALE).translate(*self._xy)
self._cached_vertices = tr.transform(self._vertices)
self._invalid = False
def is_math_text(self, s):
"""
        Return a (string, ismath) tuple, where *ismath* tells whether the
        given string *s* should be parsed as mathtext ('TeX' when the
        text.usetex rcParam is enabled).
"""
# copied from Text.is_math_text -JJL
# Did we find an even number of non-escaped dollar signs?
        # If so, treat it as math text.
dollar_count = s.count(r'$') - s.count(r'\$')
even_dollars = (dollar_count > 0 and dollar_count % 2 == 0)
if rcParams['text.usetex']:
return s, 'TeX'
if even_dollars:
return s, True
else:
return s.replace(r'\$', '$'), False
def text_get_vertices_codes(self, prop, s, usetex):
"""
convert the string *s* to vertices and codes using the
provided font property *prop*. Mostly copied from
backend_svg.py.
"""
if usetex:
verts, codes = text_to_path.get_text_path(prop, s, usetex=True)
else:
clean_line, ismath = self.is_math_text(s)
verts, codes = text_to_path.get_text_path(prop, clean_line,
ismath=ismath)
return verts, codes
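# Editor's note: an illustrative sketch added for this document, not part of
# the original module.  It converts a string into a Path and draws it with a
# PathPatch; the font family, size and axis limits below are arbitrary
# assumptions.
def _example_textpath():
    from matplotlib.patches import PathPatch
    import matplotlib.pyplot as plt
    tp = TextPath((10, 20), "matplotlib", size=30,
                  prop=FontProperties(family="sans-serif"))
    fig, ax = plt.subplots()
    ax.add_patch(PathPatch(tp, facecolor="0.3", edgecolor="none"))
    ax.set_xlim(0, 250)
    ax.set_ylim(0, 70)
    return fig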
| 16,879 | 30.433892 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/image.py
|
"""
The image module supports basic image loading, rescaling and display
operations.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves.urllib.parse import urlparse
from six.moves.urllib.request import urlopen
from io import BytesIO
from math import ceil
import os
import logging
import numpy as np
from matplotlib import rcParams
import matplotlib.artist as martist
from matplotlib.artist import allow_rasterization
import matplotlib.colors as mcolors
import matplotlib.cm as cm
import matplotlib.cbook as cbook
# For clarity, names from _image are given explicitly in this module:
import matplotlib._image as _image
import matplotlib._png as _png
# For user convenience, the names from _image are also imported into
# the image namespace:
from matplotlib._image import *
from matplotlib.transforms import (Affine2D, BboxBase, Bbox, BboxTransform,
IdentityTransform, TransformedBbox)
_log = logging.getLogger(__name__)
# map interpolation strings to module constants
_interpd_ = {
'none': _image.NEAREST, # fall back to nearest when not supported
'nearest': _image.NEAREST,
'bilinear': _image.BILINEAR,
'bicubic': _image.BICUBIC,
'spline16': _image.SPLINE16,
'spline36': _image.SPLINE36,
'hanning': _image.HANNING,
'hamming': _image.HAMMING,
'hermite': _image.HERMITE,
'kaiser': _image.KAISER,
'quadric': _image.QUADRIC,
'catrom': _image.CATROM,
'gaussian': _image.GAUSSIAN,
'bessel': _image.BESSEL,
'mitchell': _image.MITCHELL,
'sinc': _image.SINC,
'lanczos': _image.LANCZOS,
'blackman': _image.BLACKMAN,
}
interpolations_names = set(_interpd_)
def composite_images(images, renderer, magnification=1.0):
"""
Composite a number of RGBA images into one. The images are
composited in the order in which they appear in the `images` list.
Parameters
----------
images : list of Images
Each must have a `make_image` method. For each image,
`can_composite` should return `True`, though this is not
enforced by this function. Each image must have a purely
affine transformation with no shear.
renderer : RendererBase instance
magnification : float
The additional magnification to apply for the renderer in use.
Returns
-------
tuple : image, offset_x, offset_y
Returns the tuple:
- image: A numpy array of the same type as the input images.
- offset_x, offset_y: The offset of the image (left, bottom)
in the output figure.
"""
if len(images) == 0:
return np.empty((0, 0, 4), dtype=np.uint8), 0, 0
parts = []
bboxes = []
for image in images:
data, x, y, trans = image.make_image(renderer, magnification)
if data is not None:
x *= magnification
y *= magnification
parts.append((data, x, y, image.get_alpha() or 1.0))
bboxes.append(
Bbox([[x, y], [x + data.shape[1], y + data.shape[0]]]))
if len(parts) == 0:
return np.empty((0, 0, 4), dtype=np.uint8), 0, 0
bbox = Bbox.union(bboxes)
output = np.zeros(
(int(bbox.height), int(bbox.width), 4), dtype=np.uint8)
for data, x, y, alpha in parts:
trans = Affine2D().translate(x - bbox.x0, y - bbox.y0)
_image.resample(data, output, trans, _image.NEAREST,
resample=False, alpha=alpha)
return output, bbox.x0 / magnification, bbox.y0 / magnification
def _draw_list_compositing_images(
renderer, parent, artists, suppress_composite=None):
"""
Draw a sorted list of artists, compositing images into a single
image where possible.
For internal matplotlib use only: It is here to reduce duplication
between `Figure.draw` and `Axes.draw`, but otherwise should not be
generally useful.
"""
has_images = any(isinstance(x, _ImageBase) for x in artists)
# override the renderer default if suppressComposite is not None
not_composite = (suppress_composite if suppress_composite is not None
else renderer.option_image_nocomposite())
if not_composite or not has_images:
for a in artists:
a.draw(renderer)
else:
# Composite any adjacent images together
image_group = []
mag = renderer.get_image_magnification()
def flush_images():
if len(image_group) == 1:
image_group[0].draw(renderer)
elif len(image_group) > 1:
data, l, b = composite_images(image_group, renderer, mag)
if data.size != 0:
gc = renderer.new_gc()
gc.set_clip_rectangle(parent.bbox)
gc.set_clip_path(parent.get_clip_path())
renderer.draw_image(gc, np.round(l), np.round(b), data)
gc.restore()
del image_group[:]
for a in artists:
if isinstance(a, _ImageBase) and a.can_composite():
image_group.append(a)
else:
flush_images()
a.draw(renderer)
flush_images()
def _rgb_to_rgba(A):
"""
Convert an RGB image to RGBA, as required by the image resample C++
extension.
"""
rgba = np.zeros((A.shape[0], A.shape[1], 4), dtype=A.dtype)
rgba[:, :, :3] = A
if rgba.dtype == np.uint8:
rgba[:, :, 3] = 255
else:
rgba[:, :, 3] = 1.0
return rgba
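# Editor's note: an illustrative sketch added for this document, not part of
# the original module.  A 1x2 RGB uint8 image gains a fully opaque alpha
# channel (255 for uint8 input, 1.0 for float input).
def _example_rgb_to_rgba():
    rgb = np.array([[[255, 0, 0], [0, 255, 0]]], dtype=np.uint8)
    rgba = _rgb_to_rgba(rgb)
    return rgba.shape, rgba[0, 0, 3]    # (1, 2, 4), 255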
class _ImageBase(martist.Artist, cm.ScalarMappable):
zorder = 0
@property
@cbook.deprecated("2.1")
def _interpd(self):
return _interpd_
@property
@cbook.deprecated("2.1")
def _interpdr(self):
return {v: k for k, v in six.iteritems(_interpd_)}
@property
@cbook.deprecated("2.1", alternative="mpl.image.interpolation_names")
def iterpnames(self):
return interpolations_names
def __str__(self):
return "AxesImage(%g,%g;%gx%g)" % tuple(self.axes.bbox.bounds)
def __init__(self, ax,
cmap=None,
norm=None,
interpolation=None,
origin=None,
filternorm=1,
filterrad=4.0,
resample=False,
**kwargs
):
"""
interpolation and cmap default to their rc settings
cmap is a colors.Colormap instance
norm is a colors.Normalize instance to map luminance to 0-1
extent is data axes (left, right, bottom, top) for making image plots
registered with data plots. Default is to label the pixel
centers with the zero-based row and column indices.
Additional kwargs are matplotlib.artist properties
"""
martist.Artist.__init__(self)
cm.ScalarMappable.__init__(self, norm, cmap)
self._mouseover = True
if origin is None:
origin = rcParams['image.origin']
self.origin = origin
self.set_filternorm(filternorm)
self.set_filterrad(filterrad)
self.set_interpolation(interpolation)
self.set_resample(resample)
self.axes = ax
self._imcache = None
self.update(kwargs)
def __getstate__(self):
state = super(_ImageBase, self).__getstate__()
# We can't pickle the C Image cached object.
state['_imcache'] = None
return state
def get_size(self):
"""Get the numrows, numcols of the input image"""
if self._A is None:
raise RuntimeError('You must first set the image array')
return self._A.shape[:2]
def set_alpha(self, alpha):
"""
Set the alpha value used for blending - not supported on
all backends
ACCEPTS: float
"""
martist.Artist.set_alpha(self, alpha)
self._imcache = None
def changed(self):
"""
Call this whenever the mappable is changed so observers can
update state
"""
self._imcache = None
self._rgbacache = None
cm.ScalarMappable.changed(self)
def _make_image(self, A, in_bbox, out_bbox, clip_bbox, magnification=1.0,
unsampled=False, round_to_pixel_border=True):
"""
Normalize, rescale and color the image `A` from the given
in_bbox (in data space), to the given out_bbox (in pixel
space) clipped to the given clip_bbox (also in pixel space),
and magnified by the magnification factor.
`A` may be a greyscale image (MxN) with a dtype of `float32`,
`float64`, `uint16` or `uint8`, or an RGBA image (MxNx4) with
a dtype of `float32`, `float64`, or `uint8`.
If `unsampled` is True, the image will not be scaled, but an
appropriate affine transformation will be returned instead.
If `round_to_pixel_border` is True, the output image size will
be rounded to the nearest pixel boundary. This makes the
images align correctly with the axes. It should not be used
in cases where you want exact scaling, however, such as
FigureImage.
Returns the resulting (image, x, y, trans), where (x, y) is
the upper left corner of the result in pixel space, and
`trans` is the affine transformation from the image to pixel
space.
"""
if A is None:
raise RuntimeError('You must first set the image '
'array or the image attribute')
if A.size == 0:
raise RuntimeError("_make_image must get a non-empty image. "
"Your Artist's draw method must filter before "
"this method is called.")
clipped_bbox = Bbox.intersection(out_bbox, clip_bbox)
if clipped_bbox is None:
return None, 0, 0, None
out_width_base = clipped_bbox.width * magnification
out_height_base = clipped_bbox.height * magnification
if out_width_base == 0 or out_height_base == 0:
return None, 0, 0, None
if self.origin == 'upper':
# Flip the input image using a transform. This avoids the
# problem with flipping the array, which results in a copy
# when it is converted to contiguous in the C wrapper
t0 = Affine2D().translate(0, -A.shape[0]).scale(1, -1)
else:
t0 = IdentityTransform()
t0 += (
Affine2D()
.scale(
in_bbox.width / A.shape[1],
in_bbox.height / A.shape[0])
.translate(in_bbox.x0, in_bbox.y0)
+ self.get_transform())
t = (t0
+ Affine2D().translate(
-clipped_bbox.x0,
-clipped_bbox.y0)
.scale(magnification, magnification))
# So that the image is aligned with the edge of the axes, we want
# to round up the output width to the next integer. This also
# means scaling the transform just slightly to account for the
# extra subpixel.
if (t.is_affine and round_to_pixel_border and
(out_width_base % 1.0 != 0.0 or out_height_base % 1.0 != 0.0)):
out_width = int(ceil(out_width_base))
out_height = int(ceil(out_height_base))
extra_width = (out_width - out_width_base) / out_width_base
extra_height = (out_height - out_height_base) / out_height_base
t += Affine2D().scale(1.0 + extra_width, 1.0 + extra_height)
else:
out_width = int(out_width_base)
out_height = int(out_height_base)
if not unsampled:
if A.ndim not in (2, 3):
raise ValueError("Invalid dimensions, got {}".format(A.shape))
if A.ndim == 2:
# if we are a 2D array, then we are running through the
# norm + colormap transformation. However, in general the
# input data is not going to match the size on the screen so we
# have to resample to the correct number of pixels
                # TODO: slice the input array first
inp_dtype = A.dtype
a_min = A.min()
a_max = A.max()
# figure out the type we should scale to. For floats,
# leave as is. For integers cast to an appropriate-sized
# float. Small integers get smaller floats in an attempt
# to keep the memory footprint reasonable.
if a_min is np.ma.masked:
# all masked, so values don't matter
a_min, a_max = np.int32(0), np.int32(1)
if inp_dtype.kind == 'f':
scaled_dtype = A.dtype
else:
# probably an integer of some type.
da = a_max.astype(np.float64) - a_min.astype(np.float64)
if da > 1e8:
# give more breathing room if a big dynamic range
scaled_dtype = np.float64
else:
scaled_dtype = np.float32
# scale the input data to [.1, .9]. The Agg
# interpolators clip to [0, 1] internally, use a
# smaller input scale to identify which of the
                # interpolated points should be flagged as
# over / under.
# This may introduce numeric instabilities in very broadly
# scaled data
A_scaled = np.empty(A.shape, dtype=scaled_dtype)
A_scaled[:] = A
# clip scaled data around norm if necessary.
# This is necessary for big numbers at the edge of
# float64's ability to represent changes. Applying
# a norm first would be good, but ruins the interpolation
# of over numbers.
if self.norm.vmin is not None and self.norm.vmax is not None:
dv = (np.float64(self.norm.vmax) -
np.float64(self.norm.vmin))
vmid = self.norm.vmin + dv / 2
newmin = vmid - dv * 1.e7
if newmin < a_min:
newmin = None
else:
a_min = np.float64(newmin)
newmax = vmid + dv * 1.e7
if newmax > a_max:
newmax = None
else:
a_max = np.float64(newmax)
if newmax is not None or newmin is not None:
A_scaled = np.clip(A_scaled, newmin, newmax)
A_scaled -= a_min
# a_min and a_max might be ndarray subclasses so use
# asscalar to ensure they are scalars to avoid errors
a_min = np.asscalar(a_min.astype(scaled_dtype))
a_max = np.asscalar(a_max.astype(scaled_dtype))
if a_min != a_max:
A_scaled /= ((a_max - a_min) / 0.8)
A_scaled += 0.1
A_resampled = np.zeros((out_height, out_width),
dtype=A_scaled.dtype)
# resample the input data to the correct resolution and shape
_image.resample(A_scaled, A_resampled,
t,
_interpd_[self.get_interpolation()],
self.get_resample(), 1.0,
self.get_filternorm() or 0.0,
self.get_filterrad() or 0.0)
# we are done with A_scaled now, remove from namespace
# to be sure!
del A_scaled
# un-scale the resampled data to approximately the
# original range things that interpolated to above /
# below the original min/max will still be above /
# below, but possibly clipped in the case of higher order
# interpolation + drastically changing data.
A_resampled -= 0.1
if a_min != a_max:
A_resampled *= ((a_max - a_min) / 0.8)
A_resampled += a_min
# if using NoNorm, cast back to the original datatype
if isinstance(self.norm, mcolors.NoNorm):
A_resampled = A_resampled.astype(A.dtype)
mask = np.empty(A.shape, dtype=np.float32)
if A.mask.shape == A.shape:
# this is the case of a nontrivial mask
mask[:] = np.where(A.mask, np.float32(np.nan),
np.float32(1))
else:
mask[:] = 1
# we always have to interpolate the mask to account for
# non-affine transformations
out_mask = np.zeros((out_height, out_width),
dtype=mask.dtype)
_image.resample(mask, out_mask,
t,
_interpd_[self.get_interpolation()],
True, 1,
self.get_filternorm() or 0.0,
self.get_filterrad() or 0.0)
# we are done with the mask, delete from namespace to be sure!
del mask
# Agg updates the out_mask in place. If the pixel has
# no image data it will not be updated (and still be 0
                # as we initialized it); if any input data that would go
                # into that output pixel is masked, it will be `nan`; if all
# the input data for a pixel is good it will be 1, and
# if there is _some_ good data in that output pixel it
# will be between [0, 1] (such as a rotated image).
out_alpha = np.array(out_mask)
out_mask = np.isnan(out_mask)
out_alpha[out_mask] = 1
# mask and run through the norm
output = self.norm(np.ma.masked_array(A_resampled, out_mask))
else:
# Always convert to RGBA, even if only RGB input
if A.shape[2] == 3:
A = _rgb_to_rgba(A)
elif A.shape[2] != 4:
raise ValueError("Invalid dimensions, got %s" % (A.shape,))
output = np.zeros((out_height, out_width, 4), dtype=A.dtype)
alpha = self.get_alpha()
if alpha is None:
alpha = 1.0
_image.resample(
A, output, t, _interpd_[self.get_interpolation()],
self.get_resample(), alpha,
self.get_filternorm() or 0.0, self.get_filterrad() or 0.0)
# at this point output is either a 2D array of normed data
# (of int or float)
# or an RGBA array of re-sampled input
output = self.to_rgba(output, bytes=True, norm=False)
# output is now a correctly sized RGBA array of uint8
# Apply alpha *after* if the input was greyscale without a mask
if A.ndim == 2:
alpha = self.get_alpha()
if alpha is None:
alpha = 1
alpha_channel = output[:, :, 3]
alpha_channel[:] = np.asarray(
np.asarray(alpha_channel, np.float32) * out_alpha * alpha,
np.uint8)
else:
if self._imcache is None:
self._imcache = self.to_rgba(A, bytes=True, norm=(A.ndim == 2))
output = self._imcache
# Subset the input image to only the part that will be
# displayed
subset = TransformedBbox(
clip_bbox, t0.frozen().inverted()).frozen()
output = output[
int(max(subset.ymin, 0)):
int(min(subset.ymax + 1, output.shape[0])),
int(max(subset.xmin, 0)):
int(min(subset.xmax + 1, output.shape[1]))]
t = Affine2D().translate(
int(max(subset.xmin, 0)), int(max(subset.ymin, 0))) + t
return output, clipped_bbox.x0, clipped_bbox.y0, t
def make_image(self, renderer, magnification=1.0, unsampled=False):
raise RuntimeError('The make_image method must be overridden.')
def _draw_unsampled_image(self, renderer, gc):
"""
draw unsampled image. The renderer should support a draw_image method
with scale parameter.
"""
im, l, b, trans = self.make_image(renderer, unsampled=True)
if im is None:
return
trans = Affine2D().scale(im.shape[1], im.shape[0]) + trans
renderer.draw_image(gc, l, b, im, trans)
def _check_unsampled_image(self, renderer):
"""
return True if the image is better to be drawn unsampled.
The derived class needs to override it.
"""
return False
@allow_rasterization
def draw(self, renderer, *args, **kwargs):
# if not visible, declare victory and return
if not self.get_visible():
self.stale = False
return
# for empty images, there is nothing to draw!
if self.get_array().size == 0:
self.stale = False
return
# actually render the image.
gc = renderer.new_gc()
self._set_gc_clip(gc)
gc.set_alpha(self.get_alpha())
gc.set_url(self.get_url())
gc.set_gid(self.get_gid())
if (self._check_unsampled_image(renderer) and
self.get_transform().is_affine):
self._draw_unsampled_image(renderer, gc)
else:
im, l, b, trans = self.make_image(
renderer, renderer.get_image_magnification())
if im is not None:
renderer.draw_image(gc, l, b, im)
gc.restore()
self.stale = False
def contains(self, mouseevent):
"""
Test whether the mouse event occurred within the image.
"""
if callable(self._contains):
return self._contains(self, mouseevent)
# TODO: make sure this is consistent with patch and patch
# collection on nonlinear transformed coordinates.
# TODO: consider returning image coordinates (shouldn't
# be too difficult given that the image is rectilinear
x, y = mouseevent.xdata, mouseevent.ydata
xmin, xmax, ymin, ymax = self.get_extent()
if xmin > xmax:
xmin, xmax = xmax, xmin
if ymin > ymax:
ymin, ymax = ymax, ymin
if x is not None and y is not None:
inside = (xmin <= x <= xmax) and (ymin <= y <= ymax)
else:
inside = False
return inside, {}
def write_png(self, fname):
"""Write the image to png file with fname"""
im = self.to_rgba(self._A[::-1] if self.origin == 'lower' else self._A,
bytes=True, norm=True)
_png.write_png(im, fname)
def set_data(self, A):
"""
Set the image array.
ACCEPTS: numpy/PIL Image A
Note that this function does *not* update the normalization used.
"""
# check if data is PIL Image without importing Image
if hasattr(A, 'getpixel'):
if A.mode == 'L':
# greyscale image, but our logic assumes rgba:
self._A = pil_to_array(A.convert('RGBA'))
else:
self._A = pil_to_array(A)
else:
self._A = cbook.safe_masked_invalid(A, copy=True)
if (self._A.dtype != np.uint8 and
not np.can_cast(self._A.dtype, float, "same_kind")):
raise TypeError("Image data cannot be converted to float")
if not (self._A.ndim == 2
or self._A.ndim == 3 and self._A.shape[-1] in [3, 4]):
raise TypeError("Invalid dimensions for image data")
if self._A.ndim == 3:
# If the input data has values outside the valid range (after
# normalisation), we issue a warning and then clip X to the bounds
# - otherwise casting wraps extreme values, hiding outliers and
# making reliable interpretation impossible.
high = 255 if np.issubdtype(self._A.dtype, np.integer) else 1
if self._A.min() < 0 or high < self._A.max():
_log.warning(
'Clipping input data to the valid range for imshow with '
'RGB data ([0..1] for floats or [0..255] for integers).'
)
self._A = np.clip(self._A, 0, high)
# Cast unsupported integer types to uint8
if self._A.dtype != np.uint8 and np.issubdtype(self._A.dtype,
np.integer):
self._A = self._A.astype(np.uint8)
self._imcache = None
self._rgbacache = None
self.stale = True
def set_array(self, A):
"""
Retained for backwards compatibility - use set_data instead
ACCEPTS: numpy array A or PIL Image
"""
# This also needs to be here to override the inherited
# cm.ScalarMappable.set_array method so it is not invoked
# by mistake.
self.set_data(A)
def get_interpolation(self):
"""
Return the interpolation method the image uses when resizing.
One of 'nearest', 'bilinear', 'bicubic', 'spline16', 'spline36',
'hanning', 'hamming', 'hermite', 'kaiser', 'quadric', 'catrom',
'gaussian', 'bessel', 'mitchell', 'sinc', 'lanczos', or 'none'.
"""
return self._interpolation
def set_interpolation(self, s):
"""
Set the interpolation method the image uses when resizing.
if None, use a value from rc setting. If 'none', the image is
shown as is without interpolating. 'none' is only supported in
agg, ps and pdf backends and will fall back to 'nearest' mode
for other backends.
.. ACCEPTS: ['nearest' | 'bilinear' | 'bicubic' | 'spline16' |
'spline36' | 'hanning' | 'hamming' | 'hermite' | 'kaiser' |
'quadric' | 'catrom' | 'gaussian' | 'bessel' | 'mitchell' |
'sinc' | 'lanczos' | 'none' ]
"""
if s is None:
s = rcParams['image.interpolation']
s = s.lower()
if s not in _interpd_:
raise ValueError('Illegal interpolation string')
self._interpolation = s
self.stale = True
def can_composite(self):
"""
Returns `True` if the image can be composited with its neighbors.
"""
trans = self.get_transform()
return (
self._interpolation != 'none' and
trans.is_affine and
trans.is_separable)
def set_resample(self, v):
"""
Set whether or not image resampling is used.
ACCEPTS: True|False
"""
if v is None:
v = rcParams['image.resample']
self._resample = v
self.stale = True
def get_resample(self):
"""Return the image resample boolean."""
return self._resample
def set_filternorm(self, filternorm):
"""
Set whether the resize filter norms the weights -- see
help for imshow
ACCEPTS: 0 or 1
"""
if filternorm:
self._filternorm = 1
else:
self._filternorm = 0
self.stale = True
def get_filternorm(self):
"""Return the filternorm setting."""
return self._filternorm
def set_filterrad(self, filterrad):
"""
        Set the resize filter radius, only applicable to some
interpolation schemes -- see help for imshow
ACCEPTS: positive float
"""
r = float(filterrad)
if r <= 0:
raise ValueError("The filter radius must be a positive number")
self._filterrad = r
self.stale = True
def get_filterrad(self):
"""Return the filterrad setting."""
return self._filterrad
class AxesImage(_ImageBase):
def __str__(self):
return "AxesImage(%g,%g;%gx%g)" % tuple(self.axes.bbox.bounds)
def __init__(self, ax,
cmap=None,
norm=None,
interpolation=None,
origin=None,
extent=None,
filternorm=1,
filterrad=4.0,
resample=False,
**kwargs
):
"""
interpolation and cmap default to their rc settings
cmap is a colors.Colormap instance
norm is a colors.Normalize instance to map luminance to 0-1
extent is data axes (left, right, bottom, top) for making image plots
registered with data plots. Default is to label the pixel
centers with the zero-based row and column indices.
Additional kwargs are matplotlib.artist properties
"""
self._extent = extent
super(AxesImage, self).__init__(
ax,
cmap=cmap,
norm=norm,
interpolation=interpolation,
origin=origin,
filternorm=filternorm,
filterrad=filterrad,
resample=resample,
**kwargs
)
def get_window_extent(self, renderer=None):
x0, x1, y0, y1 = self._extent
bbox = Bbox.from_extents([x0, y0, x1, y1])
return bbox.transformed(self.axes.transData)
def make_image(self, renderer, magnification=1.0, unsampled=False):
trans = self.get_transform()
# image is created in the canvas coordinate.
x1, x2, y1, y2 = self.get_extent()
bbox = Bbox(np.array([[x1, y1], [x2, y2]]))
transformed_bbox = TransformedBbox(bbox, trans)
return self._make_image(
self._A, bbox, transformed_bbox, self.axes.bbox, magnification,
unsampled=unsampled)
def _check_unsampled_image(self, renderer):
"""
Return whether the image would be better drawn unsampled.
"""
return (self.get_interpolation() == "none"
and renderer.option_scale_image())
def set_extent(self, extent):
"""
extent is data axes (left, right, bottom, top) for making image plots
This updates ax.dataLim, and, if autoscaling, sets viewLim
to tightly fit the image, regardless of dataLim. Autoscaling
state is not changed, so following this with ax.autoscale_view
will redo the autoscaling in accord with dataLim.
"""
self._extent = xmin, xmax, ymin, ymax = extent
corners = (xmin, ymin), (xmax, ymax)
self.axes.update_datalim(corners)
self.sticky_edges.x[:] = [xmin, xmax]
self.sticky_edges.y[:] = [ymin, ymax]
if self.axes._autoscaleXon:
self.axes.set_xlim((xmin, xmax), auto=None)
if self.axes._autoscaleYon:
self.axes.set_ylim((ymin, ymax), auto=None)
self.stale = True
def get_extent(self):
"""Get the image extent: left, right, bottom, top"""
if self._extent is not None:
return self._extent
else:
sz = self.get_size()
numrows, numcols = sz
if self.origin == 'upper':
return (-0.5, numcols-0.5, numrows-0.5, -0.5)
else:
return (-0.5, numcols-0.5, -0.5, numrows-0.5)
def get_cursor_data(self, event):
"""Get the cursor data for a given event"""
xmin, xmax, ymin, ymax = self.get_extent()
if self.origin == 'upper':
ymin, ymax = ymax, ymin
arr = self.get_array()
data_extent = Bbox([[ymin, xmin], [ymax, xmax]])
array_extent = Bbox([[0, 0], arr.shape[:2]])
trans = BboxTransform(boxin=data_extent, boxout=array_extent)
y, x = event.ydata, event.xdata
point = trans.transform_point([y, x])
if any(np.isnan(point)):
return None
i, j = point.astype(int)
# Clip the coordinates at array bounds
if not (0 <= i < arr.shape[0]) or not (0 <= j < arr.shape[1]):
return None
else:
return arr[i, j]
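# Editor's note: an illustrative sketch added for this document, not part of
# the original module.  AxesImage instances are normally created through
# Axes.imshow rather than instantiated directly; the random data,
# interpolation and extent below are arbitrary assumptions.
def _example_axes_image():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    im = ax.imshow(np.random.rand(16, 16), interpolation='bilinear',
                   origin='lower', extent=(0, 1, 0, 1))
    fig.colorbar(im, ax=ax)
    return im                           # an AxesImage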
class NonUniformImage(AxesImage):
def __init__(self, ax, **kwargs):
"""
kwargs are identical to those for AxesImage, except
that 'nearest' and 'bilinear' are the only supported 'interpolation'
options.
"""
interp = kwargs.pop('interpolation', 'nearest')
super(NonUniformImage, self).__init__(ax, **kwargs)
self.set_interpolation(interp)
def _check_unsampled_image(self, renderer):
"""
return False. Do not use unsampled image.
"""
return False
def make_image(self, renderer, magnification=1.0, unsampled=False):
if self._A is None:
raise RuntimeError('You must first set the image array')
if unsampled:
raise ValueError('unsampled not supported on NonUniformImage')
A = self._A
if A.ndim == 2:
if A.dtype != np.uint8:
A = self.to_rgba(A, bytes=True)
self.is_grayscale = self.cmap.is_gray()
else:
A = np.repeat(A[:, :, np.newaxis], 4, 2)
A[:, :, 3] = 255
self.is_grayscale = True
else:
if A.dtype != np.uint8:
A = (255*A).astype(np.uint8)
if A.shape[2] == 3:
B = np.zeros(tuple(list(A.shape[0:2]) + [4]), np.uint8)
B[:, :, 0:3] = A
B[:, :, 3] = 255
A = B
self.is_grayscale = False
x0, y0, v_width, v_height = self.axes.viewLim.bounds
l, b, r, t = self.axes.bbox.extents
width = (np.round(r) + 0.5) - (np.round(l) - 0.5)
height = (np.round(t) + 0.5) - (np.round(b) - 0.5)
width *= magnification
height *= magnification
im = _image.pcolor(self._Ax, self._Ay, A,
int(height), int(width),
(x0, x0+v_width, y0, y0+v_height),
_interpd_[self._interpolation])
return im, l, b, IdentityTransform()
def set_data(self, x, y, A):
"""
Set the grid for the pixel centers, and the pixel values.
*x* and *y* are monotonic 1-D ndarrays of lengths N and M,
respectively, specifying pixel centers
*A* is an (M,N) ndarray or masked array of values to be
colormapped, or a (M,N,3) RGB array, or a (M,N,4) RGBA
array.
"""
x = np.array(x, np.float32)
y = np.array(y, np.float32)
A = cbook.safe_masked_invalid(A, copy=True)
if not (x.ndim == y.ndim == 1 and A.shape[0:2] == y.shape + x.shape):
raise TypeError("Axes don't match array shape")
if A.ndim not in [2, 3]:
raise TypeError("Can only plot 2D or 3D data")
if A.ndim == 3 and A.shape[2] not in [1, 3, 4]:
raise TypeError("3D arrays must have three (RGB) "
"or four (RGBA) color components")
if A.ndim == 3 and A.shape[2] == 1:
A.shape = A.shape[0:2]
self._A = A
self._Ax = x
self._Ay = y
self._imcache = None
self.stale = True
def set_array(self, *args):
raise NotImplementedError('Method not supported')
def set_interpolation(self, s):
"""
Parameters
----------
s : str, None
Either 'nearest', 'bilinear', or ``None``.
"""
if s is not None and s not in ('nearest', 'bilinear'):
raise NotImplementedError('Only nearest neighbor and '
'bilinear interpolations are supported')
AxesImage.set_interpolation(self, s)
def get_extent(self):
if self._A is None:
raise RuntimeError('Must set data first')
return self._Ax[0], self._Ax[-1], self._Ay[0], self._Ay[-1]
def set_filternorm(self, s):
pass
def set_filterrad(self, s):
pass
def set_norm(self, norm):
if self._A is not None:
raise RuntimeError('Cannot change colors after loading data')
super(NonUniformImage, self).set_norm(norm)
def set_cmap(self, cmap):
if self._A is not None:
raise RuntimeError('Cannot change colors after loading data')
super(NonUniformImage, self).set_cmap(cmap)
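# Editor's note: an illustrative sketch added for this document, not part of
# the original module.  NonUniformImage displays data whose pixel centers sit
# on an irregular grid and has to be appended to ``ax.images`` explicitly;
# the grid below is an arbitrary assumption.
def _example_nonuniform_image():
    import matplotlib.pyplot as plt
    x = np.array([0., 1., 3., 6., 10.])
    y = np.array([0., 2., 5., 9.])
    z = np.random.rand(y.size, x.size)
    fig, ax = plt.subplots()
    im = NonUniformImage(ax, interpolation='nearest')
    im.set_data(x, y, z)
    ax.images.append(im)
    ax.set_xlim(x[0], x[-1])
    ax.set_ylim(y[0], y[-1])
    return im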
class PcolorImage(AxesImage):
"""
Make a pcolor-style plot with an irregular rectangular grid.
This uses a variation of the original irregular image code,
and it is used by pcolorfast for the corresponding grid type.
"""
def __init__(self, ax,
x=None,
y=None,
A=None,
cmap=None,
norm=None,
**kwargs
):
"""
cmap defaults to its rc setting
cmap is a colors.Colormap instance
norm is a colors.Normalize instance to map luminance to 0-1
Additional kwargs are matplotlib.artist properties
"""
super(PcolorImage, self).__init__(ax, norm=norm, cmap=cmap)
self.update(kwargs)
if A is not None:
self.set_data(x, y, A)
def make_image(self, renderer, magnification=1.0, unsampled=False):
if self._A is None:
raise RuntimeError('You must first set the image array')
if unsampled:
raise ValueError('unsampled not supported on PColorImage')
fc = self.axes.patch.get_facecolor()
bg = mcolors.to_rgba(fc, 0)
bg = (np.array(bg)*255).astype(np.uint8)
l, b, r, t = self.axes.bbox.extents
width = (np.round(r) + 0.5) - (np.round(l) - 0.5)
height = (np.round(t) + 0.5) - (np.round(b) - 0.5)
# The extra cast-to-int is only needed for python2
width = int(np.round(width * magnification))
height = int(np.round(height * magnification))
if self._rgbacache is None:
A = self.to_rgba(self._A, bytes=True)
self._rgbacache = A
if self._A.ndim == 2:
self.is_grayscale = self.cmap.is_gray()
else:
A = self._rgbacache
vl = self.axes.viewLim
im = _image.pcolor2(self._Ax, self._Ay, A,
height,
width,
(vl.x0, vl.x1, vl.y0, vl.y1),
bg)
return im, l, b, IdentityTransform()
def _check_unsampled_image(self, renderer):
return False
def set_data(self, x, y, A):
"""
Set the grid for the rectangle boundaries, and the data values.
*x* and *y* are monotonic 1-D ndarrays of lengths N+1 and M+1,
respectively, specifying rectangle boundaries. If None,
they will be created as uniform arrays from 0 through N
and 0 through M, respectively.
*A* is an (M,N) ndarray or masked array of values to be
colormapped, or a (M,N,3) RGB array, or a (M,N,4) RGBA
array.
"""
A = cbook.safe_masked_invalid(A, copy=True)
if x is None:
x = np.arange(0, A.shape[1]+1, dtype=np.float64)
else:
x = np.array(x, np.float64).ravel()
if y is None:
y = np.arange(0, A.shape[0]+1, dtype=np.float64)
else:
y = np.array(y, np.float64).ravel()
if A.shape[:2] != (y.size-1, x.size-1):
raise ValueError(
"Axes don't match array shape. Got %s, expected %s." %
(A.shape[:2], (y.size - 1, x.size - 1)))
if A.ndim not in [2, 3]:
raise ValueError("A must be 2D or 3D")
if A.ndim == 3 and A.shape[2] == 1:
A.shape = A.shape[:2]
self.is_grayscale = False
if A.ndim == 3:
if A.shape[2] in [3, 4]:
if ((A[:, :, 0] == A[:, :, 1]).all() and
(A[:, :, 0] == A[:, :, 2]).all()):
self.is_grayscale = True
else:
raise ValueError("3D arrays must have RGB or RGBA as last dim")
# For efficient cursor readout, ensure x and y are increasing.
if x[-1] < x[0]:
x = x[::-1]
A = A[:, ::-1]
if y[-1] < y[0]:
y = y[::-1]
A = A[::-1]
self._A = A
self._Ax = x
self._Ay = y
self._rgbacache = None
self.stale = True
def set_array(self, *args):
raise NotImplementedError('Method not supported')
def get_cursor_data(self, event):
"""Get the cursor data for a given event"""
x, y = event.xdata, event.ydata
if (x < self._Ax[0] or x > self._Ax[-1] or
y < self._Ay[0] or y > self._Ay[-1]):
return None
j = np.searchsorted(self._Ax, x) - 1
i = np.searchsorted(self._Ay, y) - 1
try:
return self._A[i, j]
except IndexError:
return None
class FigureImage(_ImageBase):
zorder = 0
_interpolation = 'nearest'
def __init__(self, fig,
cmap=None,
norm=None,
offsetx=0,
offsety=0,
origin=None,
**kwargs
):
"""
cmap is a colors.Colormap instance
norm is a colors.Normalize instance to map luminance to 0-1
kwargs are an optional list of Artist keyword args
"""
super(FigureImage, self).__init__(
None,
norm=norm,
cmap=cmap,
origin=origin
)
self.figure = fig
self.ox = offsetx
self.oy = offsety
self.update(kwargs)
self.magnification = 1.0
def get_extent(self):
"""Get the image extent: left, right, bottom, top"""
numrows, numcols = self.get_size()
return (-0.5 + self.ox, numcols-0.5 + self.ox,
-0.5 + self.oy, numrows-0.5 + self.oy)
def make_image(self, renderer, magnification=1.0, unsampled=False):
fac = renderer.dpi/self.figure.dpi
# fac here is to account for pdf, eps, svg backends where
# figure.dpi is set to 72. This means we need to scale the
# image (using magification) and offset it appropriately.
bbox = Bbox([[self.ox/fac, self.oy/fac],
[(self.ox/fac + self._A.shape[1]),
(self.oy/fac + self._A.shape[0])]])
width, height = self.figure.get_size_inches()
width *= renderer.dpi
height *= renderer.dpi
clip = Bbox([[0, 0], [width, height]])
return self._make_image(
self._A, bbox, bbox, clip, magnification=magnification / fac,
unsampled=unsampled, round_to_pixel_border=False)
def set_data(self, A):
"""Set the image array."""
cm.ScalarMappable.set_array(self,
cbook.safe_masked_invalid(A, copy=True))
self.stale = True
class BboxImage(_ImageBase):
"""The Image class whose size is determined by the given bbox."""
def __init__(self, bbox,
cmap=None,
norm=None,
interpolation=None,
origin=None,
filternorm=1,
filterrad=4.0,
resample=False,
interp_at_native=True,
**kwargs
):
"""
cmap is a colors.Colormap instance
norm is a colors.Normalize instance to map luminance to 0-1
interp_at_native is a flag that determines whether or not
interpolation should still be applied when the image is
displayed at its native resolution. A common use case for this
is when displaying an image for annotational purposes; it is
treated similarly to Photoshop (interpolation is only used when
displaying the image at non-native resolutions).
kwargs are an optional list of Artist keyword args
"""
super(BboxImage, self).__init__(
None,
cmap=cmap,
norm=norm,
interpolation=interpolation,
origin=origin,
filternorm=filternorm,
filterrad=filterrad,
resample=resample,
**kwargs
)
self.bbox = bbox
self.interp_at_native = interp_at_native
self._transform = IdentityTransform()
def get_transform(self):
return self._transform
def get_window_extent(self, renderer=None):
if renderer is None:
renderer = self.get_figure()._cachedRenderer
if isinstance(self.bbox, BboxBase):
return self.bbox
elif callable(self.bbox):
return self.bbox(renderer)
else:
raise ValueError("unknown type of bbox")
def contains(self, mouseevent):
"""Test whether the mouse event occurred within the image."""
if callable(self._contains):
return self._contains(self, mouseevent)
if not self.get_visible(): # or self.get_figure()._renderer is None:
return False, {}
x, y = mouseevent.x, mouseevent.y
inside = self.get_window_extent().contains(x, y)
return inside, {}
def make_image(self, renderer, magnification=1.0, unsampled=False):
width, height = renderer.get_canvas_width_height()
bbox_in = self.get_window_extent(renderer).frozen()
bbox_in._points /= [width, height]
bbox_out = self.get_window_extent(renderer)
clip = Bbox([[0, 0], [width, height]])
self._transform = BboxTransform(Bbox([[0, 0], [1, 1]]), clip)
return self._make_image(
self._A,
bbox_in, bbox_out, clip, magnification, unsampled=unsampled)
def imread(fname, format=None):
"""
Read an image from a file into an array.
*fname* may be a string path, a valid URL, or a Python
file-like object. If using a file object, it must be opened in binary
mode.
If *format* is provided, will try to read file of that type,
otherwise the format is deduced from the filename. If nothing can
be deduced, PNG is tried.
Return value is a :class:`numpy.array`. For grayscale images, the
return array is MxN. For RGB images, the return value is MxNx3.
For RGBA images the return value is MxNx4.
matplotlib can only read PNGs natively, but if `PIL
<http://www.pythonware.com/products/pil/>`_ is installed, it will
use it to load the image and return an array (if possible) which
can be used with :func:`~matplotlib.pyplot.imshow`. Note, URL strings
may not be compatible with PIL. Check the PIL documentation for more
information.
"""
def pilread(fname):
"""try to load the image with PIL or return None"""
try:
from PIL import Image
except ImportError:
return None
with Image.open(fname) as image:
return pil_to_array(image)
handlers = {'png': _png.read_png, }
if format is None:
if isinstance(fname, six.string_types):
parsed = urlparse(fname)
# If the string is a URL, assume png
if len(parsed.scheme) > 1:
ext = 'png'
else:
basename, ext = os.path.splitext(fname)
ext = ext.lower()[1:]
elif hasattr(fname, 'name'):
basename, ext = os.path.splitext(fname.name)
ext = ext.lower()[1:]
else:
ext = 'png'
else:
ext = format
if ext not in handlers:
im = pilread(fname)
if im is None:
raise ValueError('Only know how to handle extensions: %s; '
'with Pillow installed matplotlib can handle '
'more images' % list(handlers))
return im
handler = handlers[ext]
# To handle Unicode filenames, we pass a file object to the PNG
# reader extension, since Python handles them quite well, but it's
# tricky in C.
if isinstance(fname, six.string_types):
parsed = urlparse(fname)
# If fname is a URL, download the data
if len(parsed.scheme) > 1:
fd = BytesIO(urlopen(fname).read())
return handler(fd)
else:
with open(fname, 'rb') as fd:
return handler(fd)
else:
return handler(fname)
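# Editor's note: an illustrative sketch added for this document, not part of
# the original module.  The filename is an arbitrary assumption; for a PNG
# the native reader returns a float array in [0, 1] with shape (M, N),
# (M, N, 3) or (M, N, 4).
def _example_imread():
    img = imread('example.png')
    return img.shape, img.dtype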
def imsave(fname, arr, vmin=None, vmax=None, cmap=None, format=None,
origin=None, dpi=100):
"""
    Save an array as an image file.
The output formats available depend on the backend being used.
Parameters
----------
fname : str or file-like
Path string to a filename, or a Python file-like object.
If *format* is *None* and *fname* is a string, the output
format is deduced from the extension of the filename.
arr : array-like
An MxN (luminance), MxNx3 (RGB) or MxNx4 (RGBA) array.
vmin, vmax: [ None | scalar ]
*vmin* and *vmax* set the color scaling for the image by fixing the
values that map to the colormap color limits. If either *vmin*
or *vmax* is None, that limit is determined from the *arr*
min/max value.
cmap : matplotlib.colors.Colormap, optional
For example, ``cm.viridis``. If ``None``, defaults to the
``image.cmap`` rcParam.
format : str
One of the file extensions supported by the active backend. Most
backends support png, pdf, ps, eps and svg.
origin : [ 'upper' | 'lower' ]
Indicates whether the ``(0, 0)`` index of the array is in the
upper left or lower left corner of the axes. Defaults to the
``image.origin`` rcParam.
dpi : int
The DPI to store in the metadata of the file. This does not affect the
resolution of the output image.
"""
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
if isinstance(fname, getattr(os, "PathLike", ())):
fname = os.fspath(fname)
if (format == 'png'
or (format is None
and isinstance(fname, six.string_types)
and fname.lower().endswith('.png'))):
image = AxesImage(None, cmap=cmap, origin=origin)
image.set_data(arr)
image.set_clim(vmin, vmax)
image.write_png(fname)
else:
fig = Figure(dpi=dpi, frameon=False)
FigureCanvas(fig)
fig.figimage(arr, cmap=cmap, vmin=vmin, vmax=vmax, origin=origin,
resize=True)
fig.savefig(fname, dpi=dpi, format=format, transparent=True)
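# Editor's note: an illustrative sketch added for this document, not part of
# the original module.  A small gradient is colormapped and written straight
# to disk, bypassing any figure or axes; the filename is an arbitrary
# assumption.
def _example_imsave():
    data = np.arange(100).reshape(10, 10)
    imsave('gradient.png', data, cmap=cm.viridis, origin='lower')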
def pil_to_array(pilImage):
"""Load a PIL image and return it as a numpy array.
Grayscale images are returned as ``(M, N)`` arrays. RGB images are
returned as ``(M, N, 3)`` arrays. RGBA images are returned as ``(M, N,
4)`` arrays.
"""
if pilImage.mode in ['RGBA', 'RGBX', 'RGB', 'L']:
        # return MxNx4 RGBA, MxNx3 RGB, or MxN luminance array
return np.asarray(pilImage)
elif pilImage.mode.startswith('I;16'):
# return MxN luminance array of uint16
raw = pilImage.tobytes('raw', pilImage.mode)
if pilImage.mode.endswith('B'):
x = np.fromstring(raw, '>u2')
else:
x = np.fromstring(raw, '<u2')
return x.reshape(pilImage.size[::-1]).astype('=u2')
else: # try to convert to an rgba image
try:
pilImage = pilImage.convert('RGBA')
except ValueError:
raise RuntimeError('Unknown image mode')
return np.asarray(pilImage) # return MxNx4 RGBA array
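# Editor's note: an illustrative sketch added for this document, not part of
# the original module.  It requires Pillow; the solid 10x10 RGB image below
# is an arbitrary assumption and comes back as a (10, 10, 3) uint8 array.
def _example_pil_to_array():
    from PIL import Image
    arr = pil_to_array(Image.new('RGB', (10, 10), color=(255, 0, 0)))
    return arr.shape, arr.dtype         # (10, 10, 3), uint8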
def thumbnail(infile, thumbfile, scale=0.1, interpolation='bilinear',
preview=False):
"""
    Make a thumbnail of the image in *infile* with output filename
    *thumbfile*.
*infile* the image file -- must be PNG or Pillow-readable if you
have `Pillow <http://python-pillow.org/>`_ installed
*thumbfile*
the thumbnail filename
*scale*
the scale factor for the thumbnail
*interpolation*
the interpolation scheme used in the resampling
*preview*
if True, the default backend (presumably a user interface
backend) will be used which will cause a figure to be raised
if :func:`~matplotlib.pyplot.show` is called. If it is False,
a pure image backend will be used depending on the extension,
'png'->FigureCanvasAgg, 'pdf'->FigureCanvasPdf,
'svg'->FigureCanvasSVG
See examples/misc/image_thumbnail.py.
.. htmlonly::
:ref:`sphx_glr_gallery_misc_image_thumbnail_sgskip.py`
Return value is the figure instance containing the thumbnail
"""
basedir, basename = os.path.split(infile)
baseout, extout = os.path.splitext(thumbfile)
im = imread(infile)
rows, cols, depth = im.shape
# this doesn't really matter, it will cancel in the end, but we
# need it for the mpl API
dpi = 100
height = rows / dpi * scale
width = cols / dpi * scale
extension = extout.lower()
if preview:
# let the UI backend do everything
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(width, height), dpi=dpi)
else:
if extension == '.png':
from matplotlib.backends.backend_agg \
import FigureCanvasAgg as FigureCanvas
elif extension == '.pdf':
from matplotlib.backends.backend_pdf \
import FigureCanvasPdf as FigureCanvas
elif extension == '.svg':
from matplotlib.backends.backend_svg \
import FigureCanvasSVG as FigureCanvas
else:
raise ValueError("Can only handle "
"extensions 'png', 'svg' or 'pdf'")
from matplotlib.figure import Figure
fig = Figure(figsize=(width, height), dpi=dpi)
FigureCanvas(fig)
ax = fig.add_axes([0, 0, 1, 1], aspect='auto',
frameon=False, xticks=[], yticks=[])
basename, ext = os.path.splitext(basename)
ax.imshow(im, aspect='auto', resample=True, interpolation=interpolation)
fig.savefig(thumbfile, dpi=dpi)
return fig
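# Editor's note: an illustrative sketch added for this document, not part of
# the original module.  It shrinks a PNG to 10% of its size using a pure
# image backend (preview=False); both filenames are arbitrary assumptions.
def _example_thumbnail():
    fig = thumbnail('large_image.png', 'large_image_thumb.png',
                    scale=0.1, interpolation='bilinear', preview=False)
    return fig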
| 55,141 | 34.760052 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_mathtext_data.py
|
"""
font data tables for truetype and afm computer modern fonts
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
latex_to_bakoma = {
'\\__sqrt__' : ('cmex10', 0x70),
'\\bigcap' : ('cmex10', 0x5c),
'\\bigcup' : ('cmex10', 0x5b),
'\\bigodot' : ('cmex10', 0x4b),
'\\bigoplus' : ('cmex10', 0x4d),
'\\bigotimes' : ('cmex10', 0x4f),
'\\biguplus' : ('cmex10', 0x5d),
'\\bigvee' : ('cmex10', 0x5f),
'\\bigwedge' : ('cmex10', 0x5e),
'\\coprod' : ('cmex10', 0x61),
'\\int' : ('cmex10', 0x5a),
'\\langle' : ('cmex10', 0xad),
'\\leftangle' : ('cmex10', 0xad),
'\\leftbrace' : ('cmex10', 0xa9),
'\\oint' : ('cmex10', 0x49),
'\\prod' : ('cmex10', 0x59),
'\\rangle' : ('cmex10', 0xae),
'\\rightangle' : ('cmex10', 0xae),
'\\rightbrace' : ('cmex10', 0xaa),
'\\sum' : ('cmex10', 0x58),
'\\widehat' : ('cmex10', 0x62),
'\\widetilde' : ('cmex10', 0x65),
'\\{' : ('cmex10', 0xa9),
'\\}' : ('cmex10', 0xaa),
'{' : ('cmex10', 0xa9),
'}' : ('cmex10', 0xaa),
',' : ('cmmi10', 0x3b),
'.' : ('cmmi10', 0x3a),
'/' : ('cmmi10', 0x3d),
'<' : ('cmmi10', 0x3c),
'>' : ('cmmi10', 0x3e),
'\\alpha' : ('cmmi10', 0xae),
'\\beta' : ('cmmi10', 0xaf),
'\\chi' : ('cmmi10', 0xc2),
'\\combiningrightarrowabove' : ('cmmi10', 0x7e),
'\\delta' : ('cmmi10', 0xb1),
'\\ell' : ('cmmi10', 0x60),
'\\epsilon' : ('cmmi10', 0xb2),
'\\eta' : ('cmmi10', 0xb4),
'\\flat' : ('cmmi10', 0x5b),
'\\frown' : ('cmmi10', 0x5f),
'\\gamma' : ('cmmi10', 0xb0),
'\\imath' : ('cmmi10', 0x7b),
'\\iota' : ('cmmi10', 0xb6),
'\\jmath' : ('cmmi10', 0x7c),
'\\kappa' : ('cmmi10', 0x2219),
'\\lambda' : ('cmmi10', 0xb8),
'\\leftharpoondown' : ('cmmi10', 0x29),
'\\leftharpoonup' : ('cmmi10', 0x28),
'\\mu' : ('cmmi10', 0xb9),
'\\natural' : ('cmmi10', 0x5c),
'\\nu' : ('cmmi10', 0xba),
'\\omega' : ('cmmi10', 0x21),
'\\phi' : ('cmmi10', 0xc1),
'\\pi' : ('cmmi10', 0xbc),
'\\psi' : ('cmmi10', 0xc3),
'\\rho' : ('cmmi10', 0xbd),
'\\rightharpoondown' : ('cmmi10', 0x2b),
'\\rightharpoonup' : ('cmmi10', 0x2a),
'\\sharp' : ('cmmi10', 0x5d),
'\\sigma' : ('cmmi10', 0xbe),
'\\smile' : ('cmmi10', 0x5e),
'\\tau' : ('cmmi10', 0xbf),
'\\theta' : ('cmmi10', 0xb5),
'\\triangleleft' : ('cmmi10', 0x2f),
'\\triangleright' : ('cmmi10', 0x2e),
'\\upsilon' : ('cmmi10', 0xc0),
'\\varepsilon' : ('cmmi10', 0x22),
'\\varphi' : ('cmmi10', 0x27),
'\\varrho' : ('cmmi10', 0x25),
'\\varsigma' : ('cmmi10', 0x26),
'\\vartheta' : ('cmmi10', 0x23),
'\\wp' : ('cmmi10', 0x7d),
'\\xi' : ('cmmi10', 0xbb),
'\\zeta' : ('cmmi10', 0xb3),
'!' : ('cmr10', 0x21),
'%' : ('cmr10', 0x25),
'&' : ('cmr10', 0x26),
'(' : ('cmr10', 0x28),
')' : ('cmr10', 0x29),
'+' : ('cmr10', 0x2b),
'0' : ('cmr10', 0x30),
'1' : ('cmr10', 0x31),
'2' : ('cmr10', 0x32),
'3' : ('cmr10', 0x33),
'4' : ('cmr10', 0x34),
'5' : ('cmr10', 0x35),
'6' : ('cmr10', 0x36),
'7' : ('cmr10', 0x37),
'8' : ('cmr10', 0x38),
'9' : ('cmr10', 0x39),
':' : ('cmr10', 0x3a),
';' : ('cmr10', 0x3b),
'=' : ('cmr10', 0x3d),
'?' : ('cmr10', 0x3f),
'@' : ('cmr10', 0x40),
'[' : ('cmr10', 0x5b),
'\\#' : ('cmr10', 0x23),
'\\$' : ('cmr10', 0x24),
'\\%' : ('cmr10', 0x25),
'\\Delta' : ('cmr10', 0xa2),
'\\Gamma' : ('cmr10', 0xa1),
'\\Lambda' : ('cmr10', 0xa4),
'\\Omega' : ('cmr10', 0xad),
'\\Phi' : ('cmr10', 0xa9),
'\\Pi' : ('cmr10', 0xa6),
'\\Psi' : ('cmr10', 0xaa),
'\\Sigma' : ('cmr10', 0xa7),
'\\Theta' : ('cmr10', 0xa3),
'\\Upsilon' : ('cmr10', 0xa8),
'\\Xi' : ('cmr10', 0xa5),
'\\circumflexaccent' : ('cmr10', 0x5e),
'\\combiningacuteaccent' : ('cmr10', 0xb6),
'\\combiningbreve' : ('cmr10', 0xb8),
'\\combiningdiaeresis' : ('cmr10', 0xc4),
'\\combiningdotabove' : ('cmr10', 0x5f),
'\\combininggraveaccent' : ('cmr10', 0xb5),
'\\combiningoverline' : ('cmr10', 0xb9),
'\\combiningtilde' : ('cmr10', 0x7e),
'\\leftbracket' : ('cmr10', 0x5b),
'\\leftparen' : ('cmr10', 0x28),
'\\rightbracket' : ('cmr10', 0x5d),
'\\rightparen' : ('cmr10', 0x29),
'\\widebar' : ('cmr10', 0xb9),
']' : ('cmr10', 0x5d),
'*' : ('cmsy10', 0xa4),
'-' : ('cmsy10', 0xa1),
'\\Downarrow' : ('cmsy10', 0x2b),
'\\Im' : ('cmsy10', 0x3d),
'\\Leftarrow' : ('cmsy10', 0x28),
'\\Leftrightarrow' : ('cmsy10', 0x2c),
'\\P' : ('cmsy10', 0x7b),
'\\Re' : ('cmsy10', 0x3c),
'\\Rightarrow' : ('cmsy10', 0x29),
'\\S' : ('cmsy10', 0x78),
'\\Uparrow' : ('cmsy10', 0x2a),
'\\Updownarrow' : ('cmsy10', 0x6d),
'\\Vert' : ('cmsy10', 0x6b),
'\\aleph' : ('cmsy10', 0x40),
'\\approx' : ('cmsy10', 0xbc),
'\\ast' : ('cmsy10', 0xa4),
'\\asymp' : ('cmsy10', 0xb3),
'\\backslash' : ('cmsy10', 0x6e),
'\\bigcirc' : ('cmsy10', 0xb0),
'\\bigtriangledown' : ('cmsy10', 0x35),
'\\bigtriangleup' : ('cmsy10', 0x34),
'\\bot' : ('cmsy10', 0x3f),
'\\bullet' : ('cmsy10', 0xb2),
'\\cap' : ('cmsy10', 0x5c),
'\\cdot' : ('cmsy10', 0xa2),
'\\circ' : ('cmsy10', 0xb1),
'\\clubsuit' : ('cmsy10', 0x7c),
'\\cup' : ('cmsy10', 0x5b),
'\\dag' : ('cmsy10', 0x79),
'\\dashv' : ('cmsy10', 0x61),
'\\ddag' : ('cmsy10', 0x7a),
'\\diamond' : ('cmsy10', 0xa6),
'\\diamondsuit' : ('cmsy10', 0x7d),
'\\div' : ('cmsy10', 0xa5),
'\\downarrow' : ('cmsy10', 0x23),
'\\emptyset' : ('cmsy10', 0x3b),
'\\equiv' : ('cmsy10', 0xb4),
'\\exists' : ('cmsy10', 0x39),
'\\forall' : ('cmsy10', 0x38),
'\\geq' : ('cmsy10', 0xb8),
'\\gg' : ('cmsy10', 0xc0),
'\\heartsuit' : ('cmsy10', 0x7e),
'\\in' : ('cmsy10', 0x32),
'\\infty' : ('cmsy10', 0x31),
'\\lbrace' : ('cmsy10', 0x66),
'\\lceil' : ('cmsy10', 0x64),
'\\leftarrow' : ('cmsy10', 0xc3),
'\\leftrightarrow' : ('cmsy10', 0x24),
'\\leq' : ('cmsy10', 0x2219),
'\\lfloor' : ('cmsy10', 0x62),
'\\ll' : ('cmsy10', 0xbf),
'\\mid' : ('cmsy10', 0x6a),
'\\mp' : ('cmsy10', 0xa8),
'\\nabla' : ('cmsy10', 0x72),
'\\nearrow' : ('cmsy10', 0x25),
'\\neg' : ('cmsy10', 0x3a),
'\\ni' : ('cmsy10', 0x33),
'\\nwarrow' : ('cmsy10', 0x2d),
'\\odot' : ('cmsy10', 0xaf),
'\\ominus' : ('cmsy10', 0xaa),
'\\oplus' : ('cmsy10', 0xa9),
'\\oslash' : ('cmsy10', 0xae),
'\\otimes' : ('cmsy10', 0xad),
'\\pm' : ('cmsy10', 0xa7),
'\\prec' : ('cmsy10', 0xc1),
'\\preceq' : ('cmsy10', 0xb9),
'\\prime' : ('cmsy10', 0x30),
'\\propto' : ('cmsy10', 0x2f),
'\\rbrace' : ('cmsy10', 0x67),
'\\rceil' : ('cmsy10', 0x65),
'\\rfloor' : ('cmsy10', 0x63),
'\\rightarrow' : ('cmsy10', 0x21),
'\\searrow' : ('cmsy10', 0x26),
'\\sim' : ('cmsy10', 0xbb),
'\\simeq' : ('cmsy10', 0x27),
'\\slash' : ('cmsy10', 0x36),
'\\spadesuit' : ('cmsy10', 0xc4),
'\\sqcap' : ('cmsy10', 0x75),
'\\sqcup' : ('cmsy10', 0x74),
'\\sqsubseteq' : ('cmsy10', 0x76),
'\\sqsupseteq' : ('cmsy10', 0x77),
'\\subset' : ('cmsy10', 0xbd),
'\\subseteq' : ('cmsy10', 0xb5),
'\\succ' : ('cmsy10', 0xc2),
'\\succeq' : ('cmsy10', 0xba),
'\\supset' : ('cmsy10', 0xbe),
'\\supseteq' : ('cmsy10', 0xb6),
'\\swarrow' : ('cmsy10', 0x2e),
'\\times' : ('cmsy10', 0xa3),
'\\to' : ('cmsy10', 0x21),
'\\top' : ('cmsy10', 0x3e),
'\\uparrow' : ('cmsy10', 0x22),
'\\updownarrow' : ('cmsy10', 0x6c),
'\\uplus' : ('cmsy10', 0x5d),
'\\vdash' : ('cmsy10', 0x60),
'\\vee' : ('cmsy10', 0x5f),
'\\vert' : ('cmsy10', 0x6a),
'\\wedge' : ('cmsy10', 0x5e),
'\\wr' : ('cmsy10', 0x6f),
'\\|' : ('cmsy10', 0x6b),
'|' : ('cmsy10', 0x6a),
'\\_' : ('cmtt10', 0x5f)
}
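# Illustrative sketch, not part of upstream matplotlib: each latex_to_bakoma
# entry maps a TeX symbol to the BaKoMa Computer Modern font containing it and
# the glyph slot within that font.  The helper name below is hypothetical and
# only demonstrates the lookup.
def _bakoma_glyph(symbol):
    """Return the (font name, glyph index) pair for *symbol*, e.g. '\\alpha'."""
    # latex_to_bakoma['\\alpha'] == ('cmmi10', 0xae)
    return latex_to_bakoma[symbol]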
latex_to_cmex = {
r'\__sqrt__' : 112,
r'\bigcap' : 92,
r'\bigcup' : 91,
r'\bigodot' : 75,
r'\bigoplus' : 77,
r'\bigotimes' : 79,
r'\biguplus' : 93,
r'\bigvee' : 95,
r'\bigwedge' : 94,
r'\coprod' : 97,
r'\int' : 90,
r'\leftangle' : 173,
r'\leftbrace' : 169,
r'\oint' : 73,
r'\prod' : 89,
r'\rightangle' : 174,
r'\rightbrace' : 170,
r'\sum' : 88,
r'\widehat' : 98,
r'\widetilde' : 101,
}
latex_to_standard = {
r'\cong' : ('psyr', 64),
r'\Delta' : ('psyr', 68),
r'\Phi' : ('psyr', 70),
r'\Gamma' : ('psyr', 89),
r'\alpha' : ('psyr', 97),
r'\beta' : ('psyr', 98),
r'\chi' : ('psyr', 99),
r'\delta' : ('psyr', 100),
r'\varepsilon' : ('psyr', 101),
r'\phi' : ('psyr', 102),
r'\gamma' : ('psyr', 103),
r'\eta' : ('psyr', 104),
r'\iota' : ('psyr', 105),
r'\varpsi' : ('psyr', 106),
r'\kappa' : ('psyr', 108),
r'\nu' : ('psyr', 110),
r'\pi' : ('psyr', 112),
r'\theta' : ('psyr', 113),
r'\rho' : ('psyr', 114),
r'\sigma' : ('psyr', 115),
r'\tau' : ('psyr', 116),
'\\upsilon' : ('psyr', 117),
r'\varpi' : ('psyr', 118),
r'\omega' : ('psyr', 119),
r'\xi' : ('psyr', 120),
r'\psi' : ('psyr', 121),
r'\zeta' : ('psyr', 122),
r'\sim' : ('psyr', 126),
r'\leq' : ('psyr', 163),
r'\infty' : ('psyr', 165),
r'\clubsuit' : ('psyr', 167),
r'\diamondsuit' : ('psyr', 168),
r'\heartsuit' : ('psyr', 169),
r'\spadesuit' : ('psyr', 170),
r'\leftrightarrow' : ('psyr', 171),
r'\leftarrow' : ('psyr', 172),
'\\uparrow' : ('psyr', 173),
r'\rightarrow' : ('psyr', 174),
r'\downarrow' : ('psyr', 175),
r'\pm' : ('psyr', 176),
r'\geq' : ('psyr', 179),
r'\times' : ('psyr', 180),
r'\propto' : ('psyr', 181),
r'\partial' : ('psyr', 182),
r'\bullet' : ('psyr', 183),
r'\div' : ('psyr', 184),
r'\neq' : ('psyr', 185),
r'\equiv' : ('psyr', 186),
r'\approx' : ('psyr', 187),
r'\ldots' : ('psyr', 188),
r'\aleph' : ('psyr', 192),
r'\Im' : ('psyr', 193),
r'\Re' : ('psyr', 194),
r'\wp' : ('psyr', 195),
r'\otimes' : ('psyr', 196),
r'\oplus' : ('psyr', 197),
r'\oslash' : ('psyr', 198),
r'\cap' : ('psyr', 199),
r'\cup' : ('psyr', 200),
r'\supset' : ('psyr', 201),
r'\supseteq' : ('psyr', 202),
r'\subset' : ('psyr', 204),
r'\subseteq' : ('psyr', 205),
r'\in' : ('psyr', 206),
r'\notin' : ('psyr', 207),
r'\angle' : ('psyr', 208),
r'\nabla' : ('psyr', 209),
r'\textregistered' : ('psyr', 210),
r'\copyright' : ('psyr', 211),
r'\texttrademark' : ('psyr', 212),
r'\Pi' : ('psyr', 213),
r'\prod' : ('psyr', 213),
r'\surd' : ('psyr', 214),
r'\__sqrt__' : ('psyr', 214),
r'\cdot' : ('psyr', 215),
'\\urcorner' : ('psyr', 216),
r'\vee' : ('psyr', 217),
r'\wedge' : ('psyr', 218),
r'\Leftrightarrow' : ('psyr', 219),
r'\Leftarrow' : ('psyr', 220),
'\\Uparrow' : ('psyr', 221),
r'\Rightarrow' : ('psyr', 222),
r'\Downarrow' : ('psyr', 223),
r'\Diamond' : ('psyr', 224),
r'\Sigma' : ('psyr', 229),
r'\sum' : ('psyr', 229),
r'\forall' : ('psyr', 34),
r'\exists' : ('psyr', 36),
r'\lceil' : ('psyr', 233),
r'\lbrace' : ('psyr', 123),
r'\Psi' : ('psyr', 89),
r'\bot' : ('psyr', 0o136),
r'\Omega' : ('psyr', 0o127),
r'\leftbracket' : ('psyr', 0o133),
r'\rightbracket' : ('psyr', 0o135),
r'\leftbrace' : ('psyr', 123),
r'\leftparen' : ('psyr', 0o50),
r'\prime' : ('psyr', 0o242),
r'\sharp' : ('psyr', 0o43),
r'\slash' : ('psyr', 0o57),
r'\Lamda' : ('psyr', 0o114),
r'\neg' : ('psyr', 0o330),
'\\Upsilon' : ('psyr', 0o241),
r'\rightbrace' : ('psyr', 0o175),
r'\rfloor' : ('psyr', 0o373),
r'\lambda' : ('psyr', 0o154),
r'\to' : ('psyr', 0o256),
r'\Xi' : ('psyr', 0o130),
r'\emptyset' : ('psyr', 0o306),
r'\lfloor' : ('psyr', 0o353),
r'\rightparen' : ('psyr', 0o51),
r'\rceil' : ('psyr', 0o371),
r'\ni' : ('psyr', 0o47),
r'\epsilon' : ('psyr', 0o145),
r'\Theta' : ('psyr', 0o121),
r'\langle' : ('psyr', 0o341),
r'\leftangle' : ('psyr', 0o341),
r'\rangle' : ('psyr', 0o361),
r'\rightangle' : ('psyr', 0o361),
r'\rbrace' : ('psyr', 0o175),
r'\circ' : ('psyr', 0o260),
r'\diamond' : ('psyr', 0o340),
r'\mu' : ('psyr', 0o155),
r'\mid' : ('psyr', 0o352),
r'\imath' : ('pncri8a', 105),
r'\%' : ('pncr8a', 37),
r'\$' : ('pncr8a', 36),
r'\{' : ('pncr8a', 123),
r'\}' : ('pncr8a', 125),
r'\backslash' : ('pncr8a', 92),
r'\ast' : ('pncr8a', 42),
r'\#' : ('pncr8a', 35),
r'\circumflexaccent' : ('pncri8a', 124), # for \hat
r'\combiningbreve' : ('pncri8a', 81), # for \breve
r'\combininggraveaccent' : ('pncri8a', 114), # for \grave
r'\combiningacuteaccent' : ('pncri8a', 63), # for \accute
r'\combiningdiaeresis' : ('pncri8a', 91), # for \ddot
r'\combiningtilde' : ('pncri8a', 75), # for \tilde
r'\combiningrightarrowabove' : ('pncri8a', 110), # for \vec
r'\combiningdotabove' : ('pncri8a', 26), # for \dot
}
# Automatically generated.
type12uni = {
'uni24C8' : 9416,
'aring' : 229,
'uni22A0' : 8864,
'uni2292' : 8850,
'quotedblright' : 8221,
'uni03D2' : 978,
'uni2215' : 8725,
'uni03D0' : 976,
'V' : 86,
'dollar' : 36,
'uni301E' : 12318,
'uni03D5' : 981,
'four' : 52,
'uni25A0' : 9632,
'uni013C' : 316,
'uni013B' : 315,
'uni013E' : 318,
'Yacute' : 221,
'uni25DE' : 9694,
'uni013F' : 319,
'uni255A' : 9562,
'uni2606' : 9734,
'uni0180' : 384,
'uni22B7' : 8887,
'uni044F' : 1103,
'uni22B5' : 8885,
'uni22B4' : 8884,
'uni22AE' : 8878,
'uni22B2' : 8882,
'uni22B1' : 8881,
'uni22B0' : 8880,
'uni25CD' : 9677,
'uni03CE' : 974,
'uni03CD' : 973,
'uni03CC' : 972,
'uni03CB' : 971,
'uni03CA' : 970,
'uni22B8' : 8888,
'uni22C9' : 8905,
'uni0449' : 1097,
'uni20DD' : 8413,
'uni20DC' : 8412,
'uni20DB' : 8411,
'uni2231' : 8753,
'uni25CF' : 9679,
'uni306E' : 12398,
'uni03D1' : 977,
'uni01A1' : 417,
'uni20D7' : 8407,
'uni03D6' : 982,
'uni2233' : 8755,
'uni20D2' : 8402,
'uni20D1' : 8401,
'uni20D0' : 8400,
'P' : 80,
'uni22BE' : 8894,
'uni22BD' : 8893,
'uni22BC' : 8892,
'uni22BB' : 8891,
'underscore' : 95,
'uni03C8' : 968,
'uni03C7' : 967,
'uni0328' : 808,
'uni03C5' : 965,
'uni03C4' : 964,
'uni03C3' : 963,
'uni03C2' : 962,
'uni03C1' : 961,
'uni03C0' : 960,
'uni2010' : 8208,
'uni0130' : 304,
'uni0133' : 307,
'uni0132' : 306,
'uni0135' : 309,
'uni0134' : 308,
'uni0137' : 311,
'uni0136' : 310,
'uni0139' : 313,
'uni0138' : 312,
'uni2244' : 8772,
'uni229A' : 8858,
'uni2571' : 9585,
'uni0278' : 632,
'uni2239' : 8761,
'p' : 112,
'uni3019' : 12313,
'uni25CB' : 9675,
'uni03DB' : 987,
'uni03DC' : 988,
'uni03DA' : 986,
'uni03DF' : 991,
'uni03DD' : 989,
'uni013D' : 317,
'uni220A' : 8714,
'uni220C' : 8716,
'uni220B' : 8715,
'uni220E' : 8718,
'uni220D' : 8717,
'uni220F' : 8719,
'uni22CC' : 8908,
'Otilde' : 213,
'uni25E5' : 9701,
'uni2736' : 10038,
'perthousand' : 8240,
'zero' : 48,
'uni279B' : 10139,
'dotlessi' : 305,
'uni2279' : 8825,
'Scaron' : 352,
'zcaron' : 382,
'uni21D8' : 8664,
'egrave' : 232,
'uni0271' : 625,
'uni01AA' : 426,
'uni2332' : 9010,
'section' : 167,
'uni25E4' : 9700,
'Icircumflex' : 206,
'ntilde' : 241,
'uni041E' : 1054,
'ampersand' : 38,
'uni041C' : 1052,
'uni041A' : 1050,
'uni22AB' : 8875,
'uni21DB' : 8667,
'dotaccent' : 729,
'uni0416' : 1046,
'uni0417' : 1047,
'uni0414' : 1044,
'uni0415' : 1045,
'uni0412' : 1042,
'uni0413' : 1043,
'degree' : 176,
'uni0411' : 1041,
'K' : 75,
'uni25EB' : 9707,
'uni25EF' : 9711,
'uni0418' : 1048,
'uni0419' : 1049,
'uni2263' : 8803,
'uni226E' : 8814,
'uni2251' : 8785,
'uni02C8' : 712,
'uni2262' : 8802,
'acircumflex' : 226,
'uni22B3' : 8883,
'uni2261' : 8801,
'uni2394' : 9108,
'Aring' : 197,
'uni2260' : 8800,
'uni2254' : 8788,
'uni0436' : 1078,
'uni2267' : 8807,
'k' : 107,
'uni22C8' : 8904,
'uni226A' : 8810,
'uni231F' : 8991,
'smalltilde' : 732,
'uni2201' : 8705,
'uni2200' : 8704,
'uni2203' : 8707,
'uni02BD' : 701,
'uni2205' : 8709,
'uni2204' : 8708,
'Agrave' : 192,
'uni2206' : 8710,
'uni2209' : 8713,
'uni2208' : 8712,
'uni226D' : 8813,
'uni2264' : 8804,
'uni263D' : 9789,
'uni2258' : 8792,
'uni02D3' : 723,
'uni02D2' : 722,
'uni02D1' : 721,
'uni02D0' : 720,
'uni25E1' : 9697,
'divide' : 247,
'uni02D5' : 725,
'uni02D4' : 724,
'ocircumflex' : 244,
'uni2524' : 9508,
'uni043A' : 1082,
'uni24CC' : 9420,
'asciitilde' : 126,
'uni22B9' : 8889,
'uni24D2' : 9426,
'uni211E' : 8478,
'uni211D' : 8477,
'uni24DD' : 9437,
'uni211A' : 8474,
'uni211C' : 8476,
'uni211B' : 8475,
'uni25C6' : 9670,
'uni017F' : 383,
'uni017A' : 378,
'uni017C' : 380,
'uni017B' : 379,
'uni0346' : 838,
'uni22F1' : 8945,
'uni22F0' : 8944,
'two' : 50,
'uni2298' : 8856,
'uni24D1' : 9425,
'E' : 69,
'uni025D' : 605,
'scaron' : 353,
'uni2322' : 8994,
'uni25E3' : 9699,
'uni22BF' : 8895,
'F' : 70,
'uni0440' : 1088,
'uni255E' : 9566,
'uni22BA' : 8890,
'uni0175' : 373,
'uni0174' : 372,
'uni0177' : 375,
'uni0176' : 374,
'bracketleft' : 91,
'uni0170' : 368,
'uni0173' : 371,
'uni0172' : 370,
'asciicircum' : 94,
'uni0179' : 377,
'uni2590' : 9616,
'uni25E2' : 9698,
'uni2119' : 8473,
'uni2118' : 8472,
'uni25CC' : 9676,
'f' : 102,
'ordmasculine' : 186,
'uni229B' : 8859,
'uni22A1' : 8865,
'uni2111' : 8465,
'uni2110' : 8464,
'uni2113' : 8467,
'uni2112' : 8466,
'mu' : 181,
'uni2281' : 8833,
'paragraph' : 182,
'nine' : 57,
'uni25EC' : 9708,
'v' : 118,
'uni040C' : 1036,
'uni0113' : 275,
'uni22D0' : 8912,
'uni21CC' : 8652,
'uni21CB' : 8651,
'uni21CA' : 8650,
'uni22A5' : 8869,
'uni21CF' : 8655,
'uni21CE' : 8654,
'uni21CD' : 8653,
'guilsinglleft' : 8249,
'backslash' : 92,
'uni2284' : 8836,
'uni224E' : 8782,
'uni224D' : 8781,
'uni224F' : 8783,
'uni224A' : 8778,
'uni2287' : 8839,
'uni224C' : 8780,
'uni224B' : 8779,
'uni21BD' : 8637,
'uni2286' : 8838,
'uni030F' : 783,
'uni030D' : 781,
'uni030E' : 782,
'uni030B' : 779,
'uni030C' : 780,
'uni030A' : 778,
'uni026E' : 622,
'uni026D' : 621,
'six' : 54,
'uni026A' : 618,
'uni026C' : 620,
'uni25C1' : 9665,
'uni20D6' : 8406,
'uni045B' : 1115,
'uni045C' : 1116,
'uni256B' : 9579,
'uni045A' : 1114,
'uni045F' : 1119,
'uni045E' : 1118,
'A' : 65,
'uni2569' : 9577,
'uni0458' : 1112,
'uni0459' : 1113,
'uni0452' : 1106,
'uni0453' : 1107,
'uni2562' : 9570,
'uni0451' : 1105,
'uni0456' : 1110,
'uni0457' : 1111,
'uni0454' : 1108,
'uni0455' : 1109,
'icircumflex' : 238,
'uni0307' : 775,
'uni0304' : 772,
'uni0305' : 773,
'uni0269' : 617,
'uni0268' : 616,
'uni0300' : 768,
'uni0301' : 769,
'uni0265' : 613,
'uni0264' : 612,
'uni0267' : 615,
'uni0266' : 614,
'uni0261' : 609,
'uni0260' : 608,
'uni0263' : 611,
'uni0262' : 610,
'a' : 97,
'uni2207' : 8711,
'uni2247' : 8775,
'uni2246' : 8774,
'uni2241' : 8769,
'uni2240' : 8768,
'uni2243' : 8771,
'uni2242' : 8770,
'uni2312' : 8978,
'ogonek' : 731,
'uni2249' : 8777,
'uni2248' : 8776,
'uni3030' : 12336,
'q' : 113,
'uni21C2' : 8642,
'uni21C1' : 8641,
'uni21C0' : 8640,
'uni21C7' : 8647,
'uni21C6' : 8646,
'uni21C5' : 8645,
'uni21C4' : 8644,
'uni225F' : 8799,
'uni212C' : 8492,
'uni21C8' : 8648,
'uni2467' : 9319,
'oacute' : 243,
'uni028F' : 655,
'uni028E' : 654,
'uni026F' : 623,
'uni028C' : 652,
'uni028B' : 651,
'uni028A' : 650,
'uni2510' : 9488,
'ograve' : 242,
'edieresis' : 235,
'uni22CE' : 8910,
'uni22CF' : 8911,
'uni219F' : 8607,
'comma' : 44,
'uni22CA' : 8906,
'uni0429' : 1065,
'uni03C6' : 966,
'uni0427' : 1063,
'uni0426' : 1062,
'uni0425' : 1061,
'uni0424' : 1060,
'uni0423' : 1059,
'uni0422' : 1058,
'uni0421' : 1057,
'uni0420' : 1056,
'uni2465' : 9317,
'uni24D0' : 9424,
'uni2464' : 9316,
'uni0430' : 1072,
'otilde' : 245,
'uni2661' : 9825,
'uni24D6' : 9430,
'uni2466' : 9318,
'uni24D5' : 9429,
'uni219A' : 8602,
'uni2518' : 9496,
'uni22B6' : 8886,
'uni2461' : 9313,
'uni24D4' : 9428,
'uni2460' : 9312,
'uni24EA' : 9450,
'guillemotright' : 187,
'ecircumflex' : 234,
'greater' : 62,
'uni2011' : 8209,
'uacute' : 250,
'uni2462' : 9314,
'L' : 76,
'bullet' : 8226,
'uni02A4' : 676,
'uni02A7' : 679,
'cedilla' : 184,
'uni02A2' : 674,
'uni2015' : 8213,
'uni22C4' : 8900,
'uni22C5' : 8901,
'uni22AD' : 8877,
'uni22C7' : 8903,
'uni22C0' : 8896,
'uni2016' : 8214,
'uni22C2' : 8898,
'uni22C3' : 8899,
'uni24CF' : 9423,
'uni042F' : 1071,
'uni042E' : 1070,
'uni042D' : 1069,
'ydieresis' : 255,
'l' : 108,
'logicalnot' : 172,
'uni24CA' : 9418,
'uni0287' : 647,
'uni0286' : 646,
'uni0285' : 645,
'uni0284' : 644,
'uni0283' : 643,
'uni0282' : 642,
'uni0281' : 641,
'uni027C' : 636,
'uni2664' : 9828,
'exclamdown' : 161,
'uni25C4' : 9668,
'uni0289' : 649,
'uni0288' : 648,
'uni039A' : 922,
'endash' : 8211,
'uni2640' : 9792,
'uni20E4' : 8420,
'uni0473' : 1139,
'uni20E1' : 8417,
'uni2642' : 9794,
'uni03B8' : 952,
'uni03B9' : 953,
'agrave' : 224,
'uni03B4' : 948,
'uni03B5' : 949,
'uni03B6' : 950,
'uni03B7' : 951,
'uni03B0' : 944,
'uni03B1' : 945,
'uni03B2' : 946,
'uni03B3' : 947,
'uni2555' : 9557,
'Adieresis' : 196,
'germandbls' : 223,
'Odieresis' : 214,
'space' : 32,
'uni0126' : 294,
'uni0127' : 295,
'uni0124' : 292,
'uni0125' : 293,
'uni0122' : 290,
'uni0123' : 291,
'uni0120' : 288,
'uni0121' : 289,
'quoteright' : 8217,
'uni2560' : 9568,
'uni2556' : 9558,
'ucircumflex' : 251,
'uni2561' : 9569,
'uni2551' : 9553,
'uni25B2' : 9650,
'uni2550' : 9552,
'uni2563' : 9571,
'uni2553' : 9555,
'G' : 71,
'uni2564' : 9572,
'uni2552' : 9554,
'quoteleft' : 8216,
'uni2565' : 9573,
'uni2572' : 9586,
'uni2568' : 9576,
'uni2566' : 9574,
'W' : 87,
'uni214A' : 8522,
'uni012F' : 303,
'uni012D' : 301,
'uni012E' : 302,
'uni012B' : 299,
'uni012C' : 300,
'uni255C' : 9564,
'uni012A' : 298,
'uni2289' : 8841,
'Q' : 81,
'uni2320' : 8992,
'uni2321' : 8993,
'g' : 103,
'uni03BD' : 957,
'uni03BE' : 958,
'uni03BF' : 959,
'uni2282' : 8834,
'uni2285' : 8837,
'uni03BA' : 954,
'uni03BB' : 955,
'uni03BC' : 956,
'uni2128' : 8488,
'uni25B7' : 9655,
'w' : 119,
'uni0302' : 770,
'uni03DE' : 990,
'uni25DA' : 9690,
'uni0303' : 771,
'uni0463' : 1123,
'uni0462' : 1122,
'uni3018' : 12312,
'uni2514' : 9492,
'question' : 63,
'uni25B3' : 9651,
'uni24E1' : 9441,
'one' : 49,
'uni200A' : 8202,
'uni2278' : 8824,
'ring' : 730,
'uni0195' : 405,
'figuredash' : 8210,
'uni22EC' : 8940,
'uni0339' : 825,
'uni0338' : 824,
'uni0337' : 823,
'uni0336' : 822,
'uni0335' : 821,
'uni0333' : 819,
'uni0332' : 818,
'uni0331' : 817,
'uni0330' : 816,
'uni01C1' : 449,
'uni01C0' : 448,
'uni01C3' : 451,
'uni01C2' : 450,
'uni2353' : 9043,
'uni0308' : 776,
'uni2218' : 8728,
'uni2219' : 8729,
'uni2216' : 8726,
'uni2217' : 8727,
'uni2214' : 8724,
'uni0309' : 777,
'uni2609' : 9737,
'uni2213' : 8723,
'uni2210' : 8720,
'uni2211' : 8721,
'uni2245' : 8773,
'B' : 66,
'uni25D6' : 9686,
'iacute' : 237,
'uni02E6' : 742,
'uni02E7' : 743,
'uni02E8' : 744,
'uni02E9' : 745,
'uni221D' : 8733,
'uni221E' : 8734,
'Ydieresis' : 376,
'uni221C' : 8732,
'uni22D7' : 8919,
'uni221A' : 8730,
'R' : 82,
'uni24DC' : 9436,
'uni033F' : 831,
'uni033E' : 830,
'uni033C' : 828,
'uni033B' : 827,
'uni033A' : 826,
'b' : 98,
'uni228A' : 8842,
'uni22DB' : 8923,
'uni2554' : 9556,
'uni046B' : 1131,
'uni046A' : 1130,
'r' : 114,
'uni24DB' : 9435,
'Ccedilla' : 199,
'minus' : 8722,
'uni24DA' : 9434,
'uni03F0' : 1008,
'uni03F1' : 1009,
'uni20AC' : 8364,
'uni2276' : 8822,
'uni24C0' : 9408,
'uni0162' : 354,
'uni0163' : 355,
'uni011E' : 286,
'uni011D' : 285,
'uni011C' : 284,
'uni011B' : 283,
'uni0164' : 356,
'uni0165' : 357,
'Lslash' : 321,
'uni0168' : 360,
'uni0169' : 361,
'uni25C9' : 9673,
'uni02E5' : 741,
'uni21C3' : 8643,
'uni24C4' : 9412,
'uni24E2' : 9442,
'uni2277' : 8823,
'uni013A' : 314,
'uni2102' : 8450,
'Uacute' : 218,
'uni2317' : 8983,
'uni2107' : 8455,
'uni221F' : 8735,
'yacute' : 253,
'uni3012' : 12306,
'Ucircumflex' : 219,
'uni015D' : 349,
'quotedbl' : 34,
'uni25D9' : 9689,
'uni2280' : 8832,
'uni22AF' : 8879,
'onehalf' : 189,
'uni221B' : 8731,
'Thorn' : 222,
'uni2226' : 8742,
'M' : 77,
'uni25BA' : 9658,
'uni2463' : 9315,
'uni2336' : 9014,
'eight' : 56,
'uni2236' : 8758,
'multiply' : 215,
'uni210C' : 8460,
'uni210A' : 8458,
'uni21C9' : 8649,
'grave' : 96,
'uni210E' : 8462,
'uni0117' : 279,
'uni016C' : 364,
'uni0115' : 277,
'uni016A' : 362,
'uni016F' : 367,
'uni0112' : 274,
'uni016D' : 365,
'uni016E' : 366,
'Ocircumflex' : 212,
'uni2305' : 8965,
'm' : 109,
'uni24DF' : 9439,
'uni0119' : 281,
'uni0118' : 280,
'uni20A3' : 8355,
'uni20A4' : 8356,
'uni20A7' : 8359,
'uni2288' : 8840,
'uni24C3' : 9411,
'uni251C' : 9500,
'uni228D' : 8845,
'uni222F' : 8751,
'uni222E' : 8750,
'uni222D' : 8749,
'uni222C' : 8748,
'uni222B' : 8747,
'uni222A' : 8746,
'uni255B' : 9563,
'Ugrave' : 217,
'uni24DE' : 9438,
'guilsinglright' : 8250,
'uni250A' : 9482,
'Ntilde' : 209,
'uni0279' : 633,
'questiondown' : 191,
'uni256C' : 9580,
'Atilde' : 195,
'uni0272' : 626,
'uni0273' : 627,
'uni0270' : 624,
'ccedilla' : 231,
'uni0276' : 630,
'uni0277' : 631,
'uni0274' : 628,
'uni0275' : 629,
'uni2252' : 8786,
'uni041F' : 1055,
'uni2250' : 8784,
'Z' : 90,
'uni2256' : 8790,
'uni2257' : 8791,
'copyright' : 169,
'uni2255' : 8789,
'uni043D' : 1085,
'uni043E' : 1086,
'uni043F' : 1087,
'yen' : 165,
'uni041D' : 1053,
'uni043B' : 1083,
'uni043C' : 1084,
'uni21B0' : 8624,
'uni21B1' : 8625,
'uni21B2' : 8626,
'uni21B3' : 8627,
'uni21B4' : 8628,
'uni21B5' : 8629,
'uni21B6' : 8630,
'uni21B7' : 8631,
'uni21B8' : 8632,
'Eacute' : 201,
'uni2311' : 8977,
'uni2310' : 8976,
'uni228F' : 8847,
'uni25DB' : 9691,
'uni21BA' : 8634,
'uni21BB' : 8635,
'uni21BC' : 8636,
'uni2017' : 8215,
'uni21BE' : 8638,
'uni21BF' : 8639,
'uni231C' : 8988,
'H' : 72,
'uni0293' : 659,
'uni2202' : 8706,
'uni22A4' : 8868,
'uni231E' : 8990,
'uni2232' : 8754,
'uni225B' : 8795,
'uni225C' : 8796,
'uni24D9' : 9433,
'uni225A' : 8794,
'uni0438' : 1080,
'uni0439' : 1081,
'uni225D' : 8797,
'uni225E' : 8798,
'uni0434' : 1076,
'X' : 88,
'uni007F' : 127,
'uni0437' : 1079,
'Idieresis' : 207,
'uni0431' : 1073,
'uni0432' : 1074,
'uni0433' : 1075,
'uni22AC' : 8876,
'uni22CD' : 8909,
'uni25A3' : 9635,
'bar' : 124,
'uni24BB' : 9403,
'uni037E' : 894,
'uni027B' : 635,
'h' : 104,
'uni027A' : 634,
'uni027F' : 639,
'uni027D' : 637,
'uni027E' : 638,
'uni2227' : 8743,
'uni2004' : 8196,
'uni2225' : 8741,
'uni2224' : 8740,
'uni2223' : 8739,
'uni2222' : 8738,
'uni2221' : 8737,
'uni2220' : 8736,
'x' : 120,
'uni2323' : 8995,
'uni2559' : 9561,
'uni2558' : 9560,
'uni2229' : 8745,
'uni2228' : 8744,
'udieresis' : 252,
'uni029D' : 669,
'ordfeminine' : 170,
'uni22CB' : 8907,
'uni233D' : 9021,
'uni0428' : 1064,
'uni24C6' : 9414,
'uni22DD' : 8925,
'uni24C7' : 9415,
'uni015C' : 348,
'uni015B' : 347,
'uni015A' : 346,
'uni22AA' : 8874,
'uni015F' : 351,
'uni015E' : 350,
'braceleft' : 123,
'uni24C5' : 9413,
'uni0410' : 1040,
'uni03AA' : 938,
'uni24C2' : 9410,
'uni03AC' : 940,
'uni03AB' : 939,
'macron' : 175,
'uni03AD' : 941,
'uni03AF' : 943,
'uni0294' : 660,
'uni0295' : 661,
'uni0296' : 662,
'uni0297' : 663,
'uni0290' : 656,
'uni0291' : 657,
'uni0292' : 658,
'atilde' : 227,
'Acircumflex' : 194,
'uni2370' : 9072,
'uni24C1' : 9409,
'uni0298' : 664,
'uni0299' : 665,
'Oslash' : 216,
'uni029E' : 670,
'C' : 67,
'quotedblleft' : 8220,
'uni029B' : 667,
'uni029C' : 668,
'uni03A9' : 937,
'uni03A8' : 936,
'S' : 83,
'uni24C9' : 9417,
'uni03A1' : 929,
'uni03A0' : 928,
'exclam' : 33,
'uni03A5' : 933,
'uni03A4' : 932,
'uni03A7' : 935,
'Zcaron' : 381,
'uni2133' : 8499,
'uni2132' : 8498,
'uni0159' : 345,
'uni0158' : 344,
'uni2137' : 8503,
'uni2005' : 8197,
'uni2135' : 8501,
'uni2134' : 8500,
'uni02BA' : 698,
'uni2033' : 8243,
'uni0151' : 337,
'uni0150' : 336,
'uni0157' : 343,
'equal' : 61,
'uni0155' : 341,
'uni0154' : 340,
's' : 115,
'uni233F' : 9023,
'eth' : 240,
'uni24BE' : 9406,
'uni21E9' : 8681,
'uni2060' : 8288,
'Egrave' : 200,
'uni255D' : 9565,
'uni24CD' : 9421,
'uni21E1' : 8673,
'uni21B9' : 8633,
'hyphen' : 45,
'uni01BE' : 446,
'uni01BB' : 443,
'period' : 46,
'igrave' : 236,
'uni01BA' : 442,
'uni2296' : 8854,
'uni2297' : 8855,
'uni2294' : 8852,
'uni2295' : 8853,
'colon' : 58,
'uni2293' : 8851,
'uni2290' : 8848,
'uni2291' : 8849,
'uni032D' : 813,
'uni032E' : 814,
'uni032F' : 815,
'uni032A' : 810,
'uni032B' : 811,
'uni032C' : 812,
'uni231D' : 8989,
'Ecircumflex' : 202,
'uni24D7' : 9431,
'uni25DD' : 9693,
'trademark' : 8482,
'Aacute' : 193,
'cent' : 162,
'uni0445' : 1093,
'uni266E' : 9838,
'uni266D' : 9837,
'uni266B' : 9835,
'uni03C9' : 969,
'uni2003' : 8195,
'uni2047' : 8263,
'lslash' : 322,
'uni03A6' : 934,
'uni2043' : 8259,
'uni250C' : 9484,
'uni2040' : 8256,
'uni255F' : 9567,
'uni24CB' : 9419,
'uni0472' : 1138,
'uni0446' : 1094,
'uni0474' : 1140,
'uni0475' : 1141,
'uni2508' : 9480,
'uni2660' : 9824,
'uni2506' : 9478,
'uni2502' : 9474,
'c' : 99,
'uni2500' : 9472,
'N' : 78,
'uni22A6' : 8870,
'uni21E7' : 8679,
'uni2130' : 8496,
'uni2002' : 8194,
'breve' : 728,
'uni0442' : 1090,
'Oacute' : 211,
'uni229F' : 8863,
'uni25C7' : 9671,
'uni229D' : 8861,
'uni229E' : 8862,
'guillemotleft' : 171,
'uni0329' : 809,
'uni24E5' : 9445,
'uni011F' : 287,
'uni0324' : 804,
'uni0325' : 805,
'uni0326' : 806,
'uni0327' : 807,
'uni0321' : 801,
'uni0322' : 802,
'n' : 110,
'uni2032' : 8242,
'uni2269' : 8809,
'uni2268' : 8808,
'uni0306' : 774,
'uni226B' : 8811,
'uni21EA' : 8682,
'uni0166' : 358,
'uni203B' : 8251,
'uni01B5' : 437,
'idieresis' : 239,
'uni02BC' : 700,
'uni01B0' : 432,
'braceright' : 125,
'seven' : 55,
'uni02BB' : 699,
'uni011A' : 282,
'uni29FB' : 10747,
'brokenbar' : 166,
'uni2036' : 8246,
'uni25C0' : 9664,
'uni0156' : 342,
'uni22D5' : 8917,
'uni0258' : 600,
'ugrave' : 249,
'uni22D6' : 8918,
'uni22D1' : 8913,
'uni2034' : 8244,
'uni22D3' : 8915,
'uni22D2' : 8914,
'uni203C' : 8252,
'uni223E' : 8766,
'uni02BF' : 703,
'uni22D9' : 8921,
'uni22D8' : 8920,
'uni25BD' : 9661,
'uni25BE' : 9662,
'uni25BF' : 9663,
'uni041B' : 1051,
'periodcentered' : 183,
'uni25BC' : 9660,
'uni019E' : 414,
'uni019B' : 411,
'uni019A' : 410,
'uni2007' : 8199,
'uni0391' : 913,
'uni0390' : 912,
'uni0393' : 915,
'uni0392' : 914,
'uni0395' : 917,
'uni0394' : 916,
'uni0397' : 919,
'uni0396' : 918,
'uni0399' : 921,
'uni0398' : 920,
'uni25C8' : 9672,
'uni2468' : 9320,
'sterling' : 163,
'uni22EB' : 8939,
'uni039C' : 924,
'uni039B' : 923,
'uni039E' : 926,
'uni039D' : 925,
'uni039F' : 927,
'I' : 73,
'uni03E1' : 993,
'uni03E0' : 992,
'uni2319' : 8985,
'uni228B' : 8843,
'uni25B5' : 9653,
'uni25B6' : 9654,
'uni22EA' : 8938,
'uni24B9' : 9401,
'uni044E' : 1102,
'uni0199' : 409,
'uni2266' : 8806,
'Y' : 89,
'uni22A2' : 8866,
'Eth' : 208,
'uni266F' : 9839,
'emdash' : 8212,
'uni263B' : 9787,
'uni24BD' : 9405,
'uni22DE' : 8926,
'uni0360' : 864,
'uni2557' : 9559,
'uni22DF' : 8927,
'uni22DA' : 8922,
'uni22DC' : 8924,
'uni0361' : 865,
'i' : 105,
'uni24BF' : 9407,
'uni0362' : 866,
'uni263E' : 9790,
'uni028D' : 653,
'uni2259' : 8793,
'uni0323' : 803,
'uni2265' : 8805,
'daggerdbl' : 8225,
'y' : 121,
'uni010A' : 266,
'plusminus' : 177,
'less' : 60,
'uni21AE' : 8622,
'uni0315' : 789,
'uni230B' : 8971,
'uni21AF' : 8623,
'uni21AA' : 8618,
'uni21AC' : 8620,
'uni21AB' : 8619,
'uni01FB' : 507,
'uni01FC' : 508,
'uni223A' : 8762,
'uni01FA' : 506,
'uni01FF' : 511,
'uni01FD' : 509,
'uni01FE' : 510,
'uni2567' : 9575,
'uni25E0' : 9696,
'uni0104' : 260,
'uni0105' : 261,
'uni0106' : 262,
'uni0107' : 263,
'uni0100' : 256,
'uni0101' : 257,
'uni0102' : 258,
'uni0103' : 259,
'uni2038' : 8248,
'uni2009' : 8201,
'uni2008' : 8200,
'uni0108' : 264,
'uni0109' : 265,
'uni02A1' : 673,
'uni223B' : 8763,
'uni226C' : 8812,
'uni25AC' : 9644,
'uni24D3' : 9427,
'uni21E0' : 8672,
'uni21E3' : 8675,
'Udieresis' : 220,
'uni21E2' : 8674,
'D' : 68,
'uni21E5' : 8677,
'uni2621' : 9761,
'uni21D1' : 8657,
'uni203E' : 8254,
'uni22C6' : 8902,
'uni21E4' : 8676,
'uni010D' : 269,
'uni010E' : 270,
'uni010F' : 271,
'five' : 53,
'T' : 84,
'uni010B' : 267,
'uni010C' : 268,
'uni2605' : 9733,
'uni2663' : 9827,
'uni21E6' : 8678,
'uni24B6' : 9398,
'uni22C1' : 8897,
'oslash' : 248,
'acute' : 180,
'uni01F0' : 496,
'd' : 100,
'OE' : 338,
'uni22E3' : 8931,
'Igrave' : 204,
'uni2308' : 8968,
'uni2309' : 8969,
'uni21A9' : 8617,
't' : 116,
'uni2313' : 8979,
'uni03A3' : 931,
'uni21A4' : 8612,
'uni21A7' : 8615,
'uni21A6' : 8614,
'uni21A1' : 8609,
'uni21A0' : 8608,
'uni21A3' : 8611,
'uni21A2' : 8610,
'parenright' : 41,
'uni256A' : 9578,
'uni25DC' : 9692,
'uni24CE' : 9422,
'uni042C' : 1068,
'uni24E0' : 9440,
'uni042B' : 1067,
'uni0409' : 1033,
'uni0408' : 1032,
'uni24E7' : 9447,
'uni25B4' : 9652,
'uni042A' : 1066,
'uni228E' : 8846,
'uni0401' : 1025,
'adieresis' : 228,
'uni0403' : 1027,
'quotesingle' : 39,
'uni0405' : 1029,
'uni0404' : 1028,
'uni0407' : 1031,
'uni0406' : 1030,
'uni229C' : 8860,
'uni2306' : 8966,
'uni2253' : 8787,
'twodotenleader' : 8229,
'uni2131' : 8497,
'uni21DA' : 8666,
'uni2234' : 8756,
'uni2235' : 8757,
'uni01A5' : 421,
'uni2237' : 8759,
'uni2230' : 8752,
'uni02CC' : 716,
'slash' : 47,
'uni01A0' : 416,
'ellipsis' : 8230,
'uni2299' : 8857,
'uni2238' : 8760,
'numbersign' : 35,
'uni21A8' : 8616,
'uni223D' : 8765,
'uni01AF' : 431,
'uni223F' : 8767,
'uni01AD' : 429,
'uni01AB' : 427,
'odieresis' : 246,
'uni223C' : 8764,
'uni227D' : 8829,
'uni0280' : 640,
'O' : 79,
'uni227E' : 8830,
'uni21A5' : 8613,
'uni22D4' : 8916,
'uni25D4' : 9684,
'uni227F' : 8831,
'uni0435' : 1077,
'uni2302' : 8962,
'uni2669' : 9833,
'uni24E3' : 9443,
'uni2720' : 10016,
'uni22A8' : 8872,
'uni22A9' : 8873,
'uni040A' : 1034,
'uni22A7' : 8871,
'oe' : 339,
'uni040B' : 1035,
'uni040E' : 1038,
'uni22A3' : 8867,
'o' : 111,
'uni040F' : 1039,
'Edieresis' : 203,
'uni25D5' : 9685,
'plus' : 43,
'uni044D' : 1101,
'uni263C' : 9788,
'uni22E6' : 8934,
'uni2283' : 8835,
'uni258C' : 9612,
'uni219E' : 8606,
'uni24E4' : 9444,
'uni2136' : 8502,
'dagger' : 8224,
'uni24B7' : 9399,
'uni219B' : 8603,
'uni22E5' : 8933,
'three' : 51,
'uni210B' : 8459,
'uni2534' : 9524,
'uni24B8' : 9400,
'uni230A' : 8970,
'hungarumlaut' : 733,
'parenleft' : 40,
'uni0148' : 328,
'uni0149' : 329,
'uni2124' : 8484,
'uni2125' : 8485,
'uni2126' : 8486,
'uni2127' : 8487,
'uni0140' : 320,
'uni2129' : 8489,
'uni25C5' : 9669,
'uni0143' : 323,
'uni0144' : 324,
'uni0145' : 325,
'uni0146' : 326,
'uni0147' : 327,
'uni210D' : 8461,
'fraction' : 8260,
'uni2031' : 8241,
'uni2196' : 8598,
'uni2035' : 8245,
'uni24E6' : 9446,
'uni016B' : 363,
'uni24BA' : 9402,
'uni266A' : 9834,
'uni0116' : 278,
'uni2115' : 8469,
'registered' : 174,
'J' : 74,
'uni25DF' : 9695,
'uni25CE' : 9678,
'uni273D' : 10045,
'dieresis' : 168,
'uni212B' : 8491,
'uni0114' : 276,
'uni212D' : 8493,
'uni212E' : 8494,
'uni212F' : 8495,
'uni014A' : 330,
'uni014B' : 331,
'uni014C' : 332,
'uni014D' : 333,
'uni014E' : 334,
'uni014F' : 335,
'uni025E' : 606,
'uni24E8' : 9448,
'uni0111' : 273,
'uni24E9' : 9449,
'Ograve' : 210,
'j' : 106,
'uni2195' : 8597,
'uni2194' : 8596,
'uni2197' : 8599,
'uni2037' : 8247,
'uni2191' : 8593,
'uni2190' : 8592,
'uni2193' : 8595,
'uni2192' : 8594,
'uni29FA' : 10746,
'uni2713' : 10003,
'z' : 122,
'uni2199' : 8601,
'uni2198' : 8600,
'uni2667' : 9831,
'ae' : 230,
'uni0448' : 1096,
'semicolon' : 59,
'uni2666' : 9830,
'uni038F' : 911,
'uni0444' : 1092,
'uni0447' : 1095,
'uni038E' : 910,
'uni0441' : 1089,
'uni038C' : 908,
'uni0443' : 1091,
'uni038A' : 906,
'uni0250' : 592,
'uni0251' : 593,
'uni0252' : 594,
'uni0253' : 595,
'uni0254' : 596,
'at' : 64,
'uni0256' : 598,
'uni0257' : 599,
'uni0167' : 359,
'uni0259' : 601,
'uni228C' : 8844,
'uni2662' : 9826,
'uni0319' : 793,
'uni0318' : 792,
'uni24BC' : 9404,
'uni0402' : 1026,
'uni22EF' : 8943,
'Iacute' : 205,
'uni22ED' : 8941,
'uni22EE' : 8942,
'uni0311' : 785,
'uni0310' : 784,
'uni21E8' : 8680,
'uni0312' : 786,
'percent' : 37,
'uni0317' : 791,
'uni0316' : 790,
'uni21D6' : 8662,
'uni21D7' : 8663,
'uni21D4' : 8660,
'uni21D5' : 8661,
'uni21D2' : 8658,
'uni21D3' : 8659,
'uni21D0' : 8656,
'uni2138' : 8504,
'uni2270' : 8816,
'uni2271' : 8817,
'uni2272' : 8818,
'uni2273' : 8819,
'uni2274' : 8820,
'uni2275' : 8821,
'bracketright' : 93,
'uni21D9' : 8665,
'uni21DF' : 8671,
'uni21DD' : 8669,
'uni21DE' : 8670,
'AE' : 198,
'uni03AE' : 942,
'uni227A' : 8826,
'uni227B' : 8827,
'uni227C' : 8828,
'asterisk' : 42,
'aacute' : 225,
'uni226F' : 8815,
'uni22E2' : 8930,
'uni0386' : 902,
'uni22E0' : 8928,
'uni22E1' : 8929,
'U' : 85,
'uni22E7' : 8935,
'uni22E4' : 8932,
'uni0387' : 903,
'uni031A' : 794,
'eacute' : 233,
'uni22E8' : 8936,
'uni22E9' : 8937,
'uni24D8' : 9432,
'uni025A' : 602,
'uni025B' : 603,
'uni025C' : 604,
'e' : 101,
'uni0128' : 296,
'uni025F' : 607,
'uni2665' : 9829,
'thorn' : 254,
'uni0129' : 297,
'uni253C' : 9532,
'uni25D7' : 9687,
'u' : 117,
'uni0388' : 904,
'uni0389' : 905,
'uni0255' : 597,
'uni0171' : 369,
'uni0384' : 900,
'uni0385' : 901,
'uni044A' : 1098,
'uni252C' : 9516,
'uni044C' : 1100,
'uni044B' : 1099
}
uni2type1 = dict(((v,k) for k,v in six.iteritems(type12uni)))
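# Illustrative sketch, not part of upstream matplotlib: type12uni maps Type-1
# glyph names to Unicode code points and uni2type1 is its inverse, so the two
# tables translate in either direction.  The helper name is hypothetical.
def _glyph_roundtrip(glyph_name):
    """Return (code point, recovered name) for a Type-1 glyph name."""
    codepoint = type12uni[glyph_name]        # e.g. 'aring' -> 229
    return codepoint, uni2type1[codepoint]   # ... and back to 'aring'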
tex2uni = {
'widehat' : 0x0302,
'widetilde' : 0x0303,
'widebar' : 0x0305,
'langle' : 0x27e8,
'rangle' : 0x27e9,
'perp' : 0x27c2,
'neq' : 0x2260,
'Join' : 0x2a1d,
'leqslant' : 0x2a7d,
'geqslant' : 0x2a7e,
'lessapprox' : 0x2a85,
'gtrapprox' : 0x2a86,
'lesseqqgtr' : 0x2a8b,
'gtreqqless' : 0x2a8c,
'triangleeq' : 0x225c,
'eqslantless' : 0x2a95,
'eqslantgtr' : 0x2a96,
'backepsilon' : 0x03f6,
'precapprox' : 0x2ab7,
'succapprox' : 0x2ab8,
'fallingdotseq' : 0x2252,
'subseteqq' : 0x2ac5,
'supseteqq' : 0x2ac6,
'varpropto' : 0x221d,
'precnapprox' : 0x2ab9,
'succnapprox' : 0x2aba,
'subsetneqq' : 0x2acb,
'supsetneqq' : 0x2acc,
'lnapprox' : 0x2ab9,
'gnapprox' : 0x2aba,
'longleftarrow' : 0x27f5,
'longrightarrow' : 0x27f6,
'longleftrightarrow' : 0x27f7,
'Longleftarrow' : 0x27f8,
'Longrightarrow' : 0x27f9,
'Longleftrightarrow' : 0x27fa,
'longmapsto' : 0x27fc,
'leadsto' : 0x21dd,
'dashleftarrow' : 0x290e,
'dashrightarrow' : 0x290f,
'circlearrowleft' : 0x21ba,
'circlearrowright' : 0x21bb,
'leftrightsquigarrow' : 0x21ad,
'leftsquigarrow' : 0x219c,
'rightsquigarrow' : 0x219d,
'Game' : 0x2141,
'hbar' : 0x0127,
'hslash' : 0x210f,
'ldots' : 0x2026,
'vdots' : 0x22ee,
'doteqdot' : 0x2251,
'doteq' : 8784,
'partial' : 8706,
'gg' : 8811,
'asymp' : 8781,
'blacktriangledown' : 9662,
'otimes' : 8855,
'nearrow' : 8599,
'varpi' : 982,
'vee' : 8744,
'vec' : 8407,
'smile' : 8995,
'succnsim' : 8937,
'gimel' : 8503,
'vert' : 124,
'|' : 124,
'varrho' : 1009,
'P' : 182,
'approxident' : 8779,
'Swarrow' : 8665,
'textasciicircum' : 94,
'imageof' : 8887,
'ntriangleleft' : 8938,
'nleq' : 8816,
'div' : 247,
'nparallel' : 8742,
'Leftarrow' : 8656,
'lll' : 8920,
'oiint' : 8751,
'ngeq' : 8817,
'Theta' : 920,
'origof' : 8886,
'blacksquare' : 9632,
'solbar' : 9023,
'neg' : 172,
'sum' : 8721,
'Vdash' : 8873,
'coloneq' : 8788,
'degree' : 176,
'bowtie' : 8904,
'blacktriangleright' : 9654,
'varsigma' : 962,
'leq' : 8804,
'ggg' : 8921,
'lneqq' : 8808,
'scurel' : 8881,
'stareq' : 8795,
'BbbN' : 8469,
'nLeftarrow' : 8653,
'nLeftrightarrow' : 8654,
'k' : 808,
'bot' : 8869,
'BbbC' : 8450,
'Lsh' : 8624,
'leftleftarrows' : 8647,
'BbbZ' : 8484,
'digamma' : 989,
'BbbR' : 8477,
'BbbP' : 8473,
'BbbQ' : 8474,
'vartriangleright' : 8883,
'succsim' : 8831,
'wedge' : 8743,
'lessgtr' : 8822,
'veebar' : 8891,
'mapsdown' : 8615,
'Rsh' : 8625,
'chi' : 967,
'prec' : 8826,
'nsubseteq' : 8840,
'therefore' : 8756,
'eqcirc' : 8790,
'textexclamdown' : 161,
'nRightarrow' : 8655,
'flat' : 9837,
'notin' : 8713,
'llcorner' : 8990,
'varepsilon' : 949,
'bigtriangleup' : 9651,
'aleph' : 8501,
'dotminus' : 8760,
'upsilon' : 965,
'Lambda' : 923,
'cap' : 8745,
'barleftarrow' : 8676,
'mu' : 956,
'boxplus' : 8862,
'mp' : 8723,
'circledast' : 8859,
'tau' : 964,
'in' : 8712,
'backslash' : 92,
'varnothing' : 8709,
'sharp' : 9839,
'eqsim' : 8770,
'gnsim' : 8935,
'Searrow' : 8664,
'updownarrows' : 8645,
'heartsuit' : 9825,
'trianglelefteq' : 8884,
'ddag' : 8225,
'sqsubseteq' : 8849,
'mapsfrom' : 8612,
'boxbar' : 9707,
'sim' : 8764,
'Nwarrow' : 8662,
'nequiv' : 8802,
'succ' : 8827,
'vdash' : 8866,
'Leftrightarrow' : 8660,
'parallel' : 8741,
'invnot' : 8976,
'natural' : 9838,
'ss' : 223,
'uparrow' : 8593,
'nsim' : 8769,
'hookrightarrow' : 8618,
'Equiv' : 8803,
'approx' : 8776,
'Vvdash' : 8874,
'nsucc' : 8833,
'leftrightharpoons' : 8651,
'Re' : 8476,
'boxminus' : 8863,
'equiv' : 8801,
'Lleftarrow' : 8666,
'll' : 8810,
'Cup' : 8915,
'measeq' : 8798,
'upharpoonleft' : 8639,
'lq' : 8216,
'Upsilon' : 933,
'subsetneq' : 8842,
'greater' : 62,
'supsetneq' : 8843,
'Cap' : 8914,
'L' : 321,
'spadesuit' : 9824,
'lrcorner' : 8991,
'not' : 824,
'bar' : 772,
'rightharpoonaccent' : 8401,
'boxdot' : 8865,
'l' : 322,
'leftharpoondown' : 8637,
'bigcup' : 8899,
'iint' : 8748,
'bigwedge' : 8896,
'downharpoonleft' : 8643,
'textasciitilde' : 126,
'subset' : 8834,
'leqq' : 8806,
'mapsup' : 8613,
'nvDash' : 8877,
'looparrowleft' : 8619,
'nless' : 8814,
'rightarrowbar' : 8677,
'Vert' : 8214,
'downdownarrows' : 8650,
'uplus' : 8846,
'simeq' : 8771,
'napprox' : 8777,
'ast' : 8727,
'twoheaduparrow' : 8607,
'doublebarwedge' : 8966,
'Sigma' : 931,
'leftharpoonaccent' : 8400,
'ntrianglelefteq' : 8940,
'nexists' : 8708,
'times' : 215,
'measuredangle' : 8737,
'bumpeq' : 8783,
'carriagereturn' : 8629,
'adots' : 8944,
'checkmark' : 10003,
'lambda' : 955,
'xi' : 958,
'rbrace' : 125,
'rbrack' : 93,
'Nearrow' : 8663,
'maltese' : 10016,
'clubsuit' : 9827,
'top' : 8868,
'overarc' : 785,
'varphi' : 966,
'Delta' : 916,
'iota' : 953,
'nleftarrow' : 8602,
'candra' : 784,
'supset' : 8835,
'triangleleft' : 9665,
'gtreqless' : 8923,
'ntrianglerighteq' : 8941,
'quad' : 8195,
'Xi' : 926,
'gtrdot' : 8919,
'leftthreetimes' : 8907,
'minus' : 8722,
'preccurlyeq' : 8828,
'nleftrightarrow' : 8622,
'lambdabar' : 411,
'blacktriangle' : 9652,
'kernelcontraction' : 8763,
'Phi' : 934,
'angle' : 8736,
'spadesuitopen' : 9828,
'eqless' : 8924,
'mid' : 8739,
'varkappa' : 1008,
'Ldsh' : 8626,
'updownarrow' : 8597,
'beta' : 946,
'textquotedblleft' : 8220,
'rho' : 961,
'alpha' : 945,
'intercal' : 8890,
'beth' : 8502,
'grave' : 768,
'acwopencirclearrow' : 8634,
'nmid' : 8740,
'nsupset' : 8837,
'sigma' : 963,
'dot' : 775,
'Rightarrow' : 8658,
'turnednot' : 8985,
'backsimeq' : 8909,
'leftarrowtail' : 8610,
'approxeq' : 8778,
'curlyeqsucc' : 8927,
'rightarrowtail' : 8611,
'Psi' : 936,
'copyright' : 169,
'yen' : 165,
'vartriangleleft' : 8882,
'rasp' : 700,
'triangleright' : 9655,
'precsim' : 8830,
'infty' : 8734,
'geq' : 8805,
'updownarrowbar' : 8616,
'precnsim' : 8936,
'H' : 779,
'ulcorner' : 8988,
'looparrowright' : 8620,
'ncong' : 8775,
'downarrow' : 8595,
'circeq' : 8791,
'subseteq' : 8838,
'bigstar' : 9733,
'prime' : 8242,
'lceil' : 8968,
'Rrightarrow' : 8667,
'oiiint' : 8752,
'curlywedge' : 8911,
'vDash' : 8872,
'lfloor' : 8970,
'ddots' : 8945,
'exists' : 8707,
'underbar' : 817,
'Pi' : 928,
'leftrightarrows' : 8646,
'sphericalangle' : 8738,
'coprod' : 8720,
'circledcirc' : 8858,
'gtrsim' : 8819,
'gneqq' : 8809,
'between' : 8812,
'theta' : 952,
'complement' : 8705,
'arceq' : 8792,
'nVdash' : 8878,
'S' : 167,
'wr' : 8768,
'wp' : 8472,
'backcong' : 8780,
'lasp' : 701,
'c' : 807,
'nabla' : 8711,
'dotplus' : 8724,
'eta' : 951,
'forall' : 8704,
'eth' : 240,
'colon' : 58,
'sqcup' : 8852,
'rightrightarrows' : 8649,
'sqsupset' : 8848,
'mapsto' : 8614,
'bigtriangledown' : 9661,
'sqsupseteq' : 8850,
'propto' : 8733,
'pi' : 960,
'pm' : 177,
'dots' : 0x2026,
'nrightarrow' : 8603,
'textasciiacute' : 180,
'Doteq' : 8785,
'breve' : 774,
'sqcap' : 8851,
'twoheadrightarrow' : 8608,
'kappa' : 954,
'vartriangle' : 9653,
'diamondsuit' : 9826,
'pitchfork' : 8916,
'blacktriangleleft' : 9664,
'nprec' : 8832,
'curvearrowright' : 8631,
'barwedge' : 8892,
'multimap' : 8888,
'textquestiondown' : 191,
'cong' : 8773,
'rtimes' : 8906,
'rightzigzagarrow' : 8669,
'rightarrow' : 8594,
'leftarrow' : 8592,
'__sqrt__' : 8730,
'twoheaddownarrow' : 8609,
'oint' : 8750,
'bigvee' : 8897,
'eqdef' : 8797,
'sterling' : 163,
'phi' : 981,
'Updownarrow' : 8661,
'backprime' : 8245,
'emdash' : 8212,
'Gamma' : 915,
'i' : 305,
'rceil' : 8969,
'leftharpoonup' : 8636,
'Im' : 8465,
'curvearrowleft' : 8630,
'wedgeq' : 8793,
'curlyeqprec' : 8926,
'questeq' : 8799,
'less' : 60,
'upuparrows' : 8648,
'tilde' : 771,
'textasciigrave' : 96,
'smallsetminus' : 8726,
'ell' : 8467,
'cup' : 8746,
'danger' : 9761,
'nVDash' : 8879,
'cdotp' : 183,
'cdots' : 8943,
'hat' : 770,
'eqgtr' : 8925,
'psi' : 968,
'frown' : 8994,
'acute' : 769,
'downzigzagarrow' : 8623,
'ntriangleright' : 8939,
'cupdot' : 8845,
'circleddash' : 8861,
'oslash' : 8856,
'mho' : 8487,
'd' : 803,
'sqsubset' : 8847,
'cdot' : 8901,
'Omega' : 937,
'OE' : 338,
'veeeq' : 8794,
'Finv' : 8498,
't' : 865,
'leftrightarrow' : 8596,
'swarrow' : 8601,
'rightthreetimes' : 8908,
'rightleftharpoons' : 8652,
'lesssim' : 8818,
'searrow' : 8600,
'because' : 8757,
'gtrless' : 8823,
'star' : 8902,
'nsubset' : 8836,
'zeta' : 950,
'dddot' : 8411,
'bigcirc' : 9675,
'Supset' : 8913,
'circ' : 8728,
'slash' : 8725,
'ocirc' : 778,
'prod' : 8719,
'twoheadleftarrow' : 8606,
'daleth' : 8504,
'upharpoonright' : 8638,
'odot' : 8857,
'Uparrow' : 8657,
'O' : 216,
'hookleftarrow' : 8617,
'trianglerighteq' : 8885,
'nsime' : 8772,
'oe' : 339,
'nwarrow' : 8598,
'o' : 248,
'ddddot' : 8412,
'downharpoonright' : 8642,
'succcurlyeq' : 8829,
'gamma' : 947,
'scrR' : 8475,
'dag' : 8224,
'thickspace' : 8197,
'frakZ' : 8488,
'lessdot' : 8918,
'triangledown' : 9663,
'ltimes' : 8905,
'scrB' : 8492,
'endash' : 8211,
'scrE' : 8496,
'scrF' : 8497,
'scrH' : 8459,
'scrI' : 8464,
'rightharpoondown' : 8641,
'scrL' : 8466,
'scrM' : 8499,
'frakC' : 8493,
'nsupseteq' : 8841,
'circledR' : 174,
'circledS' : 9416,
'ngtr' : 8815,
'bigcap' : 8898,
'scre' : 8495,
'Downarrow' : 8659,
'scrg' : 8458,
'overleftrightarrow' : 8417,
'scro' : 8500,
'lnsim' : 8934,
'eqcolon' : 8789,
'curlyvee' : 8910,
'urcorner' : 8989,
'lbrace' : 123,
'Bumpeq' : 8782,
'delta' : 948,
'boxtimes' : 8864,
'overleftarrow' : 8406,
'prurel' : 8880,
'clubsuitopen' : 9831,
'cwopencirclearrow' : 8635,
'geqq' : 8807,
'rightleftarrows' : 8644,
'ac' : 8766,
'ae' : 230,
'int' : 8747,
'rfloor' : 8971,
'risingdotseq' : 8787,
'nvdash' : 8876,
'diamond' : 8900,
'ddot' : 776,
'backsim' : 8765,
'oplus' : 8853,
'triangleq' : 8796,
'check' : 780,
'ni' : 8715,
'iiint' : 8749,
'ne' : 8800,
'lesseqgtr' : 8922,
'obar' : 9021,
'supseteq' : 8839,
'nu' : 957,
'AA' : 197,
'AE' : 198,
'models' : 8871,
'ominus' : 8854,
'dashv' : 8867,
'omega' : 969,
'rq' : 8217,
'Subset' : 8912,
'rightharpoonup' : 8640,
'Rdsh' : 8627,
'bullet' : 8729,
'divideontimes' : 8903,
'lbrack' : 91,
'textquotedblright' : 8221,
'Colon' : 8759,
'%' : 37,
'$' : 36,
'{' : 123,
'}' : 125,
'_' : 95,
'#' : 35,
'imath' : 0x131,
'circumflexaccent' : 770,
'combiningbreve' : 774,
'combiningoverline' : 772,
'combininggraveaccent' : 768,
'combiningacuteaccent' : 769,
'combiningdiaeresis' : 776,
'combiningtilde' : 771,
'combiningrightarrowabove' : 8407,
'combiningdotabove' : 775,
'to' : 8594,
'succeq' : 8829,
'emptyset' : 8709,
'leftparen' : 40,
'rightparen' : 41,
'bigoplus' : 10753,
'leftangle' : 10216,
'rightangle' : 10217,
'leftbrace' : 124,
'rightbrace' : 125,
'jmath' : 567,
'bigodot' : 10752,
'preceq' : 8828,
'biguplus' : 10756,
'epsilon' : 949,
'vartheta' : 977,
'bigotimes' : 10754,
'guillemotleft' : 171,
'ring' : 730,
'Thorn' : 222,
'guilsinglright' : 8250,
'perthousand' : 8240,
'macron' : 175,
'cent' : 162,
'guillemotright' : 187,
'equal' : 61,
'asterisk' : 42,
'guilsinglleft' : 8249,
'plus' : 43,
'thorn' : 254,
'dagger' : 8224
}
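# Illustrative sketch, not part of upstream matplotlib: tex2uni stores the
# Unicode code point for each TeX macro name (without the leading backslash).
# The helper name is hypothetical; six.unichr keeps it Python 2/3 compatible.
def _tex_symbol(name):
    """Return the Unicode character for a TeX macro name, e.g. 'alpha'."""
    # _tex_symbol('alpha') -> u'\u03b1', _tex_symbol('leq') -> u'\u2264'
    return six.unichr(tex2uni[name])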
# Each element is a 4-tuple of the form:
# src_start, src_end, dst_font, dst_start
#
stix_virtual_fonts = {
'bb':
{
'rm':
[
(0x0030, 0x0039, 'rm', 0x1d7d8), # 0-9
(0x0041, 0x0042, 'rm', 0x1d538), # A-B
(0x0043, 0x0043, 'rm', 0x2102), # C
(0x0044, 0x0047, 'rm', 0x1d53b), # D-G
(0x0048, 0x0048, 'rm', 0x210d), # H
(0x0049, 0x004d, 'rm', 0x1d540), # I-M
(0x004e, 0x004e, 'rm', 0x2115), # N
(0x004f, 0x004f, 'rm', 0x1d546), # O
(0x0050, 0x0051, 'rm', 0x2119), # P-Q
(0x0052, 0x0052, 'rm', 0x211d), # R
(0x0053, 0x0059, 'rm', 0x1d54a), # S-Y
(0x005a, 0x005a, 'rm', 0x2124), # Z
(0x0061, 0x007a, 'rm', 0x1d552), # a-z
(0x0393, 0x0393, 'rm', 0x213e), # \Gamma
(0x03a0, 0x03a0, 'rm', 0x213f), # \Pi
(0x03a3, 0x03a3, 'rm', 0x2140), # \Sigma
(0x03b3, 0x03b3, 'rm', 0x213d), # \gamma
(0x03c0, 0x03c0, 'rm', 0x213c), # \pi
],
'it':
[
(0x0030, 0x0039, 'rm', 0x1d7d8), # 0-9
(0x0041, 0x0042, 'it', 0xe154), # A-B
(0x0043, 0x0043, 'it', 0x2102), # C
(0x0044, 0x0044, 'it', 0x2145), # D
(0x0045, 0x0047, 'it', 0xe156), # E-G
(0x0048, 0x0048, 'it', 0x210d), # H
(0x0049, 0x004d, 'it', 0xe159), # I-M
(0x004e, 0x004e, 'it', 0x2115), # N
(0x004f, 0x004f, 'it', 0xe15e), # O
(0x0050, 0x0051, 'it', 0x2119), # P-Q
(0x0052, 0x0052, 'it', 0x211d), # R
(0x0053, 0x0059, 'it', 0xe15f), # S-Y
(0x005a, 0x005a, 'it', 0x2124), # Z
(0x0061, 0x0063, 'it', 0xe166), # a-c
(0x0064, 0x0065, 'it', 0x2146), # d-e
(0x0066, 0x0068, 'it', 0xe169), # f-h
(0x0069, 0x006a, 'it', 0x2148), # i-j
(0x006b, 0x007a, 'it', 0xe16c), # k-z
(0x0393, 0x0393, 'it', 0x213e), # \Gamma (not in beta STIX fonts)
(0x03a0, 0x03a0, 'it', 0x213f), # \Pi
(0x03a3, 0x03a3, 'it', 0x2140), # \Sigma (not in beta STIX fonts)
(0x03b3, 0x03b3, 'it', 0x213d), # \gamma (not in beta STIX fonts)
(0x03c0, 0x03c0, 'it', 0x213c), # \pi
],
'bf':
[
(0x0030, 0x0039, 'rm', 0x1d7d8), # 0-9
(0x0041, 0x0042, 'bf', 0xe38a), # A-B
(0x0043, 0x0043, 'bf', 0x2102), # C
(0x0044, 0x0044, 'bf', 0x2145), # D
(0x0045, 0x0047, 'bf', 0xe38d), # E-G
(0x0048, 0x0048, 'bf', 0x210d), # H
(0x0049, 0x004d, 'bf', 0xe390), # I-M
(0x004e, 0x004e, 'bf', 0x2115), # N
(0x004f, 0x004f, 'bf', 0xe395), # O
(0x0050, 0x0051, 'bf', 0x2119), # P-Q
(0x0052, 0x0052, 'bf', 0x211d), # R
(0x0053, 0x0059, 'bf', 0xe396), # S-Y
(0x005a, 0x005a, 'bf', 0x2124), # Z
(0x0061, 0x0063, 'bf', 0xe39d), # a-c
(0x0064, 0x0065, 'bf', 0x2146), # d-e
(0x0066, 0x0068, 'bf', 0xe3a2), # f-h
(0x0069, 0x006a, 'bf', 0x2148), # i-j
(0x006b, 0x007a, 'bf', 0xe3a7), # k-z
(0x0393, 0x0393, 'bf', 0x213e), # \Gamma
(0x03a0, 0x03a0, 'bf', 0x213f), # \Pi
(0x03a3, 0x03a3, 'bf', 0x2140), # \Sigma
(0x03b3, 0x03b3, 'bf', 0x213d), # \gamma
(0x03c0, 0x03c0, 'bf', 0x213c), # \pi
],
},
'cal':
[
(0x0041, 0x005a, 'it', 0xe22d), # A-Z
],
'circled':
{
'rm':
[
(0x0030, 0x0030, 'rm', 0x24ea), # 0
(0x0031, 0x0039, 'rm', 0x2460), # 1-9
(0x0041, 0x005a, 'rm', 0x24b6), # A-Z
(0x0061, 0x007a, 'rm', 0x24d0) # a-z
],
'it':
[
(0x0030, 0x0030, 'rm', 0x24ea), # 0
(0x0031, 0x0039, 'rm', 0x2460), # 1-9
(0x0041, 0x005a, 'it', 0x24b6), # A-Z
(0x0061, 0x007a, 'it', 0x24d0) # a-z
],
'bf':
[
(0x0030, 0x0030, 'bf', 0x24ea), # 0
(0x0031, 0x0039, 'bf', 0x2460), # 1-9
(0x0041, 0x005a, 'bf', 0x24b6), # A-Z
(0x0061, 0x007a, 'bf', 0x24d0) # a-z
],
},
'frak':
{
'rm':
[
(0x0041, 0x0042, 'rm', 0x1d504), # A-B
(0x0043, 0x0043, 'rm', 0x212d), # C
(0x0044, 0x0047, 'rm', 0x1d507), # D-G
(0x0048, 0x0048, 'rm', 0x210c), # H
(0x0049, 0x0049, 'rm', 0x2111), # I
(0x004a, 0x0051, 'rm', 0x1d50d), # J-Q
(0x0052, 0x0052, 'rm', 0x211c), # R
(0x0053, 0x0059, 'rm', 0x1d516), # S-Y
(0x005a, 0x005a, 'rm', 0x2128), # Z
(0x0061, 0x007a, 'rm', 0x1d51e), # a-z
],
'it':
[
(0x0041, 0x0042, 'rm', 0x1d504), # A-B
(0x0043, 0x0043, 'rm', 0x212d), # C
(0x0044, 0x0047, 'rm', 0x1d507), # D-G
(0x0048, 0x0048, 'rm', 0x210c), # H
(0x0049, 0x0049, 'rm', 0x2111), # I
(0x004a, 0x0051, 'rm', 0x1d50d), # J-Q
(0x0052, 0x0052, 'rm', 0x211c), # R
(0x0053, 0x0059, 'rm', 0x1d516), # S-Y
(0x005a, 0x005a, 'rm', 0x2128), # Z
(0x0061, 0x007a, 'rm', 0x1d51e), # a-z
],
'bf':
[
(0x0041, 0x005a, 'bf', 0x1d56c), # A-Z
(0x0061, 0x007a, 'bf', 0x1d586), # a-z
],
},
'scr':
[
(0x0041, 0x0041, 'it', 0x1d49c), # A
(0x0042, 0x0042, 'it', 0x212c), # B
(0x0043, 0x0044, 'it', 0x1d49e), # C-D
(0x0045, 0x0046, 'it', 0x2130), # E-F
(0x0047, 0x0047, 'it', 0x1d4a2), # G
(0x0048, 0x0048, 'it', 0x210b), # H
(0x0049, 0x0049, 'it', 0x2110), # I
(0x004a, 0x004b, 'it', 0x1d4a5), # J-K
(0x004c, 0x004c, 'it', 0x2112), # L
(0x004d, 0x004d, 'it', 0x2133), # M
(0x004e, 0x0051, 'it', 0x1d4a9), # N-Q
(0x0052, 0x0052, 'it', 0x211b), # R
(0x0053, 0x005a, 'it', 0x1d4ae), # S-Z
(0x0061, 0x0064, 'it', 0x1d4b6), # a-d
(0x0065, 0x0065, 'it', 0x212f), # e
(0x0066, 0x0066, 'it', 0x1d4bb), # f
(0x0067, 0x0067, 'it', 0x210a), # g
(0x0068, 0x006e, 'it', 0x1d4bd), # h-n
(0x006f, 0x006f, 'it', 0x2134), # o
(0x0070, 0x007a, 'it', 0x1d4c5), # p-z
],
'sf':
{
'rm':
[
(0x0030, 0x0039, 'rm', 0x1d7e2), # 0-9
(0x0041, 0x005a, 'rm', 0x1d5a0), # A-Z
(0x0061, 0x007a, 'rm', 0x1d5ba), # a-z
(0x0391, 0x03a9, 'rm', 0xe17d), # \Alpha-\Omega
(0x03b1, 0x03c9, 'rm', 0xe196), # \alpha-\omega
(0x03d1, 0x03d1, 'rm', 0xe1b0), # theta variant
(0x03d5, 0x03d5, 'rm', 0xe1b1), # phi variant
(0x03d6, 0x03d6, 'rm', 0xe1b3), # pi variant
(0x03f1, 0x03f1, 'rm', 0xe1b2), # rho variant
(0x03f5, 0x03f5, 'rm', 0xe1af), # lunate epsilon
(0x2202, 0x2202, 'rm', 0xe17c), # partial differential
],
'it':
[
# These numerals are actually upright. We don't actually
# want italic numerals ever.
(0x0030, 0x0039, 'rm', 0x1d7e2), # 0-9
(0x0041, 0x005a, 'it', 0x1d608), # A-Z
(0x0061, 0x007a, 'it', 0x1d622), # a-z
(0x0391, 0x03a9, 'rm', 0xe17d), # \Alpha-\Omega
(0x03b1, 0x03c9, 'it', 0xe1d8), # \alpha-\omega
(0x03d1, 0x03d1, 'it', 0xe1f2), # theta variant
(0x03d5, 0x03d5, 'it', 0xe1f3), # phi variant
(0x03d6, 0x03d6, 'it', 0xe1f5), # pi variant
(0x03f1, 0x03f1, 'it', 0xe1f4), # rho variant
(0x03f5, 0x03f5, 'it', 0xe1f1), # lunate epsilon
],
'bf':
[
(0x0030, 0x0039, 'bf', 0x1d7ec), # 0-9
(0x0041, 0x005a, 'bf', 0x1d5d4), # A-Z
(0x0061, 0x007a, 'bf', 0x1d5ee), # a-z
(0x0391, 0x03a9, 'bf', 0x1d756), # \Alpha-\Omega
(0x03b1, 0x03c9, 'bf', 0x1d770), # \alpha-\omega
(0x03d1, 0x03d1, 'bf', 0x1d78b), # theta variant
(0x03d5, 0x03d5, 'bf', 0x1d78d), # phi variant
(0x03d6, 0x03d6, 'bf', 0x1d78f), # pi variant
(0x03f0, 0x03f0, 'bf', 0x1d78c), # kappa variant
(0x03f1, 0x03f1, 'bf', 0x1d78e), # rho variant
(0x03f5, 0x03f5, 'bf', 0x1d78a), # lunate epsilon
(0x2202, 0x2202, 'bf', 0x1d789), # partial differential
(0x2207, 0x2207, 'bf', 0x1d76f), # \Nabla
],
},
'tt':
[
(0x0030, 0x0039, 'rm', 0x1d7f6), # 0-9
(0x0041, 0x005a, 'rm', 0x1d670), # A-Z
(0x0061, 0x007a, 'rm', 0x1d68a) # a-z
],
}
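# Illustrative sketch, not part of upstream matplotlib: each 4-tuple above maps
# the inclusive source range [src_start, src_end] onto a run of glyphs starting
# at dst_start in dst_font, so a code point is shifted by its offset from
# src_start.  The helper name is hypothetical.
def _stix_remap(mapping, codepoint):
    """Return (dst_font, dst_codepoint) for *codepoint*, or None if unmapped."""
    for src_start, src_end, dst_font, dst_start in mapping:
        if src_start <= codepoint <= src_end:
            return dst_font, dst_start + (codepoint - src_start)
    return None
# Example: _stix_remap(stix_virtual_fonts['bb']['rm'], ord('C')) -> ('rm', 0x2102)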
| 89,505 | 34.114162 | 78 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/rcsetup.py |
"""
The rcsetup module contains the default values and the validation code for
customization using matplotlib's rc settings.
Each rc setting is assigned a default value and a function used to validate
any attempted changes to that setting. The default values and validation
functions are defined in the rcsetup module, and are used to construct the
rcParams global object which stores the settings and is referenced throughout
matplotlib.
These default values should be consistent with the default matplotlibrc file
that actually reflects the values given here. Any additions or deletions to the
parameter set listed here should also be reflected in the
:file:`matplotlibrc.template` in matplotlib's root source directory.
"""
from __future__ import absolute_import, division, print_function
import six
from collections import Iterable, Mapping
from functools import reduce
import operator
import os
import warnings
import re
from matplotlib import cbook, testing
from matplotlib.cbook import mplDeprecation, deprecated, ls_mapper
from matplotlib.fontconfig_pattern import parse_fontconfig_pattern
from matplotlib.colors import is_color_like
# Don't let the original cycler collide with our validating cycler
from cycler import Cycler, cycler as ccycler
# The capitalized forms are needed for ipython at present; this may
# change for later versions.
interactive_bk = ['GTK', 'GTKAgg', 'GTKCairo', 'MacOSX',
'Qt4Agg', 'Qt5Agg', 'TkAgg', 'WX', 'WXAgg',
'GTK3Cairo', 'GTK3Agg', 'WebAgg', 'nbAgg']
interactive_bk = ['GTK', 'GTKAgg', 'GTKCairo', 'GTK3Agg', 'GTK3Cairo',
'MacOSX',
'nbAgg',
'Qt4Agg', 'Qt4Cairo', 'Qt5Agg', 'Qt5Cairo',
'TkAgg', 'TkCairo',
'WebAgg',
'WX', 'WXAgg', 'WXCairo']
non_interactive_bk = ['agg', 'cairo', 'gdk',
'pdf', 'pgf', 'ps', 'svg', 'template']
all_backends = interactive_bk + non_interactive_bk
class ValidateInStrings(object):
def __init__(self, key, valid, ignorecase=False):
'valid is a list of legal strings'
self.key = key
self.ignorecase = ignorecase
def func(s):
if ignorecase:
return s.lower()
else:
return s
self.valid = {func(k): k for k in valid}
def __call__(self, s):
if self.ignorecase:
s = s.lower()
if s in self.valid:
return self.valid[s]
raise ValueError('Unrecognized %s string "%s": valid strings are %s'
% (self.key, s, list(six.itervalues(self.valid))))
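# Illustrative sketch, not part of upstream matplotlib: a ValidateInStrings
# instance is a callable that canonicalises a string against a fixed set of
# legal values.  The validator and value set below are hypothetical.
def _demo_validate_in_strings():
    """Hypothetical demo: 'LEFT' is canonicalised to 'left'."""
    validate_side = ValidateInStrings('side', ['left', 'right'], ignorecase=True)
    return validate_side('LEFT')   # any unknown string raises ValueError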
def _listify_validator(scalar_validator, allow_stringlist=False):
def f(s):
if isinstance(s, six.string_types):
try:
return [scalar_validator(v.strip()) for v in s.split(',')
if v.strip()]
except Exception:
if allow_stringlist:
# Sometimes, a list of colors might be a single string
# of single-letter colornames. So give that a shot.
return [scalar_validator(v.strip()) for v in s if v.strip()]
else:
raise
# We should allow any generic sequence type, including generators,
# Numpy ndarrays, and pandas data structures. However, unordered
# sequences, such as sets, should be allowed but discouraged unless the
# user desires pseudorandom behavior.
elif isinstance(s, Iterable) and not isinstance(s, Mapping):
# The condition on this list comprehension will preserve the
# behavior of filtering out any empty strings (behavior was
# from the original validate_stringlist()), while allowing
# any non-string/text scalar values such as numbers and arrays.
return [scalar_validator(v) for v in s
if not isinstance(v, six.string_types) or v]
else:
raise ValueError("{!r} must be of type: string or non-dictionary "
"iterable".format(s))
try:
f.__name__ = "{}list".format(scalar_validator.__name__)
except AttributeError: # class instance.
f.__name__ = "{}List".format(type(scalar_validator).__name__)
f.__doc__ = scalar_validator.__doc__
return f
def validate_any(s):
return s
validate_anylist = _listify_validator(validate_any)
def validate_path_exists(s):
"""If s is a path, return s, else False"""
if s is None:
return None
if os.path.exists(s):
return s
else:
raise RuntimeError('"%s" should be a path but it does not exist' % s)
def validate_bool(b):
"""Convert b to a boolean or raise"""
if isinstance(b, six.string_types):
b = b.lower()
if b in ('t', 'y', 'yes', 'on', 'true', '1', 1, True):
return True
elif b in ('f', 'n', 'no', 'off', 'false', '0', 0, False):
return False
else:
raise ValueError('Could not convert "%s" to boolean' % b)
def validate_bool_maybe_none(b):
'Convert b to a boolean or raise'
if isinstance(b, six.string_types):
b = b.lower()
if b is None or b == 'none':
return None
if b in ('t', 'y', 'yes', 'on', 'true', '1', 1, True):
return True
elif b in ('f', 'n', 'no', 'off', 'false', '0', 0, False):
return False
else:
raise ValueError('Could not convert "%s" to boolean' % b)
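# Illustrative sketch, not part of upstream matplotlib: the boolean validators
# accept a wide range of spellings.  The demo function name is hypothetical.
def _demo_validate_bool():
    """Hypothetical demo: 'on', 1 and 'True' map to True, 'off' to False."""
    return (validate_bool('on'), validate_bool(1), validate_bool('True'),
            validate_bool('off'))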
def deprecate_axes_hold(value):
if value is None:
return None # converted to True where accessed in figure.py,
# axes/_base.py
warnings.warn("axes.hold is deprecated, will be removed in 3.0",
mplDeprecation)
return validate_bool(value)
def validate_float(s):
"""convert s to float or raise"""
try:
return float(s)
except ValueError:
raise ValueError('Could not convert "%s" to float' % s)
validate_floatlist = _listify_validator(validate_float)
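# Illustrative sketch, not part of upstream matplotlib: validate_floatlist is
# validate_float lifted over lists, so comma-separated strings and ordinary
# sequences both validate.  The demo function name is hypothetical.
def _demo_validate_floatlist():
    """Hypothetical demo: both calls return [1.0, 2.5]."""
    return validate_floatlist('1.0, 2.5'), validate_floatlist([1.0, '2.5'])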
def validate_float_or_None(s):
"""convert s to float, None or raise"""
# values directly from the rc file can only be strings,
# so we need to recognize the string "None" and convert
# it into the object. We will be case-sensitive here to
# avoid confusion between string values of 'none', which
# can be a valid string value for some other parameters.
if s is None or s == 'None':
return None
try:
return float(s)
except ValueError:
raise ValueError('Could not convert "%s" to float or None' % s)
def validate_string_or_None(s):
"""convert s to string or raise"""
if s is None:
return None
try:
return validate_string(s)
except ValueError:
raise ValueError('Could not convert "%s" to string' % s)
def validate_axisbelow(s):
try:
return validate_bool(s)
except ValueError:
if isinstance(s, six.string_types):
s = s.lower()
if s.startswith('line'):
return 'line'
raise ValueError('%s cannot be interpreted as'
' True, False, or "line"' % s)
def validate_dpi(s):
"""confirm s is string 'figure' or convert s to float or raise"""
if s == 'figure':
return s
try:
return float(s)
except ValueError:
raise ValueError('"%s" is not string "figure" or'
' could not convert "%s" to float' % (s, s))
def validate_int(s):
"""convert s to int or raise"""
try:
return int(s)
except ValueError:
raise ValueError('Could not convert "%s" to int' % s)
def validate_int_or_None(s):
"""if not None, tries to validate as an int"""
    if s == 'None':
s = None
if s is None:
return None
try:
return int(s)
except ValueError:
raise ValueError('Could not convert "%s" to int' % s)
def validate_fonttype(s):
"""
    confirm that this is a Postscript or PDF font type that we know how to
convert to
"""
fonttypes = {'type3': 3,
'truetype': 42}
try:
fonttype = validate_int(s)
except ValueError:
try:
return fonttypes[s.lower()]
except KeyError:
raise ValueError(
'Supported Postscript/PDF font types are %s' % list(fonttypes))
else:
if fonttype not in six.itervalues(fonttypes):
raise ValueError(
'Supported Postscript/PDF font types are %s' %
list(six.itervalues(fonttypes)))
return fonttype
_validate_standard_backends = ValidateInStrings(
'backend', all_backends, ignorecase=True)
def validate_backend(s):
if s.startswith('module://'):
return s
else:
return _validate_standard_backends(s)
def validate_qt4(s):
if s is None:
return None
return ValidateInStrings("backend.qt4", ['PyQt4', 'PySide', 'PyQt4v2'])(s)
def validate_qt5(s):
if s is None:
return None
return ValidateInStrings("backend.qt5", ['PyQt5', 'PySide2'])(s)
def validate_toolbar(s):
validator = ValidateInStrings(
'toolbar',
['None', 'toolbar2', 'toolmanager'],
ignorecase=True)
return validator(s)
_seq_err_msg = ('You must supply exactly {n} values, you provided {num} '
'values: {s}')
_str_err_msg = ('You must supply exactly {n} comma-separated values, you '
'provided {num} comma-separated values: {s}')
class validate_nseq_float(object):
def __init__(self, n=None, allow_none=False):
self.n = n
self.allow_none = allow_none
def __call__(self, s):
"""return a seq of n floats or raise"""
if isinstance(s, six.string_types):
s = [x.strip() for x in s.split(',')]
err_msg = _str_err_msg
else:
err_msg = _seq_err_msg
if self.n is not None and len(s) != self.n:
raise ValueError(err_msg.format(n=self.n, num=len(s), s=s))
try:
return [float(val)
if not self.allow_none or val is not None
else val
for val in s]
except ValueError:
raise ValueError('Could not convert all entries to floats')
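# A minimal usage sketch (illustrative, not from the original source): the class
# is instantiated with the expected length, then called like a function; both
# comma-separated strings (as read from matplotlibrc) and sequences are handled.
#     validate_nseq_float(2)('3.5, 1.6')  # -> [3.5, 1.6]
#     validate_nseq_float(2)([3.5, 1.6])  # -> [3.5, 1.6]
#     validate_nseq_float(2)('1, 2, 3')   # -> ValueError (wrong length)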
class validate_nseq_int(object):
def __init__(self, n=None):
self.n = n
def __call__(self, s):
"""return a seq of n ints or raise"""
if isinstance(s, six.string_types):
s = [x.strip() for x in s.split(',')]
err_msg = _str_err_msg
else:
err_msg = _seq_err_msg
if self.n is not None and len(s) != self.n:
raise ValueError(err_msg.format(n=self.n, num=len(s), s=s))
try:
return [int(val) for val in s]
except ValueError:
raise ValueError('Could not convert all entries to ints')
def validate_color_or_inherit(s):
'return a valid color arg'
if s == 'inherit':
return s
return validate_color(s)
def validate_color_or_auto(s):
if s == 'auto':
return s
return validate_color(s)
def validate_color_for_prop_cycle(s):
# Special-case the N-th color cycle syntax, this obviously can not
# go in the color cycle.
if isinstance(s, bytes):
match = re.match(b'^C[0-9]$', s)
if match is not None:
raise ValueError('Can not put cycle reference ({cn!r}) in '
'prop_cycler'.format(cn=s))
elif isinstance(s, six.string_types):
match = re.match('^C[0-9]$', s)
if match is not None:
raise ValueError('Can not put cycle reference ({cn!r}) in '
'prop_cycler'.format(cn=s))
return validate_color(s)
def validate_color(s):
'return a valid color arg'
try:
if s.lower() == 'none':
return 'none'
except AttributeError:
pass
if isinstance(s, six.string_types):
if len(s) == 6 or len(s) == 8:
stmp = '#' + s
if is_color_like(stmp):
return stmp
if is_color_like(s):
return s
# If it is still valid, it must be a tuple.
colorarg = s
msg = ''
if s.find(',') >= 0:
# get rid of grouping symbols
stmp = ''.join([c for c in s if c.isdigit() or c == '.' or c == ','])
vals = stmp.split(',')
if len(vals) not in [3, 4]:
msg = '\nColor tuples must be of length 3 or 4'
else:
try:
colorarg = [float(val) for val in vals]
except ValueError:
msg = '\nCould not convert all entries to floats'
if not msg and is_color_like(colorarg):
return colorarg
raise ValueError('%s does not look like a color arg%s' % (s, msg))
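# A minimal usage sketch (illustrative, not from the original source): named
# colors, cycle references, hex digits without '#', and stringified tuples are
# all accepted.
#     validate_color('C0')               # -> 'C0'
#     validate_color('eeefff')           # -> '#eeefff'
#     validate_color('(0.1, 0.2, 0.3)')  # -> [0.1, 0.2, 0.3]
#     validate_color('not-a-color')      # -> ValueError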
validate_colorlist = _listify_validator(validate_color, allow_stringlist=True)
validate_colorlist.__doc__ = 'return a list of colorspecs'
def validate_string(s):
if isinstance(s, (str, six.text_type)):
# Always leave str as str and unicode as unicode
return s
else:
return str(s)
validate_stringlist = _listify_validator(str)
validate_stringlist.__doc__ = 'return a list of strings'
validate_orientation = ValidateInStrings(
'orientation', ['landscape', 'portrait'])
def validate_aspect(s):
if s in ('auto', 'equal'):
return s
try:
return float(s)
except ValueError:
raise ValueError('not a valid aspect specification')
def validate_fontsize(s):
fontsizes = ['xx-small', 'x-small', 'small', 'medium', 'large',
'x-large', 'xx-large', 'smaller', 'larger']
if isinstance(s, six.string_types):
s = s.lower()
if s in fontsizes:
return s
try:
return float(s)
except ValueError:
raise ValueError("%s is not a valid font size. Valid font sizes "
"are %s." % (s, ", ".join(fontsizes)))
validate_fontsizelist = _listify_validator(validate_fontsize)
def validate_font_properties(s):
parse_fontconfig_pattern(s)
return s
validate_fontset = ValidateInStrings(
'fontset',
['dejavusans', 'dejavuserif', 'cm', 'stix', 'stixsans', 'custom'])
validate_mathtext_default = ValidateInStrings(
'default',
"rm cal it tt sf bf default bb frak circled scr regular".split())
validate_verbose = ValidateInStrings(
'verbose',
['silent', 'helpful', 'debug', 'debug-annoying'])
_validate_alignment = ValidateInStrings(
'alignment',
['center', 'top', 'bottom', 'baseline',
'center_baseline'])
def validate_whiskers(s):
if s == 'range':
return 'range'
else:
try:
v = validate_nseq_float(2)(s)
return v
except (TypeError, ValueError):
try:
v = float(s)
return v
except ValueError:
raise ValueError("Not a valid whisker value ['range', float, "
"(float, float)]")
def update_savefig_format(value):
# The old savefig.extension could also have a value of "auto", but
# the new savefig.format does not. We need to fix this here.
value = validate_string(value)
if value == 'auto':
value = 'png'
return value
validate_ps_papersize = ValidateInStrings(
'ps_papersize',
['auto', 'letter', 'legal', 'ledger',
'a0', 'a1', 'a2', 'a3', 'a4', 'a5', 'a6', 'a7', 'a8', 'a9', 'a10',
'b0', 'b1', 'b2', 'b3', 'b4', 'b5', 'b6', 'b7', 'b8', 'b9', 'b10',
], ignorecase=True)
def validate_ps_distiller(s):
if isinstance(s, six.string_types):
s = s.lower()
if s in ('none', None):
return None
elif s in ('false', False):
return False
elif s in ('ghostscript', 'xpdf'):
return s
else:
raise ValueError('matplotlibrc ps.usedistiller must either be none, '
'ghostscript or xpdf')
validate_joinstyle = ValidateInStrings('joinstyle',
['miter', 'round', 'bevel'],
ignorecase=True)
validate_joinstylelist = _listify_validator(validate_joinstyle)
validate_capstyle = ValidateInStrings('capstyle',
['butt', 'round', 'projecting'],
ignorecase=True)
validate_capstylelist = _listify_validator(validate_capstyle)
validate_fillstyle = ValidateInStrings('markers.fillstyle',
['full', 'left', 'right', 'bottom',
'top', 'none'])
validate_fillstylelist = _listify_validator(validate_fillstyle)
_validate_negative_linestyle = ValidateInStrings('negative_linestyle',
['solid', 'dashed'],
ignorecase=True)
@deprecated('2.1',
addendum=(" See 'validate_negative_linestyle_legacy' " +
"deprecation warning for more information."))
def validate_negative_linestyle(s):
return _validate_negative_linestyle(s)
@deprecated('2.1',
addendum=(" The 'contour.negative_linestyle' rcParam now " +
"follows the same validation as the other rcParams " +
"that are related to line style."))
def validate_negative_linestyle_legacy(s):
try:
res = validate_negative_linestyle(s)
return res
except ValueError:
dashes = validate_nseq_float(2)(s)
return (0, dashes) # (offset, (solid, blank))
validate_legend_loc = ValidateInStrings(
'legend_loc',
['best',
'upper right',
'upper left',
'lower left',
'lower right',
'right',
'center left',
'center right',
'lower center',
'upper center',
'center'], ignorecase=True)
def validate_svg_fonttype(s):
if s in ["none", "path"]:
return s
if s == "svgfont":
cbook.warn_deprecated(
"2.2", "'svgfont' support for svg.fonttype is deprecated.")
return s
raise ValueError("Unrecognized svg.fonttype string '{}'; "
"valid strings are 'none', 'path'")
def validate_hinting(s):
if s in (True, False):
return s
if s.lower() in ('auto', 'native', 'either', 'none'):
return s.lower()
raise ValueError("hinting should be 'auto', 'native', 'either' or 'none'")
validate_pgf_texsystem = ValidateInStrings('pgf.texsystem',
['xelatex', 'lualatex', 'pdflatex'])
validate_movie_writer = ValidateInStrings('animation.writer',
['ffmpeg', 'ffmpeg_file',
'avconv', 'avconv_file',
'imagemagick', 'imagemagick_file',
'html'])
validate_movie_frame_fmt = ValidateInStrings('animation.frame_format',
['png', 'jpeg', 'tiff', 'raw', 'rgba'])
validate_axis_locator = ValidateInStrings('major', ['minor', 'both', 'major'])
validate_movie_html_fmt = ValidateInStrings('animation.html',
['html5', 'jshtml', 'none'])
def validate_bbox(s):
if isinstance(s, six.string_types):
s = s.lower()
if s == 'tight':
return s
if s == 'standard':
return None
raise ValueError("bbox should be 'tight' or 'standard'")
elif s is not None:
# Backwards compatibility. None is equivalent to 'standard'.
raise ValueError("bbox should be 'tight' or 'standard'")
return s
def validate_sketch(s):
if isinstance(s, six.string_types):
s = s.lower()
if s == 'none' or s is None:
return None
if isinstance(s, six.string_types):
result = tuple([float(v.strip()) for v in s.split(',')])
elif isinstance(s, (list, tuple)):
        result = tuple([float(v) for v in s])
    else:
        raise ValueError("path.sketch must be a tuple (scale, length, randomness)")
    if len(result) != 3:
        raise ValueError("path.sketch must be a tuple (scale, length, randomness)")
return result
class ValidateInterval(object):
"""
Value must be in interval
"""
def __init__(self, vmin, vmax, closedmin=True, closedmax=True):
self.vmin = vmin
self.vmax = vmax
self.cmin = closedmin
self.cmax = closedmax
def __call__(self, s):
try:
s = float(s)
except ValueError:
raise RuntimeError('Value must be a float; found "%s"' % s)
if self.cmin and s < self.vmin:
raise RuntimeError('Value must be >= %f; found "%f"' %
(self.vmin, s))
elif not self.cmin and s <= self.vmin:
raise RuntimeError('Value must be > %f; found "%f"' %
(self.vmin, s))
if self.cmax and s > self.vmax:
raise RuntimeError('Value must be <= %f; found "%f"' %
(self.vmax, s))
elif not self.cmax and s >= self.vmax:
raise RuntimeError('Value must be < %f; found "%f"' %
(self.vmax, s))
return s
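# A minimal usage sketch (illustrative, not from the original source): an
# instance acts as a reusable range check, as used below for margins and
# subplot fractions.
#     check = ValidateInterval(0, 1, closedmin=True, closedmax=True)
#     check('0.25')  # -> 0.25
#     check(1.5)     # -> RuntimeError (outside [0, 1])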
validate_grid_axis = ValidateInStrings('axes.grid.axis', ['x', 'y', 'both'])
def validate_hatch(s):
"""
Validate a hatch pattern.
A hatch pattern string can have any sequence of the following
characters: ``\\ / | - + * . x o O``.
"""
if not isinstance(s, six.string_types):
raise ValueError("Hatch pattern must be a string")
unknown = set(s) - {'\\', '/', '|', '-', '+', '*', '.', 'x', 'o', 'O'}
if unknown:
raise ValueError("Unknown hatch symbol(s): %s" % list(unknown))
return s
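# A minimal usage sketch (illustrative, not from the original source):
#     validate_hatch('/+*')  # -> '/+*'
#     validate_hatch('/z')   # -> ValueError (unknown hatch symbol 'z')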
validate_hatchlist = _listify_validator(validate_hatch)
validate_dashlist = _listify_validator(validate_nseq_float(allow_none=True))
_prop_validators = {
'color': _listify_validator(validate_color_for_prop_cycle,
allow_stringlist=True),
'linewidth': validate_floatlist,
'linestyle': validate_stringlist,
'facecolor': validate_colorlist,
'edgecolor': validate_colorlist,
'joinstyle': validate_joinstylelist,
'capstyle': validate_capstylelist,
'fillstyle': validate_fillstylelist,
'markerfacecolor': validate_colorlist,
'markersize': validate_floatlist,
'markeredgewidth': validate_floatlist,
'markeredgecolor': validate_colorlist,
'alpha': validate_floatlist,
'marker': validate_stringlist,
'hatch': validate_hatchlist,
'dashes': validate_dashlist,
}
_prop_aliases = {
'c': 'color',
'lw': 'linewidth',
'ls': 'linestyle',
'fc': 'facecolor',
'ec': 'edgecolor',
'mfc': 'markerfacecolor',
'mec': 'markeredgecolor',
'mew': 'markeredgewidth',
'ms': 'markersize',
}
def cycler(*args, **kwargs):
"""
Creates a :class:`cycler.Cycler` object much like :func:`cycler.cycler`,
but includes input validation.
cycler(arg)
cycler(label, itr)
cycler(label1=itr1[, label2=itr2[, ...]])
Form 1 simply copies a given `Cycler` object.
Form 2 creates a `Cycler` from a label and an iterable.
Form 3 composes a `Cycler` as an inner product of the
pairs of keyword arguments. In other words, all of the
iterables are cycled simultaneously, as if through zip().
Parameters
----------
arg : Cycler
Copy constructor for Cycler.
label : name
The property key. Must be a valid `Artist` property.
For example, 'color' or 'linestyle'. Aliases are allowed,
such as 'c' for 'color' and 'lw' for 'linewidth'.
itr : iterable
Finite-length iterable of the property values. These values
are validated and will raise a ValueError if invalid.
Returns
-------
cycler : Cycler
New :class:`cycler.Cycler` for the given properties
"""
if args and kwargs:
raise TypeError("cycler() can only accept positional OR keyword "
"arguments -- not both.")
elif not args and not kwargs:
raise TypeError("cycler() must have positional OR keyword arguments")
if len(args) == 1:
if not isinstance(args[0], Cycler):
raise TypeError("If only one positional argument given, it must "
" be a Cycler instance.")
return validate_cycler(args[0])
elif len(args) == 2:
pairs = [(args[0], args[1])]
elif len(args) > 2:
raise TypeError("No more than 2 positional arguments allowed")
else:
pairs = six.iteritems(kwargs)
validated = []
for prop, vals in pairs:
norm_prop = _prop_aliases.get(prop, prop)
validator = _prop_validators.get(norm_prop, None)
if validator is None:
raise TypeError("Unknown artist property: %s" % prop)
vals = validator(vals)
# We will normalize the property names as well to reduce
# the amount of alias handling code elsewhere.
validated.append((norm_prop, vals))
return reduce(operator.add, (ccycler(k, v) for k, v in validated))
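# A minimal usage sketch (illustrative, not from the original source): unlike
# cycler.cycler, this wrapper validates the values and normalizes aliases.
#     cycler(color=['r', 'g', 'b'])  # validated color cycle
#     cycler('lw', [1, 2, 3])        # 'lw' is normalized to 'linewidth'
#     cycler(unknown=[1, 2])         # -> TypeError (unknown artist property)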
def validate_cycler(s):
'return a Cycler object from a string repr or the object itself'
if isinstance(s, six.string_types):
try:
# TODO: We might want to rethink this...
# While I think I have it quite locked down,
# it is execution of arbitrary code without
# sanitation.
# Combine this with the possibility that rcparams
# might come from the internet (future plans), this
# could be downright dangerous.
# I locked it down by only having the 'cycler()' function
# available.
# UPDATE: Partly plugging a security hole.
# I really should have read this:
# http://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html
# We should replace this eval with a combo of PyParsing and
# ast.literal_eval()
if '.__' in s.replace(' ', ''):
raise ValueError("'%s' seems to have dunder methods. Raising"
" an exception for your safety")
s = eval(s, {'cycler': cycler, '__builtins__': {}})
except BaseException as e:
raise ValueError("'%s' is not a valid cycler construction: %s" %
(s, e))
# Should make sure what comes from the above eval()
# is a Cycler object.
if isinstance(s, Cycler):
cycler_inst = s
else:
raise ValueError("object was not a string or Cycler instance: %s" % s)
unknowns = cycler_inst.keys - (set(_prop_validators) | set(_prop_aliases))
if unknowns:
raise ValueError("Unknown artist properties: %s" % unknowns)
# Not a full validation, but it'll at least normalize property names
# A fuller validation would require v0.10 of cycler.
checker = set()
for prop in cycler_inst.keys:
norm_prop = _prop_aliases.get(prop, prop)
if norm_prop != prop and norm_prop in cycler_inst.keys:
raise ValueError("Cannot specify both '{0}' and alias '{1}'"
" in the same prop_cycle".format(norm_prop, prop))
if norm_prop in checker:
raise ValueError("Another property was already aliased to '{0}'."
" Collision normalizing '{1}'.".format(norm_prop,
prop))
checker.update([norm_prop])
# This is just an extra-careful check, just in case there is some
# edge-case I haven't thought of.
assert len(checker) == len(cycler_inst.keys)
# Now, it should be safe to mutate this cycler
for prop in cycler_inst.keys:
norm_prop = _prop_aliases.get(prop, prop)
cycler_inst.change_key(prop, norm_prop)
for key, vals in cycler_inst.by_key().items():
_prop_validators[key](vals)
return cycler_inst
def validate_hist_bins(s):
if isinstance(s, six.string_types) and s == 'auto':
return s
try:
return int(s)
except (TypeError, ValueError):
pass
try:
return validate_floatlist(s)
except ValueError:
pass
raise ValueError("'hist.bins' must be 'auto', an int or " +
"a sequence of floats")
def validate_animation_writer_path(p):
# Make sure it's a string and then figure out if the animations
# are already loaded and reset the writers (which will validate
# the path on next call)
if not isinstance(p, six.string_types):
raise ValueError("path must be a (unicode) string")
from sys import modules
# set dirty, so that the next call to the registry will re-evaluate
# the state.
# only set dirty if already loaded. If not loaded, the load will
# trigger the checks.
if "matplotlib.animation" in modules:
modules["matplotlib.animation"].writers.set_dirty()
return p
def validate_webagg_address(s):
if s is not None:
import socket
try:
socket.inet_aton(s)
except socket.error as e:
raise ValueError("'webagg.address' is not a valid IP address")
return s
raise ValueError("'webagg.address' is not a valid IP address")
# A validator dedicated to the named line styles, based on the items in
# ls_mapper, and a list of possible strings read from Line2D.set_linestyle
_validate_named_linestyle = ValidateInStrings('linestyle',
list(six.iterkeys(ls_mapper)) +
list(six.itervalues(ls_mapper)) +
['None', 'none', ' ', ''],
ignorecase=True)
def _validate_linestyle(ls):
"""
A validator for all possible line styles, the named ones *and*
the on-off ink sequences.
"""
# Look first for a valid named line style, like '--' or 'solid'
if isinstance(ls, six.string_types):
try:
return _validate_named_linestyle(ls)
except (UnicodeDecodeError, KeyError):
# On Python 2, string-like *ls*, like for example
# 'solid'.encode('utf-16'), may raise a unicode error.
raise ValueError("the linestyle string {!r} is not a valid "
"string.".format(ls))
if isinstance(ls, (bytes, bytearray)):
# On Python 2, a string-like *ls* should already have lead to a
# successful return or to raising an exception. On Python 3, we have
# to manually raise an exception in the case of a byte-like *ls*.
# Otherwise, if *ls* is of even-length, it will be passed to the
# instance of validate_nseq_float, which will return an absurd on-off
# ink sequence...
raise ValueError("linestyle {!r} neither looks like an on-off ink "
"sequence nor a valid string.".format(ls))
# Look for an on-off ink sequence (in points) *of even length*.
# Offset is set to None.
try:
if len(ls) % 2 != 0:
raise ValueError("the linestyle sequence {!r} is not of even "
"length.".format(ls))
return (None, validate_nseq_float()(ls))
except (ValueError, TypeError):
# TypeError can be raised inside the instance of validate_nseq_float,
# by wrong types passed to float(), like NoneType.
raise ValueError("linestyle {!r} is not a valid on-off ink "
"sequence.".format(ls))
# a map from key -> value, converter
defaultParams = {
'backend': ['Agg', validate_backend], # agg is certainly
# present
'backend_fallback': [True, validate_bool], # agg is certainly present
'backend.qt4': [None, validate_qt4],
'backend.qt5': [None, validate_qt5],
'webagg.port': [8988, validate_int],
'webagg.address': ['127.0.0.1', validate_webagg_address],
'webagg.open_in_browser': [True, validate_bool],
'webagg.port_retries': [50, validate_int],
'nbagg.transparent': [True, validate_bool],
'toolbar': ['toolbar2', validate_toolbar],
'datapath': [None, validate_path_exists], # handled by
# _get_data_path_cached
'interactive': [False, validate_bool],
'timezone': ['UTC', validate_string],
# the verbosity setting
'verbose.level': ['silent', validate_verbose],
'verbose.fileo': ['sys.stdout', validate_string],
# line props
'lines.linewidth': [1.5, validate_float], # line width in points
'lines.linestyle': ['-', _validate_linestyle], # solid line
'lines.color': ['C0', validate_color], # first color in color cycle
'lines.marker': ['None', validate_string], # marker name
'lines.markeredgewidth': [1.0, validate_float],
'lines.markersize': [6, validate_float], # markersize, in points
'lines.antialiased': [True, validate_bool], # antialiased (no jaggies)
'lines.dash_joinstyle': ['round', validate_joinstyle],
'lines.solid_joinstyle': ['round', validate_joinstyle],
'lines.dash_capstyle': ['butt', validate_capstyle],
'lines.solid_capstyle': ['projecting', validate_capstyle],
'lines.dashed_pattern': [[3.7, 1.6], validate_nseq_float(allow_none=True)],
'lines.dashdot_pattern': [[6.4, 1.6, 1, 1.6],
validate_nseq_float(allow_none=True)],
'lines.dotted_pattern': [[1, 1.65], validate_nseq_float(allow_none=True)],
'lines.scale_dashes': [True, validate_bool],
# marker props
'markers.fillstyle': ['full', validate_fillstyle],
## patch props
'patch.linewidth': [1.0, validate_float], # line width in points
'patch.edgecolor': ['k', validate_color],
'patch.force_edgecolor' : [False, validate_bool],
'patch.facecolor': ['C0', validate_color], # first color in cycle
'patch.antialiased': [True, validate_bool], # antialiased (no jaggies)
## hatch props
'hatch.color': ['k', validate_color],
'hatch.linewidth': [1.0, validate_float],
## Histogram properties
'hist.bins': [10, validate_hist_bins],
## Boxplot properties
'boxplot.notch': [False, validate_bool],
'boxplot.vertical': [True, validate_bool],
'boxplot.whiskers': [1.5, validate_whiskers],
'boxplot.bootstrap': [None, validate_int_or_None],
'boxplot.patchartist': [False, validate_bool],
'boxplot.showmeans': [False, validate_bool],
'boxplot.showcaps': [True, validate_bool],
'boxplot.showbox': [True, validate_bool],
'boxplot.showfliers': [True, validate_bool],
'boxplot.meanline': [False, validate_bool],
'boxplot.flierprops.color': ['k', validate_color],
'boxplot.flierprops.marker': ['o', validate_string],
'boxplot.flierprops.markerfacecolor': ['none', validate_color_or_auto],
'boxplot.flierprops.markeredgecolor': ['k', validate_color],
'boxplot.flierprops.markersize': [6, validate_float],
'boxplot.flierprops.linestyle': ['none', _validate_linestyle],
'boxplot.flierprops.linewidth': [1.0, validate_float],
'boxplot.boxprops.color': ['k', validate_color],
'boxplot.boxprops.linewidth': [1.0, validate_float],
'boxplot.boxprops.linestyle': ['-', _validate_linestyle],
'boxplot.whiskerprops.color': ['k', validate_color],
'boxplot.whiskerprops.linewidth': [1.0, validate_float],
'boxplot.whiskerprops.linestyle': ['-', _validate_linestyle],
'boxplot.capprops.color': ['k', validate_color],
'boxplot.capprops.linewidth': [1.0, validate_float],
'boxplot.capprops.linestyle': ['-', _validate_linestyle],
'boxplot.medianprops.color': ['C1', validate_color],
'boxplot.medianprops.linewidth': [1.0, validate_float],
'boxplot.medianprops.linestyle': ['-', _validate_linestyle],
'boxplot.meanprops.color': ['C2', validate_color],
'boxplot.meanprops.marker': ['^', validate_string],
'boxplot.meanprops.markerfacecolor': ['C2', validate_color],
'boxplot.meanprops.markeredgecolor': ['C2', validate_color],
'boxplot.meanprops.markersize': [6, validate_float],
'boxplot.meanprops.linestyle': ['--', _validate_linestyle],
'boxplot.meanprops.linewidth': [1.0, validate_float],
## font props
'font.family': [['sans-serif'], validate_stringlist], # used by text object
'font.style': ['normal', validate_string],
'font.variant': ['normal', validate_string],
'font.stretch': ['normal', validate_string],
'font.weight': ['normal', validate_string],
'font.size': [10, validate_float], # Base font size in points
'font.serif': [['DejaVu Serif', 'Bitstream Vera Serif',
'Computer Modern Roman',
'New Century Schoolbook', 'Century Schoolbook L',
'Utopia', 'ITC Bookman', 'Bookman',
'Nimbus Roman No9 L', 'Times New Roman',
'Times', 'Palatino', 'Charter', 'serif'],
validate_stringlist],
'font.sans-serif': [['DejaVu Sans', 'Bitstream Vera Sans',
'Computer Modern Sans Serif',
'Lucida Grande', 'Verdana', 'Geneva', 'Lucid',
'Arial', 'Helvetica', 'Avant Garde', 'sans-serif'],
validate_stringlist],
'font.cursive': [['Apple Chancery', 'Textile', 'Zapf Chancery',
'Sand', 'Script MT', 'Felipa', 'cursive'],
validate_stringlist],
    'font.fantasy': [['Comic Sans MS', 'Chicago', 'Charcoal', 'Impact',
'Western', 'Humor Sans', 'xkcd', 'fantasy'],
validate_stringlist],
'font.monospace': [['DejaVu Sans Mono', 'Bitstream Vera Sans Mono',
'Computer Modern Typewriter',
'Andale Mono', 'Nimbus Mono L', 'Courier New',
'Courier', 'Fixed', 'Terminal', 'monospace'],
validate_stringlist],
# text props
'text.color': ['k', validate_color], # black
'text.usetex': [False, validate_bool],
'text.latex.unicode': [False, validate_bool],
'text.latex.preamble': [[''], validate_stringlist],
'text.latex.preview': [False, validate_bool],
'text.dvipnghack': [None, validate_bool_maybe_none],
'text.hinting': ['auto', validate_hinting],
'text.hinting_factor': [8, validate_int],
'text.antialiased': [True, validate_bool],
'mathtext.cal': ['cursive', validate_font_properties],
'mathtext.rm': ['sans', validate_font_properties],
'mathtext.tt': ['monospace', validate_font_properties],
'mathtext.it': ['sans:italic', validate_font_properties],
'mathtext.bf': ['sans:bold', validate_font_properties],
'mathtext.sf': ['sans', validate_font_properties],
'mathtext.fontset': ['dejavusans', validate_fontset],
'mathtext.default': ['it', validate_mathtext_default],
'mathtext.fallback_to_cm': [True, validate_bool],
'image.aspect': ['equal', validate_aspect], # equal, auto, a number
'image.interpolation': ['nearest', validate_string],
'image.cmap': ['viridis', validate_string], # one of gray, jet, etc
'image.lut': [256, validate_int], # lookup table
    'image.origin': ['upper', validate_string], # 'upper' or 'lower'
'image.resample': [True, validate_bool],
# Specify whether vector graphics backends will combine all images on a
# set of axes into a single composite image
'image.composite_image': [True, validate_bool],
# contour props
'contour.negative_linestyle': ['dashed', _validate_linestyle],
'contour.corner_mask': [True, validate_bool],
# errorbar props
'errorbar.capsize': [0, validate_float],
# axes props
'axes.axisbelow': ['line', validate_axisbelow],
'axes.hold': [None, deprecate_axes_hold],
'axes.facecolor': ['w', validate_color], # background color; white
'axes.edgecolor': ['k', validate_color], # edge color; black
'axes.linewidth': [0.8, validate_float], # edge linewidth
'axes.spines.left': [True, validate_bool], # Set visibility of axes
'axes.spines.right': [True, validate_bool], # 'spines', the lines
'axes.spines.bottom': [True, validate_bool], # around the chart
'axes.spines.top': [True, validate_bool], # denoting data boundary
'axes.titlesize': ['large', validate_fontsize], # fontsize of the
# axes title
'axes.titleweight': ['normal', validate_string], # font weight of axes title
'axes.titlepad': [6.0, validate_float], # pad from axes top to title in points
'axes.grid': [False, validate_bool], # display grid or not
    'axes.grid.which': ['major', validate_axis_locator], # set whether the grid
                                                          # is drawn by default
                                                          # on 'major', 'minor'
                                                          # or 'both' ticks
'axes.grid.axis': ['both', validate_grid_axis], # grid type.
# Can be 'x', 'y', 'both'
    'axes.labelsize': ['medium', validate_fontsize],  # fontsize of the
                                                      # x and y labels
'axes.labelpad': [4.0, validate_float], # space between label and axis
    'axes.labelweight': ['normal', validate_string], # font weight of the x and y labels
'axes.labelcolor': ['k', validate_color], # color of axis label
'axes.formatter.limits': [[-7, 7], validate_nseq_int(2)],
# use scientific notation if log10
# of the axis range is smaller than the
# first or larger than the second
'axes.formatter.use_locale': [False, validate_bool],
# Use the current locale to format ticks
'axes.formatter.use_mathtext': [False, validate_bool],
'axes.formatter.min_exponent': [0, validate_int], # minimum exponent to format in scientific notation
'axes.formatter.useoffset': [True, validate_bool],
'axes.formatter.offset_threshold': [4, validate_int],
'axes.unicode_minus': [True, validate_bool],
# This entry can be either a cycler object or a
# string repr of a cycler-object, which gets eval()'ed
# to create the object.
'axes.prop_cycle': [
ccycler('color',
['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728',
'#9467bd', '#8c564b', '#e377c2', '#7f7f7f',
'#bcbd22', '#17becf']),
validate_cycler],
# If 'data', axes limits are set close to the data.
# If 'round_numbers' axes limits are set to the nearest round numbers.
'axes.autolimit_mode': [
'data',
ValidateInStrings('autolimit_mode', ['data', 'round_numbers'])],
'axes.xmargin': [0.05, ValidateInterval(0, 1,
closedmin=True,
closedmax=True)], # margin added to xaxis
'axes.ymargin': [0.05, ValidateInterval(0, 1,
closedmin=True,
closedmax=True)],# margin added to yaxis
'polaraxes.grid': [True, validate_bool], # display polar grid or
# not
'axes3d.grid': [True, validate_bool], # display 3d grid
# scatter props
'scatter.marker': ['o', validate_string],
# TODO validate that these are valid datetime format strings
'date.autoformatter.year': ['%Y', validate_string],
'date.autoformatter.month': ['%Y-%m', validate_string],
'date.autoformatter.day': ['%Y-%m-%d', validate_string],
'date.autoformatter.hour': ['%m-%d %H', validate_string],
'date.autoformatter.minute': ['%d %H:%M', validate_string],
'date.autoformatter.second': ['%H:%M:%S', validate_string],
'date.autoformatter.microsecond': ['%M:%S.%f', validate_string],
#legend properties
'legend.fancybox': [True, validate_bool],
'legend.loc': ['best', validate_legend_loc],
# the number of points in the legend line
'legend.numpoints': [1, validate_int],
# the number of points in the legend line for scatter
'legend.scatterpoints': [1, validate_int],
'legend.fontsize': ['medium', validate_fontsize],
# the relative size of legend markers vs. original
'legend.markerscale': [1.0, validate_float],
'legend.shadow': [False, validate_bool],
# whether or not to draw a frame around legend
'legend.frameon': [True, validate_bool],
# alpha value of the legend frame
'legend.framealpha': [0.8, validate_float_or_None],
## the following dimensions are in fraction of the font size
'legend.borderpad': [0.4, validate_float], # units are fontsize
# the vertical space between the legend entries
'legend.labelspacing': [0.5, validate_float],
# the length of the legend lines
'legend.handlelength': [2., validate_float],
# the length of the legend lines
'legend.handleheight': [0.7, validate_float],
# the space between the legend line and legend text
'legend.handletextpad': [.8, validate_float],
# the border between the axes and legend edge
'legend.borderaxespad': [0.5, validate_float],
# the border between the axes and legend edge
'legend.columnspacing': [2., validate_float],
'legend.facecolor': ['inherit', validate_color_or_inherit],
'legend.edgecolor': ['0.8', validate_color_or_inherit],
# tick properties
'xtick.top': [False, validate_bool], # draw ticks on the top side
'xtick.bottom': [True, validate_bool], # draw ticks on the bottom side
'xtick.labeltop': [False, validate_bool], # draw label on the top
'xtick.labelbottom': [True, validate_bool], # draw label on the bottom
'xtick.major.size': [3.5, validate_float], # major xtick size in points
'xtick.minor.size': [2, validate_float], # minor xtick size in points
'xtick.major.width': [0.8, validate_float], # major xtick width in points
'xtick.minor.width': [0.6, validate_float], # minor xtick width in points
'xtick.major.pad': [3.5, validate_float], # distance to label in points
'xtick.minor.pad': [3.4, validate_float], # distance to label in points
'xtick.color': ['k', validate_color], # color of the xtick labels
'xtick.minor.visible': [False, validate_bool], # visibility of the x axis minor ticks
'xtick.minor.top': [True, validate_bool], # draw x axis top minor ticks
'xtick.minor.bottom': [True, validate_bool], # draw x axis bottom minor ticks
'xtick.major.top': [True, validate_bool], # draw x axis top major ticks
'xtick.major.bottom': [True, validate_bool], # draw x axis bottom major ticks
# fontsize of the xtick labels
'xtick.labelsize': ['medium', validate_fontsize],
'xtick.direction': ['out', validate_string], # direction of xticks
'xtick.alignment': ["center", _validate_alignment],
'ytick.left': [True, validate_bool], # draw ticks on the left side
'ytick.right': [False, validate_bool], # draw ticks on the right side
'ytick.labelleft': [True, validate_bool], # draw tick labels on the left side
'ytick.labelright': [False, validate_bool], # draw tick labels on the right side
'ytick.major.size': [3.5, validate_float], # major ytick size in points
'ytick.minor.size': [2, validate_float], # minor ytick size in points
'ytick.major.width': [0.8, validate_float], # major ytick width in points
'ytick.minor.width': [0.6, validate_float], # minor ytick width in points
'ytick.major.pad': [3.5, validate_float], # distance to label in points
'ytick.minor.pad': [3.4, validate_float], # distance to label in points
'ytick.color': ['k', validate_color], # color of the ytick labels
'ytick.minor.visible': [False, validate_bool], # visibility of the y axis minor ticks
'ytick.minor.left': [True, validate_bool], # draw y axis left minor ticks
'ytick.minor.right': [True, validate_bool], # draw y axis right minor ticks
'ytick.major.left': [True, validate_bool], # draw y axis left major ticks
'ytick.major.right': [True, validate_bool], # draw y axis right major ticks
# fontsize of the ytick labels
'ytick.labelsize': ['medium', validate_fontsize],
'ytick.direction': ['out', validate_string], # direction of yticks
'ytick.alignment': ["center_baseline", _validate_alignment],
'grid.color': ['#b0b0b0', validate_color], # grid color
'grid.linestyle': ['-', _validate_linestyle], # solid
'grid.linewidth': [0.8, validate_float], # in points
'grid.alpha': [1.0, validate_float],
## figure props
# figure title
'figure.titlesize': ['large', validate_fontsize],
'figure.titleweight': ['normal', validate_string],
# figure size in inches: width by height
'figure.figsize': [[6.4, 4.8], validate_nseq_float(2)],
'figure.dpi': [100, validate_float], # DPI
'figure.facecolor': ['w', validate_color], # facecolor; white
'figure.edgecolor': ['w', validate_color], # edgecolor; white
'figure.frameon': [True, validate_bool],
'figure.autolayout': [False, validate_bool],
'figure.max_open_warning': [20, validate_int],
'figure.subplot.left': [0.125, ValidateInterval(0, 1, closedmin=True,
closedmax=True)],
'figure.subplot.right': [0.9, ValidateInterval(0, 1, closedmin=True,
closedmax=True)],
'figure.subplot.bottom': [0.11, ValidateInterval(0, 1, closedmin=True,
closedmax=True)],
'figure.subplot.top': [0.88, ValidateInterval(0, 1, closedmin=True,
closedmax=True)],
'figure.subplot.wspace': [0.2, ValidateInterval(0, 1, closedmin=True,
closedmax=False)],
'figure.subplot.hspace': [0.2, ValidateInterval(0, 1, closedmin=True,
closedmax=False)],
# do constrained_layout.
'figure.constrained_layout.use': [False, validate_bool],
# wspace and hspace are fraction of adjacent subplots to use
# for space. Much smaller than above because we don't need
# room for the text.
'figure.constrained_layout.hspace': [0.02, ValidateInterval(
0, 1, closedmin=True, closedmax=False)],
'figure.constrained_layout.wspace': [0.02, ValidateInterval(
0, 1, closedmin=True, closedmax=False)],
# This is a buffer around the axes in inches. This is 3pts.
'figure.constrained_layout.h_pad': [0.04167, validate_float],
'figure.constrained_layout.w_pad': [0.04167, validate_float],
## Saving figure's properties
'savefig.dpi': ['figure', validate_dpi], # DPI
'savefig.facecolor': ['w', validate_color], # facecolor; white
'savefig.edgecolor': ['w', validate_color], # edgecolor; white
'savefig.frameon': [True, validate_bool],
    'savefig.orientation': ['portrait', validate_orientation],  # orientation of
                                                                 # the saved figure
'savefig.jpeg_quality': [95, validate_int],
# value checked by backend at runtime
'savefig.format': ['png', update_savefig_format],
# options are 'tight', or 'standard'. 'standard' validates to None.
'savefig.bbox': ['standard', validate_bbox],
'savefig.pad_inches': [0.1, validate_float],
# default directory in savefig dialog box
'savefig.directory': ['~', validate_string],
'savefig.transparent': [False, validate_bool],
# Maintain shell focus for TkAgg
'tk.window_focus': [False, validate_bool],
# Set the papersize/type
'ps.papersize': ['letter', validate_ps_papersize],
    'ps.useafm': [False, validate_bool], # use AFM fonts in PostScript output
# use ghostscript or xpdf to distill ps output
'ps.usedistiller': [False, validate_ps_distiller],
'ps.distiller.res': [6000, validate_int], # dpi
'ps.fonttype': [3, validate_fonttype], # 3 (Type3) or 42 (Truetype)
# compression level from 0 to 9; 0 to disable
'pdf.compression': [6, validate_int],
# ignore any color-setting commands from the frontend
'pdf.inheritcolor': [False, validate_bool],
# use only the 14 PDF core fonts embedded in every PDF viewing application
'pdf.use14corefonts': [False, validate_bool],
'pdf.fonttype': [3, validate_fonttype], # 3 (Type3) or 42 (Truetype)
'pgf.debug': [False, validate_bool], # output debug information
# choose latex application for creating pdf files (xelatex/lualatex)
'pgf.texsystem': ['xelatex', validate_pgf_texsystem],
# use matplotlib rc settings for font configuration
'pgf.rcfonts': [True, validate_bool],
# provide a custom preamble for the latex process
'pgf.preamble': [[''], validate_stringlist],
# write raster image data directly into the svg file
'svg.image_inline': [True, validate_bool],
# True to save all characters as paths in the SVG
'svg.fonttype': ['path', validate_svg_fonttype],
'svg.hashsalt': [None, validate_string_or_None],
# set this when you want to generate hardcopy docstring
'docstring.hardcopy': [False, validate_bool],
    # where the plugin directory is located
'plugins.directory': ['.matplotlib_plugins', validate_string],
'path.simplify': [True, validate_bool],
'path.simplify_threshold': [1.0 / 9.0, ValidateInterval(0.0, 1.0)],
'path.snap': [True, validate_bool],
'path.sketch': [None, validate_sketch],
'path.effects': [[], validate_any],
'agg.path.chunksize': [0, validate_int], # 0 to disable chunking;
# key-mappings (multi-character mappings should be a list/tuple)
'keymap.fullscreen': [('f', 'ctrl+f'), validate_stringlist],
'keymap.home': [['h', 'r', 'home'], validate_stringlist],
'keymap.back': [['left', 'c', 'backspace'], validate_stringlist],
'keymap.forward': [['right', 'v'], validate_stringlist],
'keymap.pan': [['p'], validate_stringlist],
'keymap.zoom': [['o'], validate_stringlist],
'keymap.save': [['s', 'ctrl+s'], validate_stringlist],
'keymap.quit': [['ctrl+w', 'cmd+w', 'q'], validate_stringlist],
'keymap.quit_all': [['W', 'cmd+W', 'Q'], validate_stringlist],
'keymap.grid': [['g'], validate_stringlist],
'keymap.grid_minor': [['G'], validate_stringlist],
'keymap.yscale': [['l'], validate_stringlist],
'keymap.xscale': [['k', 'L'], validate_stringlist],
'keymap.all_axes': [['a'], validate_stringlist],
# sample data
'examples.directory': ['', validate_string],
# Animation settings
'animation.html': ['none', validate_movie_html_fmt],
# Limit, in MB, of size of base64 encoded animation in HTML
# (i.e. IPython notebook)
'animation.embed_limit': [20, validate_float],
'animation.writer': ['ffmpeg', validate_movie_writer],
'animation.codec': ['h264', validate_string],
'animation.bitrate': [-1, validate_int],
# Controls image format when frames are written to disk
'animation.frame_format': ['png', validate_movie_frame_fmt],
# Additional arguments for HTML writer
'animation.html_args': [[], validate_stringlist],
# Path to ffmpeg binary. If just binary name, subprocess uses $PATH.
'animation.ffmpeg_path': ['ffmpeg', validate_animation_writer_path],
# Additional arguments for ffmpeg movie writer (using pipes)
'animation.ffmpeg_args': [[], validate_stringlist],
# Path to AVConv binary. If just binary name, subprocess uses $PATH.
'animation.avconv_path': ['avconv', validate_animation_writer_path],
# Additional arguments for avconv movie writer (using pipes)
'animation.avconv_args': [[], validate_stringlist],
# Path to convert binary. If just binary name, subprocess uses $PATH.
'animation.convert_path': ['convert', validate_animation_writer_path],
# Additional arguments for convert movie writer (using pipes)
'animation.convert_args': [[], validate_stringlist],
# Classic (pre 2.0) compatibility mode
# This is used for things that are hard to make backward compatible
# with a sane rcParam alone. This does *not* turn on classic mode
# altogether. For that use `matplotlib.style.use('classic')`.
'_internal.classic_mode': [False, validate_bool]
}
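# Note (not from the original source): each entry above maps an rc name to a
# [default, validator] pair; the validator is applied whenever the rc value is
# assigned, so e.g. the string '1.5' read from matplotlibrc is stored as the
# float 1.5 for 'lines.linewidth'.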
if __name__ == '__main__':
rc = defaultParams
rc['datapath'][0] = '/'
for key in rc:
if not rc[key][1](rc[key][0]) == rc[key][0]:
print("%s: %s != %s" % (key, rc[key][1](rc[key][0]), rc[key][0]))
# ==== cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/cm.py ====
"""
Builtin colormaps, colormap handling utilities, and the `ScalarMappable` mixin.
See :doc:`/gallery/color/colormap_reference` for a list of builtin colormaps.
See :doc:`/tutorials/colors/colormaps` for an in-depth discussion of colormaps.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
from numpy import ma
import matplotlib as mpl
import matplotlib.colors as colors
import matplotlib.cbook as cbook
from matplotlib._cm import datad
from matplotlib._cm_listed import cmaps as cmaps_listed
cmap_d = {}
# reverse all the colormaps.
# reversed colormaps have '_r' appended to the name.
def _reverser(f):
def freversed(x):
return f(1 - x)
return freversed
def revcmap(data):
"""Can only handle specification *data* in dictionary format."""
data_r = {}
for key, val in six.iteritems(data):
if callable(val):
valnew = _reverser(val)
# This doesn't work: lambda x: val(1-x)
# The same "val" (the first one) is used
# each time, so the colors are identical
# and the result is shades of gray.
else:
# Flip x and exchange the y values facing x = 0 and x = 1.
valnew = [(1.0 - x, y1, y0) for x, y0, y1 in reversed(val)]
data_r[key] = valnew
return data_r
def _reverse_cmap_spec(spec):
"""Reverses cmap specification *spec*, can handle both dict and tuple
type specs."""
if 'listed' in spec:
return {'listed': spec['listed'][::-1]}
if 'red' in spec:
return revcmap(spec)
else:
revspec = list(reversed(spec))
if len(revspec[0]) == 2: # e.g., (1, (1.0, 0.0, 1.0))
revspec = [(1.0 - a, b) for a, b in revspec]
return revspec
def _generate_cmap(name, lutsize):
"""Generates the requested cmap from its *name*. The lut size is
*lutsize*."""
spec = datad[name]
# Generate the colormap object.
if 'red' in spec:
return colors.LinearSegmentedColormap(name, spec, lutsize)
elif 'listed' in spec:
return colors.ListedColormap(spec['listed'], name)
else:
return colors.LinearSegmentedColormap.from_list(name, spec, lutsize)
LUTSIZE = mpl.rcParams['image.lut']
# Generate the reversed specifications (all at once, to avoid
# modify-when-iterating).
datad.update({cmapname + '_r': _reverse_cmap_spec(spec)
for cmapname, spec in six.iteritems(datad)})
# Precache the cmaps with ``lutsize = LUTSIZE``.
# Also add the reversed ones added in the section above:
for cmapname in datad:
cmap_d[cmapname] = _generate_cmap(cmapname, LUTSIZE)
cmap_d.update(cmaps_listed)
locals().update(cmap_d)
# Continue with definitions ...
def register_cmap(name=None, cmap=None, data=None, lut=None):
"""
Add a colormap to the set recognized by :func:`get_cmap`.
It can be used in two ways::
register_cmap(name='swirly', cmap=swirly_cmap)
register_cmap(name='choppy', data=choppydata, lut=128)
In the first case, *cmap* must be a :class:`matplotlib.colors.Colormap`
instance. The *name* is optional; if absent, the name will
be the :attr:`~matplotlib.colors.Colormap.name` attribute of the *cmap*.
In the second case, the three arguments are passed to
the :class:`~matplotlib.colors.LinearSegmentedColormap` initializer,
and the resulting colormap is registered.
"""
if name is None:
try:
name = cmap.name
except AttributeError:
raise ValueError("Arguments must include a name or a Colormap")
if not isinstance(name, six.string_types):
raise ValueError("Colormap name must be a string")
if isinstance(cmap, colors.Colormap):
cmap_d[name] = cmap
return
# For the remainder, let exceptions propagate.
if lut is None:
lut = mpl.rcParams['image.lut']
cmap = colors.LinearSegmentedColormap(name, data, lut)
cmap_d[name] = cmap
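# A minimal usage sketch (illustrative, not from the original source; the name
# 'rwb' is made up for the example):
#     from matplotlib.colors import ListedColormap
#     register_cmap(cmap=ListedColormap(['r', 'w', 'b'], name='rwb'))
#     get_cmap('rwb')  # the colormap is now resolvable by name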
def get_cmap(name=None, lut=None):
"""
Get a colormap instance, defaulting to rc values if *name* is None.
Colormaps added with :func:`register_cmap` take precedence over
built-in colormaps.
If *name* is a :class:`matplotlib.colors.Colormap` instance, it will be
returned.
If *lut* is not None it must be an integer giving the number of
entries desired in the lookup table, and *name* must be a standard
mpl colormap name.
"""
if name is None:
name = mpl.rcParams['image.cmap']
if isinstance(name, colors.Colormap):
return name
if name in cmap_d:
if lut is None:
return cmap_d[name]
else:
return cmap_d[name]._resample(lut)
else:
raise ValueError(
"Colormap %s is not recognized. Possible values are: %s"
% (name, ', '.join(sorted(cmap_d))))
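# A minimal usage sketch (illustrative, not from the original source):
#     get_cmap('viridis')          # the registered colormap object
#     get_cmap('viridis', lut=10)  # resampled copy with 10 lookup entries
#     get_cmap(None)               # falls back to rcParams['image.cmap']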
class ScalarMappable(object):
"""
This is a mixin class to support scalar data to RGBA mapping.
The ScalarMappable makes use of data normalization before returning
RGBA colors from the given colormap.
"""
def __init__(self, norm=None, cmap=None):
r"""
Parameters
----------
norm : :class:`matplotlib.colors.Normalize` instance
The normalizing object which scales data, typically into the
interval ``[0, 1]``.
If *None*, *norm* defaults to a *colors.Normalize* object which
initializes its scaling based on the first data processed.
cmap : str or :class:`~matplotlib.colors.Colormap` instance
The colormap used to map normalized data values to RGBA colors.
"""
self.callbacksSM = cbook.CallbackRegistry()
if cmap is None:
cmap = get_cmap()
if norm is None:
norm = colors.Normalize()
self._A = None
#: The Normalization instance of this ScalarMappable.
self.norm = norm
#: The Colormap instance of this ScalarMappable.
self.cmap = get_cmap(cmap)
#: The last colorbar associated with this ScalarMappable. May be None.
self.colorbar = None
self.update_dict = {'array': False}
def to_rgba(self, x, alpha=None, bytes=False, norm=True):
"""
Return a normalized rgba array corresponding to *x*.
In the normal case, *x* is a 1-D or 2-D sequence of scalars, and
the corresponding ndarray of rgba values will be returned,
based on the norm and colormap set for this ScalarMappable.
There is one special case, for handling images that are already
rgb or rgba, such as might have been read from an image file.
If *x* is an ndarray with 3 dimensions,
and the last dimension is either 3 or 4, then it will be
treated as an rgb or rgba array, and no mapping will be done.
The array can be uint8, or it can be floating point with
values in the 0-1 range; otherwise a ValueError will be raised.
If it is a masked array, the mask will be ignored.
If the last dimension is 3, the *alpha* kwarg (defaulting to 1)
will be used to fill in the transparency. If the last dimension
is 4, the *alpha* kwarg is ignored; it does not
replace the pre-existing alpha. A ValueError will be raised
if the third dimension is other than 3 or 4.
In either case, if *bytes* is *False* (default), the rgba
array will be floats in the 0-1 range; if it is *True*,
the returned rgba array will be uint8 in the 0 to 255 range.
If norm is False, no normalization of the input data is
performed, and it is assumed to be in the range (0-1).
"""
# First check for special case, image input:
try:
if x.ndim == 3:
if x.shape[2] == 3:
if alpha is None:
alpha = 1
if x.dtype == np.uint8:
alpha = np.uint8(alpha * 255)
m, n = x.shape[:2]
xx = np.empty(shape=(m, n, 4), dtype=x.dtype)
xx[:, :, :3] = x
xx[:, :, 3] = alpha
elif x.shape[2] == 4:
xx = x
else:
raise ValueError("third dimension must be 3 or 4")
if xx.dtype.kind == 'f':
                    if norm and (xx.max() > 1 or xx.min() < 0):
raise ValueError("Floating point image RGB values "
"must be in the 0..1 range.")
if bytes:
xx = (xx * 255).astype(np.uint8)
elif xx.dtype == np.uint8:
if not bytes:
xx = xx.astype(np.float32) / 255
else:
raise ValueError("Image RGB array must be uint8 or "
"floating point; found %s" % xx.dtype)
return xx
except AttributeError:
# e.g., x is not an ndarray; so try mapping it
pass
# This is the normal case, mapping a scalar array:
x = ma.asarray(x)
if norm:
x = self.norm(x)
rgba = self.cmap(x, alpha=alpha, bytes=bytes)
return rgba
def set_array(self, A):
"""Set the image array from numpy array *A*.
.. ACCEPTS: ndarray
Parameters
----------
A : ndarray
"""
self._A = A
self.update_dict['array'] = True
def get_array(self):
'Return the array'
return self._A
def get_cmap(self):
'return the colormap'
return self.cmap
def get_clim(self):
'return the min, max of the color limits for image scaling'
return self.norm.vmin, self.norm.vmax
def set_clim(self, vmin=None, vmax=None):
"""
        set the norm limits for image scaling; if *vmin* is a length-2
sequence, interpret it as ``(vmin, vmax)`` which is used to
support setp
ACCEPTS: a length 2 sequence of floats; may be overridden in methods
that have ``vmin`` and ``vmax`` kwargs.
"""
if vmax is None:
try:
vmin, vmax = vmin
except (TypeError, ValueError):
pass
if vmin is not None:
self.norm.vmin = colors._sanitize_extrema(vmin)
if vmax is not None:
self.norm.vmax = colors._sanitize_extrema(vmax)
self.changed()
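    # A minimal usage sketch (illustrative, not from the original source): both
    # call styles set the same normalization limits.
    #     mappable.set_clim(0, 10)
    #     mappable.set_clim((0, 10))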
def set_cmap(self, cmap):
"""
set the colormap for luminance data
ACCEPTS: a colormap or registered colormap name
"""
cmap = get_cmap(cmap)
self.cmap = cmap
self.changed()
def set_norm(self, norm):
"""Set the normalization instance.
.. ACCEPTS: `.Normalize`
Parameters
----------
norm : `.Normalize`
"""
if norm is None:
norm = colors.Normalize()
self.norm = norm
self.changed()
def autoscale(self):
"""
Autoscale the scalar limits on the norm instance using the
current array
"""
if self._A is None:
raise TypeError('You must first set_array for mappable')
self.norm.autoscale(self._A)
self.changed()
def autoscale_None(self):
"""
Autoscale the scalar limits on the norm instance using the
current array, changing only limits that are None
"""
if self._A is None:
raise TypeError('You must first set_array for mappable')
self.norm.autoscale_None(self._A)
self.changed()
def add_checker(self, checker):
"""
Add an entry to a dictionary of boolean flags
that are set to True when the mappable is changed.
"""
self.update_dict[checker] = False
def check_update(self, checker):
"""
If mappable has changed since the last check,
return True; else return False
"""
if self.update_dict[checker]:
self.update_dict[checker] = False
return True
return False
def changed(self):
"""
Call this whenever the mappable is changed to notify all the
callbackSM listeners to the 'changed' signal
"""
self.callbacksSM.process('changed', self)
for key in self.update_dict:
self.update_dict[key] = True
self.stale = True
# ==== cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/hatch.py ====
"""
Contains classes for generating hatch patterns.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
import numpy as np
from matplotlib.path import Path
class HatchPatternBase(object):
"""
The base class for a hatch pattern.
"""
pass
class HorizontalHatch(HatchPatternBase):
def __init__(self, hatch, density):
self.num_lines = int((hatch.count('-') + hatch.count('+')) * density)
self.num_vertices = self.num_lines * 2
def set_vertices_and_codes(self, vertices, codes):
steps, stepsize = np.linspace(0.0, 1.0, self.num_lines, False,
retstep=True)
steps += stepsize / 2.
vertices[0::2, 0] = 0.0
vertices[0::2, 1] = steps
vertices[1::2, 0] = 1.0
vertices[1::2, 1] = steps
codes[0::2] = Path.MOVETO
codes[1::2] = Path.LINETO
class VerticalHatch(HatchPatternBase):
def __init__(self, hatch, density):
self.num_lines = int((hatch.count('|') + hatch.count('+')) * density)
self.num_vertices = self.num_lines * 2
def set_vertices_and_codes(self, vertices, codes):
steps, stepsize = np.linspace(0.0, 1.0, self.num_lines, False,
retstep=True)
steps += stepsize / 2.
vertices[0::2, 0] = steps
vertices[0::2, 1] = 0.0
vertices[1::2, 0] = steps
vertices[1::2, 1] = 1.0
codes[0::2] = Path.MOVETO
codes[1::2] = Path.LINETO
class NorthEastHatch(HatchPatternBase):
def __init__(self, hatch, density):
self.num_lines = int((hatch.count('/') + hatch.count('x') +
hatch.count('X')) * density)
if self.num_lines:
self.num_vertices = (self.num_lines + 1) * 2
else:
self.num_vertices = 0
def set_vertices_and_codes(self, vertices, codes):
steps = np.linspace(-0.5, 0.5, self.num_lines + 1, True)
vertices[0::2, 0] = 0.0 + steps
vertices[0::2, 1] = 0.0 - steps
vertices[1::2, 0] = 1.0 + steps
vertices[1::2, 1] = 1.0 - steps
codes[0::2] = Path.MOVETO
codes[1::2] = Path.LINETO
class SouthEastHatch(HatchPatternBase):
def __init__(self, hatch, density):
self.num_lines = int((hatch.count('\\') + hatch.count('x') +
hatch.count('X')) * density)
        if self.num_lines:
            self.num_vertices = (self.num_lines + 1) * 2
        else:
            self.num_vertices = 0
def set_vertices_and_codes(self, vertices, codes):
steps = np.linspace(-0.5, 0.5, self.num_lines + 1, True)
vertices[0::2, 0] = 0.0 + steps
vertices[0::2, 1] = 1.0 + steps
vertices[1::2, 0] = 1.0 + steps
vertices[1::2, 1] = 0.0 + steps
codes[0::2] = Path.MOVETO
codes[1::2] = Path.LINETO
class Shapes(HatchPatternBase):
filled = False
def __init__(self, hatch, density):
if self.num_rows == 0:
self.num_shapes = 0
self.num_vertices = 0
else:
self.num_shapes = ((self.num_rows // 2 + 1) * (self.num_rows + 1) +
(self.num_rows // 2) * (self.num_rows))
self.num_vertices = (self.num_shapes *
len(self.shape_vertices) *
(self.filled and 1 or 2))
def set_vertices_and_codes(self, vertices, codes):
offset = 1.0 / self.num_rows
shape_vertices = self.shape_vertices * offset * self.size
if not self.filled:
inner_vertices = shape_vertices[::-1] * 0.9
shape_codes = self.shape_codes
shape_size = len(shape_vertices)
cursor = 0
for row in xrange(self.num_rows + 1):
if row % 2 == 0:
cols = np.linspace(0.0, 1.0, self.num_rows + 1, True)
else:
cols = np.linspace(offset / 2.0, 1.0 - offset / 2.0,
self.num_rows, True)
row_pos = row * offset
for col_pos in cols:
vertices[cursor:cursor + shape_size] = (shape_vertices +
(col_pos, row_pos))
codes[cursor:cursor + shape_size] = shape_codes
cursor += shape_size
if not self.filled:
vertices[cursor:cursor + shape_size] = (inner_vertices +
(col_pos, row_pos))
codes[cursor:cursor + shape_size] = shape_codes
cursor += shape_size
class Circles(Shapes):
def __init__(self, hatch, density):
path = Path.unit_circle()
self.shape_vertices = path.vertices
self.shape_codes = path.codes
Shapes.__init__(self, hatch, density)
class SmallCircles(Circles):
size = 0.2
def __init__(self, hatch, density):
self.num_rows = (hatch.count('o')) * density
Circles.__init__(self, hatch, density)
class LargeCircles(Circles):
size = 0.35
def __init__(self, hatch, density):
self.num_rows = (hatch.count('O')) * density
Circles.__init__(self, hatch, density)
class SmallFilledCircles(SmallCircles):
size = 0.1
filled = True
def __init__(self, hatch, density):
self.num_rows = (hatch.count('.')) * density
Circles.__init__(self, hatch, density)
class Stars(Shapes):
size = 1.0 / 3.0
filled = True
def __init__(self, hatch, density):
self.num_rows = (hatch.count('*')) * density
path = Path.unit_regular_star(5)
self.shape_vertices = path.vertices
self.shape_codes = np.ones(len(self.shape_vertices)) * Path.LINETO
self.shape_codes[0] = Path.MOVETO
Shapes.__init__(self, hatch, density)
_hatch_types = [
HorizontalHatch,
VerticalHatch,
NorthEastHatch,
SouthEastHatch,
SmallCircles,
LargeCircles,
SmallFilledCircles,
Stars
]
def get_path(hatchpattern, density=6):
"""
Given a hatch specifier, *hatchpattern*, generates Path to render
the hatch in a unit square. *density* is the number of lines per
unit square.
"""
density = int(density)
patterns = [hatch_type(hatchpattern, density)
for hatch_type in _hatch_types]
num_vertices = sum([pattern.num_vertices for pattern in patterns])
if num_vertices == 0:
return Path(np.empty((0, 2)))
vertices = np.empty((num_vertices, 2))
codes = np.empty((num_vertices,), np.uint8)
cursor = 0
for pattern in patterns:
if pattern.num_vertices != 0:
vertices_chunk = vertices[cursor:cursor + pattern.num_vertices]
codes_chunk = codes[cursor:cursor + pattern.num_vertices]
pattern.set_vertices_and_codes(vertices_chunk, codes_chunk)
cursor += pattern.num_vertices
return Path(vertices, codes)
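# A minimal usage sketch (illustrative, not from the original source):
#     get_path('/')              # Path drawing '/' hatching over the unit square
#     get_path('x', density=4)   # denser crossed hatching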
# ==== cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backend_managers.py ====
"""
`ToolManager`
Class that makes the bridge between user interaction (key press,
toolbar clicks, ..) and the actions in response to the user inputs.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
import matplotlib.cbook as cbook
import matplotlib.widgets as widgets
from matplotlib.rcsetup import validate_stringlist
import matplotlib.backend_tools as tools
class ToolEvent(object):
"""Event for tool manipulation (add/remove)"""
def __init__(self, name, sender, tool, data=None):
self.name = name
self.sender = sender
self.tool = tool
self.data = data
class ToolTriggerEvent(ToolEvent):
"""Event to inform that a tool has been triggered"""
def __init__(self, name, sender, tool, canvasevent=None, data=None):
ToolEvent.__init__(self, name, sender, tool, data)
self.canvasevent = canvasevent
class ToolManagerMessageEvent(object):
"""
Event carrying messages from toolmanager
Messages usually get displayed to the user by the toolbar
"""
def __init__(self, name, sender, message):
self.name = name
self.sender = sender
self.message = message
class ToolManager(object):
"""
Helper class that groups all the user interactions for a Figure
Attributes
----------
figure: `Figure`
keypresslock: `widgets.LockDraw`
`LockDraw` object to know if the `canvas` key_press_event is locked
messagelock: `widgets.LockDraw`
`LockDraw` object to know if the message is available to write
"""
def __init__(self, figure=None):
warnings.warn('Treat the new Tool classes introduced in v1.5 as ' +
'experimental for now, the API will likely change in ' +
'version 2.1 and perhaps the rcParam as well')
self._key_press_handler_id = None
self._tools = {}
self._keys = {}
self._toggled = {}
self._callbacks = cbook.CallbackRegistry()
# to process keypress event
self.keypresslock = widgets.LockDraw()
self.messagelock = widgets.LockDraw()
self._figure = None
self.set_figure(figure)
@property
def canvas(self):
"""Canvas managed by FigureManager"""
if not self._figure:
return None
return self._figure.canvas
@property
def figure(self):
"""Figure that holds the canvas"""
return self._figure
@figure.setter
def figure(self, figure):
self.set_figure(figure)
def set_figure(self, figure, update_tools=True):
"""
Sets the figure to interact with the tools
Parameters
----------
figure: `Figure`
update_tools: bool
Force tools to update figure
"""
if self._key_press_handler_id:
self.canvas.mpl_disconnect(self._key_press_handler_id)
self._figure = figure
if figure:
self._key_press_handler_id = self.canvas.mpl_connect(
'key_press_event', self._key_press)
if update_tools:
for tool in self._tools.values():
tool.figure = figure
def toolmanager_connect(self, s, func):
"""
Connect event with string *s* to *func*.
Parameters
----------
s : String
Name of the event
The following events are recognized
- 'tool_message_event'
- 'tool_removed_event'
- 'tool_added_event'
For every tool added a new event is created
- 'tool_trigger_TOOLNAME'
Where TOOLNAME is the id of the tool.
func : function
Function to be called with signature
def func(event)
"""
return self._callbacks.connect(s, func)
def toolmanager_disconnect(self, cid):
"""
Disconnect callback id *cid*
Example usage::
cid = toolmanager.toolmanager_connect('tool_trigger_zoom',
on_press)
#...later
toolmanager.toolmanager_disconnect(cid)
"""
return self._callbacks.disconnect(cid)
def message_event(self, message, sender=None):
""" Emit a `ToolManagerMessageEvent`"""
if sender is None:
sender = self
s = 'tool_message_event'
event = ToolManagerMessageEvent(s, sender, message)
self._callbacks.process(s, event)
@property
def active_toggle(self):
"""Currently toggled tools"""
return self._toggled
def get_tool_keymap(self, name):
"""
Get the keymap associated with the specified tool
Parameters
----------
name : string
Name of the Tool
Returns
-------
list : list of keys associated with the Tool
"""
keys = [k for k, i in six.iteritems(self._keys) if i == name]
return keys
def _remove_keys(self, name):
for k in self.get_tool_keymap(name):
del self._keys[k]
def update_keymap(self, name, *keys):
"""
Set the keymap to associate with the specified tool
Parameters
----------
name : string
Name of the Tool
keys : keys to associate with the Tool
"""
if name not in self._tools:
raise KeyError('%s not in Tools' % name)
self._remove_keys(name)
for key in keys:
for k in validate_stringlist(key):
if k in self._keys:
warnings.warn('Key %s changed from %s to %s' %
(k, self._keys[k], name))
self._keys[k] = name
def remove_tool(self, name):
"""
Remove tool from `ToolManager`
Parameters
----------
name : string
Name of the Tool
"""
tool = self.get_tool(name)
tool.destroy()
# If is a toggle tool and toggled, untoggle
if getattr(tool, 'toggled', False):
self.trigger_tool(tool, 'toolmanager')
self._remove_keys(name)
s = 'tool_removed_event'
event = ToolEvent(s, self, tool)
self._callbacks.process(s, event)
del self._tools[name]
def add_tool(self, name, tool, *args, **kwargs):
"""
Add *tool* to `ToolManager`
If successful, adds a new event `tool_trigger_name` where **name** is
the **name** of the tool; this event is fired every time
the tool is triggered.
Parameters
----------
name : str
Name of the tool, treated as the ID, has to be unique
tool : class_like, i.e. str or type
Reference to find the class of the Tool to be added.
Notes
-----
args and kwargs get passed directly to the tool's constructor.
See Also
--------
matplotlib.backend_tools.ToolBase : The base class for tools.
"""
tool_cls = self._get_cls_to_instantiate(tool)
if not tool_cls:
raise ValueError('Impossible to find class for %s' % str(tool))
if name in self._tools:
warnings.warn('A "Tool class" with the same name already exists, '
'not added')
return self._tools[name]
tool_obj = tool_cls(self, name, *args, **kwargs)
self._tools[name] = tool_obj
if tool_cls.default_keymap is not None:
self.update_keymap(name, tool_cls.default_keymap)
# For toggle tools init the radio_group in self._toggled
if isinstance(tool_obj, tools.ToolToggleBase):
# None group is not mutually exclusive, a set is used to keep track
# of all toggled tools in this group
if tool_obj.radio_group is None:
self._toggled.setdefault(None, set())
else:
self._toggled.setdefault(tool_obj.radio_group, None)
# If initially toggled
if tool_obj.toggled:
self._handle_toggle(tool_obj, None, None, None)
tool_obj.set_figure(self.figure)
self._tool_added_event(tool_obj)
return tool_obj
def _tool_added_event(self, tool):
s = 'tool_added_event'
event = ToolEvent(s, self, tool)
self._callbacks.process(s, event)
def _handle_toggle(self, tool, sender, canvasevent, data):
"""
Toggle a tool; any other toggled tool in the same radio group is untoggled first
Called from trigger_tool
Parameters
----------
tool: Tool object
sender: object
Object that wishes to trigger the tool
canvasevent : Event
Original Canvas event or None
data : Object
Extra data to pass to the tool when triggering
"""
radio_group = tool.radio_group
# radio_group None is not mutually exclusive
# just keep track of toggled tools in this group
if radio_group is None:
if tool.name in self._toggled[None]:
self._toggled[None].remove(tool.name)
else:
self._toggled[None].add(tool.name)
return
# If the tool already has a toggled state, untoggle it
if self._toggled[radio_group] == tool.name:
toggled = None
# If no tool was toggled in the radio_group
# toggle it
elif self._toggled[radio_group] is None:
toggled = tool.name
# Other tool in the radio_group is toggled
else:
# Untoggle previously toggled tool
self.trigger_tool(self._toggled[radio_group],
self,
canvasevent,
data)
toggled = tool.name
# Keep track of the toggled tool in the radio_group
self._toggled[radio_group] = toggled
def _get_cls_to_instantiate(self, callback_class):
# Find the class that corresponds to the tool
if isinstance(callback_class, six.string_types):
# FIXME: make more complete searching structure
if callback_class in globals():
callback_class = globals()[callback_class]
else:
mod = 'backend_tools'
current_module = __import__(mod,
globals(), locals(), [mod], 1)
callback_class = getattr(current_module, callback_class, False)
if callable(callback_class):
return callback_class
else:
return None
def trigger_tool(self, name, sender=None, canvasevent=None,
data=None):
"""
Trigger a tool and emit the tool_trigger_[name] event
Parameters
----------
name : string
Name of the tool
sender: object
Object that wishes to trigger the tool
canvasevent : Event
Original Canvas event or None
data : Object
Extra data to pass to the tool when triggering
"""
tool = self.get_tool(name)
if tool is None:
return
if sender is None:
sender = self
self._trigger_tool(name, sender, canvasevent, data)
s = 'tool_trigger_%s' % name
event = ToolTriggerEvent(s, sender, tool, canvasevent, data)
self._callbacks.process(s, event)
def _trigger_tool(self, name, sender=None, canvasevent=None, data=None):
"""
Trigger on a tool
Method to actually trigger the tool
"""
tool = self.get_tool(name)
if isinstance(tool, tools.ToolToggleBase):
self._handle_toggle(tool, sender, canvasevent, data)
# Important!!!
# This is where the Tool object gets triggered
tool.trigger(sender, canvasevent, data)
def _key_press(self, event):
if event.key is None or self.keypresslock.locked():
return
name = self._keys.get(event.key, None)
if name is None:
return
self.trigger_tool(name, canvasevent=event)
@property
def tools(self):
"""Return the tools controlled by `ToolManager`"""
return self._tools
def get_tool(self, name, warn=True):
"""
Return the tool object, also accepts the actual tool for convenience
Parameters
----------
name : str, ToolBase
Name of the tool, or the tool itself
warn : bool, optional
If this method should give warnings.
"""
if isinstance(name, tools.ToolBase) and name.name in self._tools:
return name
if name not in self._tools:
if warn:
warnings.warn("ToolManager does not control tool %s" % name)
return None
return self._tools[name]
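# Illustrative usage sketch (not part of the original module): wiring a
# ToolManager to an existing figure, registering the built-in zoom tool and
# listening for its trigger event. The tool name 'zoom', the key 'z' and the
# use of ``tools.ToolZoom`` are assumptions of this sketch, not something the
# module itself mandates.
def _example_toolmanager(figure):
    manager = ToolManager(figure)
    manager.add_tool('zoom', tools.ToolZoom)      # register the tool by class
    manager.update_keymap('zoom', 'z')            # rebind its keyboard shortcut
    cid = manager.toolmanager_connect(
        'tool_trigger_zoom', lambda event: print(event.tool.name))
    return manager, cid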
# === cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/legend_handler.py (py) ===
"""
This module defines default legend handlers.
It is strongly encouraged to have read the :ref:`legend guide
<sphx_glr_tutorials_intermediate_legend_guide.py>` before this documentation.
Legend handlers are expected to be callable objects with the following
signature::
legend_handler(legend, orig_handle, fontsize, handlebox)
Where *legend* is the legend itself, *orig_handle* is the original
plot, *fontsize* is the fontsize in pixels, and *handlebox* is a
OffsetBox instance. Within the call, you should create relevant
artists (using relevant properties from the *legend* and/or
*orig_handle*) and add them into the handlebox. The artists need to
be scaled according to the fontsize (note that the size is in pixels,
i.e., this is a dpi-scaled value).
This module includes definition of several legend handler classes
derived from the base class (HandlerBase) with the following method::
def legend_artist(self, legend, orig_handle, fontsize, handlebox):
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import zip
from itertools import cycle
import numpy as np
from matplotlib.lines import Line2D
from matplotlib.patches import Rectangle
import matplotlib.collections as mcoll
import matplotlib.colors as mcolors
def update_from_first_child(tgt, src):
tgt.update_from(src.get_children()[0])
class HandlerBase(object):
"""
A Base class for default legend handlers.
Derived classes are meant to override the *create_artists* method, which
has the following signature::
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize,
trans):
The overridden method needs to create artists of the given
transform that fits in the given dimension (xdescent, ydescent,
width, height) that are scaled by fontsize if necessary.
"""
def __init__(self, xpad=0., ypad=0., update_func=None):
self._xpad, self._ypad = xpad, ypad
self._update_prop_func = update_func
def _update_prop(self, legend_handle, orig_handle):
if self._update_prop_func is None:
self._default_update_prop(legend_handle, orig_handle)
else:
self._update_prop_func(legend_handle, orig_handle)
def _default_update_prop(self, legend_handle, orig_handle):
legend_handle.update_from(orig_handle)
def update_prop(self, legend_handle, orig_handle, legend):
self._update_prop(legend_handle, orig_handle)
legend._set_artist_props(legend_handle)
legend_handle.set_clip_box(None)
legend_handle.set_clip_path(None)
def adjust_drawing_area(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize,
):
xdescent = xdescent - self._xpad * fontsize
ydescent = ydescent - self._ypad * fontsize
width = width - self._xpad * fontsize
height = height - self._ypad * fontsize
return xdescent, ydescent, width, height
def legend_artist(self, legend, orig_handle,
fontsize, handlebox):
"""
Return the artist that this HandlerBase generates for the given
original artist/handle.
Parameters
----------
legend : :class:`matplotlib.legend.Legend` instance
The legend for which these legend artists are being created.
orig_handle : :class:`matplotlib.artist.Artist` or similar
The object for which these legend artists are being created.
fontsize : float or int
The fontsize in pixels. The artists being created should
be scaled according to the given fontsize.
handlebox : :class:`matplotlib.offsetbox.OffsetBox` instance
The box which has been created to hold this legend entry's
artists. Artists created in the `legend_artist` method must
be added to this handlebox inside this method.
"""
xdescent, ydescent, width, height = self.adjust_drawing_area(
legend, orig_handle,
handlebox.xdescent, handlebox.ydescent,
handlebox.width, handlebox.height,
fontsize)
artists = self.create_artists(legend, orig_handle,
xdescent, ydescent, width, height,
fontsize, handlebox.get_transform())
# create_artists will return a list of artists.
for a in artists:
handlebox.add_artist(a)
# we only return the first artist
return artists[0]
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize,
trans):
raise NotImplementedError('Derived must override')
class HandlerNpoints(HandlerBase):
"""
A legend handler that shows *numpoints* points in the legend entry.
"""
def __init__(self, marker_pad=0.3, numpoints=None, **kw):
"""
Parameters
----------
marker_pad : float
Padding between points in legend entry.
numpoints : int
Number of points to show in legend entry.
Notes
-----
Any other keyword arguments are given to `HandlerBase`.
"""
HandlerBase.__init__(self, **kw)
self._numpoints = numpoints
self._marker_pad = marker_pad
def get_numpoints(self, legend):
if self._numpoints is None:
return legend.numpoints
else:
return self._numpoints
def get_xdata(self, legend, xdescent, ydescent, width, height, fontsize):
numpoints = self.get_numpoints(legend)
if numpoints > 1:
# we put some pad here to compensate for the size of the marker
pad = self._marker_pad * fontsize
xdata = np.linspace(-xdescent + pad,
-xdescent + width - pad,
numpoints)
xdata_marker = xdata
else:
xdata = np.linspace(-xdescent, -xdescent + width, 2)
xdata_marker = [-xdescent + 0.5 * width]
return xdata, xdata_marker
class HandlerNpointsYoffsets(HandlerNpoints):
"""
A legend handler that shows *numpoints* in the legend, and allows them to
be individually offset in the y-direction.
"""
def __init__(self, numpoints=None, yoffsets=None, **kw):
"""
Parameters
----------
numpoints : int
Number of points to show in legend entry.
yoffsets : array of floats
Length *numpoints* list of y offsets for each point in
legend entry.
Notes
-----
Any other keyword arguments are given to `HandlerNpoints`.
"""
HandlerNpoints.__init__(self, numpoints=numpoints, **kw)
self._yoffsets = yoffsets
def get_ydata(self, legend, xdescent, ydescent, width, height, fontsize):
if self._yoffsets is None:
ydata = height * legend._scatteryoffsets
else:
ydata = height * np.asarray(self._yoffsets)
return ydata
class HandlerLine2D(HandlerNpoints):
"""
Handler for `.Line2D` instances.
"""
def __init__(self, marker_pad=0.3, numpoints=None, **kw):
"""
Parameters
----------
marker_pad : float
Padding between points in legend entry.
numpoints : int
Number of points to show in legend entry.
Notes
-----
Any other keyword arguments are given to `HandlerNpoints`.
"""
HandlerNpoints.__init__(self, marker_pad=marker_pad,
numpoints=numpoints, **kw)
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize,
trans):
xdata, xdata_marker = self.get_xdata(legend, xdescent, ydescent,
width, height, fontsize)
ydata = ((height - ydescent) / 2.) * np.ones(xdata.shape, float)
legline = Line2D(xdata, ydata)
self.update_prop(legline, orig_handle, legend)
legline.set_drawstyle('default')
legline.set_marker("")
legline_marker = Line2D(xdata_marker, ydata[:len(xdata_marker)])
self.update_prop(legline_marker, orig_handle, legend)
legline_marker.set_linestyle('None')
if legend.markerscale != 1:
newsz = legline_marker.get_markersize() * legend.markerscale
legline_marker.set_markersize(newsz)
# we don't want to add this to the return list because
# the texts and handles are assumed to be in one-to-one
# correspondence.
legline._legmarker = legline_marker
legline.set_transform(trans)
legline_marker.set_transform(trans)
return [legline, legline_marker]
class HandlerPatch(HandlerBase):
"""
Handler for `.Patch` instances.
"""
def __init__(self, patch_func=None, **kw):
"""
Parameters
----------
patch_func : callable, optional
The function that creates the legend key artist.
*patch_func* should have the signature::
def patch_func(legend=legend, orig_handle=orig_handle,
xdescent=xdescent, ydescent=ydescent,
width=width, height=height, fontsize=fontsize)
Subsequently the created artist will have its ``update_prop`` method
called and the appropriate transform will be applied.
Notes
-----
Any other keyword arguments are given to `HandlerBase`.
"""
HandlerBase.__init__(self, **kw)
self._patch_func = patch_func
def _create_patch(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize):
if self._patch_func is None:
p = Rectangle(xy=(-xdescent, -ydescent),
width=width, height=height)
else:
p = self._patch_func(legend=legend, orig_handle=orig_handle,
xdescent=xdescent, ydescent=ydescent,
width=width, height=height, fontsize=fontsize)
return p
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize, trans):
p = self._create_patch(legend, orig_handle,
xdescent, ydescent, width, height, fontsize)
self.update_prop(p, orig_handle, legend)
p.set_transform(trans)
return [p]
class HandlerLineCollection(HandlerLine2D):
"""
Handler for `.LineCollection` instances.
"""
def get_numpoints(self, legend):
if self._numpoints is None:
return legend.scatterpoints
else:
return self._numpoints
def _default_update_prop(self, legend_handle, orig_handle):
lw = orig_handle.get_linewidths()[0]
dashes = orig_handle._us_linestyles[0]
color = orig_handle.get_colors()[0]
legend_handle.set_color(color)
legend_handle.set_linestyle(dashes)
legend_handle.set_linewidth(lw)
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize, trans):
xdata, xdata_marker = self.get_xdata(legend, xdescent, ydescent,
width, height, fontsize)
ydata = ((height - ydescent) / 2.) * np.ones(xdata.shape, float)
legline = Line2D(xdata, ydata)
self.update_prop(legline, orig_handle, legend)
legline.set_transform(trans)
return [legline]
class HandlerRegularPolyCollection(HandlerNpointsYoffsets):
"""
Handler for `.RegularPolyCollections`.
"""
def __init__(self, yoffsets=None, sizes=None, **kw):
HandlerNpointsYoffsets.__init__(self, yoffsets=yoffsets, **kw)
self._sizes = sizes
def get_numpoints(self, legend):
if self._numpoints is None:
return legend.scatterpoints
else:
return self._numpoints
def get_sizes(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize):
if self._sizes is None:
handle_sizes = orig_handle.get_sizes()
if not len(handle_sizes):
handle_sizes = [1]
size_max = max(handle_sizes) * legend.markerscale ** 2
size_min = min(handle_sizes) * legend.markerscale ** 2
numpoints = self.get_numpoints(legend)
if numpoints < 4:
sizes = [.5 * (size_max + size_min), size_max,
size_min][:numpoints]
else:
rng = (size_max - size_min)
sizes = rng * np.linspace(0, 1, numpoints) + size_min
else:
sizes = self._sizes
return sizes
def update_prop(self, legend_handle, orig_handle, legend):
self._update_prop(legend_handle, orig_handle)
legend_handle.set_figure(legend.figure)
#legend._set_artist_props(legend_handle)
legend_handle.set_clip_box(None)
legend_handle.set_clip_path(None)
def create_collection(self, orig_handle, sizes, offsets, transOffset):
p = type(orig_handle)(orig_handle.get_numsides(),
rotation=orig_handle.get_rotation(),
sizes=sizes,
offsets=offsets,
transOffset=transOffset,
)
return p
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize,
trans):
xdata, xdata_marker = self.get_xdata(legend, xdescent, ydescent,
width, height, fontsize)
ydata = self.get_ydata(legend, xdescent, ydescent,
width, height, fontsize)
sizes = self.get_sizes(legend, orig_handle, xdescent, ydescent,
width, height, fontsize)
p = self.create_collection(orig_handle, sizes,
offsets=list(zip(xdata_marker, ydata)),
transOffset=trans)
self.update_prop(p, orig_handle, legend)
p._transOffset = trans
return [p]
class HandlerPathCollection(HandlerRegularPolyCollection):
"""
Handler for `.PathCollections`, which are used by `~.Axes.scatter`.
"""
def create_collection(self, orig_handle, sizes, offsets, transOffset):
p = type(orig_handle)([orig_handle.get_paths()[0]],
sizes=sizes,
offsets=offsets,
transOffset=transOffset,
)
return p
class HandlerCircleCollection(HandlerRegularPolyCollection):
"""
Handler for `.CircleCollections`.
"""
def create_collection(self, orig_handle, sizes, offsets, transOffset):
p = type(orig_handle)(sizes,
offsets=offsets,
transOffset=transOffset,
)
return p
class HandlerErrorbar(HandlerLine2D):
"""
Handler for Errorbars.
"""
def __init__(self, xerr_size=0.5, yerr_size=None,
marker_pad=0.3, numpoints=None, **kw):
self._xerr_size = xerr_size
self._yerr_size = yerr_size
HandlerLine2D.__init__(self, marker_pad=marker_pad,
numpoints=numpoints, **kw)
def get_err_size(self, legend, xdescent, ydescent,
width, height, fontsize):
xerr_size = self._xerr_size * fontsize
if self._yerr_size is None:
yerr_size = xerr_size
else:
yerr_size = self._yerr_size * fontsize
return xerr_size, yerr_size
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize,
trans):
plotlines, caplines, barlinecols = orig_handle
xdata, xdata_marker = self.get_xdata(legend, xdescent, ydescent,
width, height, fontsize)
ydata = ((height - ydescent) / 2.) * np.ones(xdata.shape, float)
legline = Line2D(xdata, ydata)
xdata_marker = np.asarray(xdata_marker)
ydata_marker = np.asarray(ydata[:len(xdata_marker)])
xerr_size, yerr_size = self.get_err_size(legend, xdescent, ydescent,
width, height, fontsize)
legline_marker = Line2D(xdata_marker, ydata_marker)
# when plotlines are None (only errorbars are drawn), we just
# make legline invisible.
if plotlines is None:
legline.set_visible(False)
legline_marker.set_visible(False)
else:
self.update_prop(legline, plotlines, legend)
legline.set_drawstyle('default')
legline.set_marker('None')
self.update_prop(legline_marker, plotlines, legend)
legline_marker.set_linestyle('None')
if legend.markerscale != 1:
newsz = legline_marker.get_markersize() * legend.markerscale
legline_marker.set_markersize(newsz)
handle_barlinecols = []
handle_caplines = []
if orig_handle.has_xerr:
verts = [ ((x - xerr_size, y), (x + xerr_size, y))
for x, y in zip(xdata_marker, ydata_marker)]
coll = mcoll.LineCollection(verts)
self.update_prop(coll, barlinecols[0], legend)
handle_barlinecols.append(coll)
if caplines:
capline_left = Line2D(xdata_marker - xerr_size, ydata_marker)
capline_right = Line2D(xdata_marker + xerr_size, ydata_marker)
self.update_prop(capline_left, caplines[0], legend)
self.update_prop(capline_right, caplines[0], legend)
capline_left.set_marker("|")
capline_right.set_marker("|")
handle_caplines.append(capline_left)
handle_caplines.append(capline_right)
if orig_handle.has_yerr:
verts = [ ((x, y - yerr_size), (x, y + yerr_size))
for x, y in zip(xdata_marker, ydata_marker)]
coll = mcoll.LineCollection(verts)
self.update_prop(coll, barlinecols[0], legend)
handle_barlinecols.append(coll)
if caplines:
capline_left = Line2D(xdata_marker, ydata_marker - yerr_size)
capline_right = Line2D(xdata_marker, ydata_marker + yerr_size)
self.update_prop(capline_left, caplines[0], legend)
self.update_prop(capline_right, caplines[0], legend)
capline_left.set_marker("_")
capline_right.set_marker("_")
handle_caplines.append(capline_left)
handle_caplines.append(capline_right)
artists = []
artists.extend(handle_barlinecols)
artists.extend(handle_caplines)
artists.append(legline)
artists.append(legline_marker)
for artist in artists:
artist.set_transform(trans)
return artists
class HandlerStem(HandlerNpointsYoffsets):
"""
Handler for plots produced by `~.Axes.stem`.
"""
def __init__(self, marker_pad=0.3, numpoints=None,
bottom=None, yoffsets=None, **kw):
"""
Parameters
----------
marker_pad : float
Padding between points in legend entry. Default is 0.3.
numpoints : int, optional
Number of points to show in legend entry.
bottom : float, optional
yoffsets : array of floats, optional
Length *numpoints* list of y offsets for each point in
legend entry.
Notes
-----
Any other keyword arguments are given to `HandlerNpointsYoffsets`.
"""
HandlerNpointsYoffsets.__init__(self, marker_pad=marker_pad,
numpoints=numpoints,
yoffsets=yoffsets,
**kw)
self._bottom = bottom
def get_ydata(self, legend, xdescent, ydescent, width, height, fontsize):
if self._yoffsets is None:
ydata = height * (0.5 * legend._scatteryoffsets + 0.5)
else:
ydata = height * np.asarray(self._yoffsets)
return ydata
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize,
trans):
markerline, stemlines, baseline = orig_handle
xdata, xdata_marker = self.get_xdata(legend, xdescent, ydescent,
width, height, fontsize)
ydata = self.get_ydata(legend, xdescent, ydescent,
width, height, fontsize)
if self._bottom is None:
bottom = 0.
else:
bottom = self._bottom
leg_markerline = Line2D(xdata_marker, ydata[:len(xdata_marker)])
self.update_prop(leg_markerline, markerline, legend)
leg_stemlines = []
for thisx, thisy in zip(xdata_marker, ydata):
l = Line2D([thisx, thisx], [bottom, thisy])
leg_stemlines.append(l)
for lm, m in zip(leg_stemlines, stemlines):
self.update_prop(lm, m, legend)
leg_baseline = Line2D([np.min(xdata), np.max(xdata)],
[bottom, bottom])
self.update_prop(leg_baseline, baseline, legend)
artists = [leg_markerline]
artists.extend(leg_stemlines)
artists.append(leg_baseline)
for artist in artists:
artist.set_transform(trans)
return artists
class HandlerTuple(HandlerBase):
"""
Handler for Tuple.
Additional kwargs are passed through to `HandlerBase`.
Parameters
----------
ndivide : int, optional
The number of sections to divide the legend area into. If None,
use the length of the input tuple. Default is 1.
pad : float, optional
Padding in units of fraction of font size. If None (the default),
fall back to ``legend.borderpad``.
"""
def __init__(self, ndivide=1, pad=None, **kwargs):
self._ndivide = ndivide
self._pad = pad
HandlerBase.__init__(self, **kwargs)
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize,
trans):
handler_map = legend.get_legend_handler_map()
if self._ndivide is None:
ndivide = len(orig_handle)
else:
ndivide = self._ndivide
if self._pad is None:
pad = legend.borderpad * fontsize
else:
pad = self._pad * fontsize
if ndivide > 1:
width = (width - pad * (ndivide - 1)) / ndivide
xds_cycle = cycle(xdescent - (width + pad) * np.arange(ndivide))
a_list = []
for handle1 in orig_handle:
handler = legend.get_legend_handler(handler_map, handle1)
_a_list = handler.create_artists(
legend, handle1,
next(xds_cycle), ydescent, width, height, fontsize, trans)
a_list.extend(_a_list)
return a_list
class HandlerPolyCollection(HandlerBase):
"""
Handler for `.PolyCollection` used in `~.Axes.fill_between` and `~.Axes.stackplot`.
"""
def _update_prop(self, legend_handle, orig_handle):
def first_color(colors):
if colors is None:
return None
colors = mcolors.to_rgba_array(colors)
if len(colors):
return colors[0]
else:
return "none"
def get_first(prop_array):
if len(prop_array):
return prop_array[0]
else:
return None
edgecolor = getattr(orig_handle, '_original_edgecolor',
orig_handle.get_edgecolor())
legend_handle.set_edgecolor(first_color(edgecolor))
facecolor = getattr(orig_handle, '_original_facecolor',
orig_handle.get_facecolor())
legend_handle.set_facecolor(first_color(facecolor))
legend_handle.set_fill(orig_handle.get_fill())
legend_handle.set_hatch(orig_handle.get_hatch())
legend_handle.set_linewidth(get_first(orig_handle.get_linewidths()))
legend_handle.set_linestyle(get_first(orig_handle.get_linestyles()))
legend_handle.set_transform(get_first(orig_handle.get_transforms()))
legend_handle.set_figure(orig_handle.get_figure())
legend_handle.set_alpha(orig_handle.get_alpha())
def create_artists(self, legend, orig_handle,
xdescent, ydescent, width, height, fontsize, trans):
p = Rectangle(xy=(-xdescent, -ydescent),
width=width, height=height)
self.update_prop(p, orig_handle, legend)
p.set_transform(trans)
return [p]
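# Illustrative usage sketch (not part of the original module): handlers are
# normally selected through the ``handler_map`` argument of ``Axes.legend``.
# Here a line and a scatter artist share one legend entry via HandlerTuple;
# the plotted data and labels are arbitrary.
def _example_handler_map():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    line, = ax.plot([0, 1], [0, 1], marker='o', label='line')
    points = ax.scatter([0, 0.5, 1], [1, 0.5, 0], label='points')
    ax.legend([(line, points)], ['combined'],
              handler_map={tuple: HandlerTuple(ndivide=None)})
    plt.show()
    return fig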
# === cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/widgets.py (py) ===
"""
GUI neutral widgets
===================
Widgets that are designed to work for any of the GUI backends.
All of these widgets require you to predefine a :class:`matplotlib.axes.Axes`
instance and pass that as the first arg. matplotlib doesn't try to
be too smart with respect to layout -- you will have to figure out how
wide and tall you want your Axes to be to accommodate your widget.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import copy
import six
from six.moves import zip
import numpy as np
from matplotlib import rcParams
from .patches import Circle, Rectangle, Ellipse
from .lines import Line2D
from .transforms import blended_transform_factory
class LockDraw(object):
"""
Some widgets, like the cursor, draw onto the canvas, and this is not
desirable under all circumstances, like when the toolbar is in
zoom-to-rect mode and drawing a rectangle. The module level "lock"
allows someone to grab the lock and prevent other widgets from
drawing. Use ``matplotlib.widgets.lock(someobj)`` to prevent
other widgets from drawing while you're interacting with the canvas.
"""
def __init__(self):
self._owner = None
def __call__(self, o):
"""reserve the lock for *o*"""
if not self.available(o):
raise ValueError('already locked')
self._owner = o
def release(self, o):
"""release the lock"""
if not self.available(o):
raise ValueError('you do not own this lock')
self._owner = None
def available(self, o):
"""drawing is available to *o*"""
return not self.locked() or self.isowner(o)
def isowner(self, o):
"""Return True if *o* owns this lock"""
return self._owner is o
def locked(self):
"""Return True if the lock is currently held by an owner"""
return self._owner is not None
class Widget(object):
"""
Abstract base class for GUI neutral widgets
"""
drawon = True
eventson = True
_active = True
def set_active(self, active):
"""Set whether the widget is active.
"""
self._active = active
def get_active(self):
"""Get whether the widget is active.
"""
return self._active
# set_active is overridden by SelectorWidgets.
active = property(get_active, lambda self, active: self.set_active(active),
doc="Is the widget active?")
def ignore(self, event):
"""Return True if event should be ignored.
This method (or a version of it) should be called at the beginning
of any event callback.
"""
return not self.active
class AxesWidget(Widget):
"""Widget that is connected to a single
:class:`~matplotlib.axes.Axes`.
To guarantee that the widget remains responsive and is not garbage-collected,
a reference to the object should be maintained by the user.
This is necessary because the callback registry
maintains only weak-refs to the functions, which are member
functions of the widget. If there are no references to the widget
object it may be garbage collected which will disconnect the
callbacks.
Attributes:
*ax* : :class:`~matplotlib.axes.Axes`
The parent axes for the widget
*canvas* : :class:`~matplotlib.backend_bases.FigureCanvasBase` subclass
The parent figure canvas for the widget.
*active* : bool
If False, the widget does not respond to events.
"""
def __init__(self, ax):
self.ax = ax
self.canvas = ax.figure.canvas
self.cids = []
def connect_event(self, event, callback):
"""Connect callback with an event.
This should be used in lieu of `figure.canvas.mpl_connect` since this
function stores callback ids for later clean up.
"""
cid = self.canvas.mpl_connect(event, callback)
self.cids.append(cid)
def disconnect_events(self):
"""Disconnect all events created by this widget."""
for c in self.cids:
self.canvas.mpl_disconnect(c)
class Button(AxesWidget):
"""
A GUI neutral button.
For the button to remain responsive you must keep a reference to it.
Call :meth:`on_clicked` to connect to the button.
Attributes
----------
ax :
The :class:`matplotlib.axes.Axes` the button renders into.
label :
A :class:`matplotlib.text.Text` instance.
color :
The color of the button when not hovering.
hovercolor :
The color of the button when hovering.
"""
def __init__(self, ax, label, image=None,
color='0.85', hovercolor='0.95'):
"""
Parameters
----------
ax : matplotlib.axes.Axes
The :class:`matplotlib.axes.Axes` instance the button
will be placed into.
label : str
The button text. Accepts string.
image : array, mpl image, Pillow Image
The image to place in the button, if not *None*.
Can be any legal arg to imshow (numpy array,
matplotlib Image instance, or Pillow Image).
color : color
The color of the button when not activated
hovercolor : color
The color of the button when the mouse is over it
"""
AxesWidget.__init__(self, ax)
if image is not None:
ax.imshow(image)
self.label = ax.text(0.5, 0.5, label,
verticalalignment='center',
horizontalalignment='center',
transform=ax.transAxes)
self.cnt = 0
self.observers = {}
self.connect_event('button_press_event', self._click)
self.connect_event('button_release_event', self._release)
self.connect_event('motion_notify_event', self._motion)
ax.set_navigate(False)
ax.set_facecolor(color)
ax.set_xticks([])
ax.set_yticks([])
self.color = color
self.hovercolor = hovercolor
self._lastcolor = color
def _click(self, event):
if self.ignore(event):
return
if event.inaxes != self.ax:
return
if not self.eventson:
return
if event.canvas.mouse_grabber != self.ax:
event.canvas.grab_mouse(self.ax)
def _release(self, event):
if self.ignore(event):
return
if event.canvas.mouse_grabber != self.ax:
return
event.canvas.release_mouse(self.ax)
if not self.eventson:
return
if event.inaxes != self.ax:
return
for cid, func in six.iteritems(self.observers):
func(event)
def _motion(self, event):
if self.ignore(event):
return
if event.inaxes == self.ax:
c = self.hovercolor
else:
c = self.color
if c != self._lastcolor:
self.ax.set_facecolor(c)
self._lastcolor = c
if self.drawon:
self.ax.figure.canvas.draw()
def on_clicked(self, func):
"""
When the button is clicked, call this *func* with event.
A connection id is returned. It can be used to disconnect
the button from its callback.
"""
cid = self.cnt
self.observers[cid] = func
self.cnt += 1
return cid
def disconnect(self, cid):
"""remove the observer with connection id *cid*"""
try:
del self.observers[cid]
except KeyError:
pass
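# Illustrative usage sketch (not part of the original module): a minimal
# Button wired to a callback via on_clicked, following the class docstring
# above. The axes placement values are arbitrary.
def _example_button():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    bax = fig.add_axes([0.7, 0.02, 0.2, 0.075])   # small axes for the button
    button = Button(bax, 'Apply', hovercolor='0.9')
    button.on_clicked(lambda event: print('button clicked'))
    plt.show()
    return button   # keep a reference so the widget stays responsive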
class Slider(AxesWidget):
"""
A slider representing a floating point range.
Create a slider from *valmin* to *valmax* in axes *ax*. For the slider to
remain responsive you must maintain a reference to it. Call
:meth:`on_changed` to connect to the slider event.
Attributes
----------
val : float
Slider value.
"""
def __init__(self, ax, label, valmin, valmax, valinit=0.5, valfmt='%1.2f',
closedmin=True, closedmax=True, slidermin=None,
slidermax=None, dragging=True, valstep=None, **kwargs):
"""
Parameters
----------
ax : Axes
The Axes to put the slider in.
label : str
Slider label.
valmin : float
The minimum value of the slider.
valmax : float
The maximum value of the slider.
valinit : float, optional, default: 0.5
The slider initial position.
valfmt : str, optional, default: "%1.2f"
Used to format the slider value; a printf-style format string.
closedmin : bool, optional, default: True
Indicate whether the slider interval is closed on the bottom.
closedmax : bool, optional, default: True
Indicate whether the slider interval is closed on the top.
slidermin : Slider, optional, default: None
Do not allow the current slider to have a value less than
the value of the Slider `slidermin`.
slidermax : Slider, optional, default: None
Do not allow the current slider to have a value greater than
the value of the Slider `slidermax`.
dragging : bool, optional, default: True
If True the slider can be dragged by the mouse.
valstep : float, optional, default: None
If given, the slider will snap to multiples of `valstep`.
Notes
-----
Additional kwargs are passed on to ``self.poly`` which is the
:class:`~matplotlib.patches.Rectangle` that draws the slider
knob. See the :class:`~matplotlib.patches.Rectangle` documentation for
valid property names (e.g., `facecolor`, `edgecolor`, `alpha`).
"""
AxesWidget.__init__(self, ax)
if slidermin is not None and not hasattr(slidermin, 'val'):
raise ValueError("Argument slidermin ({}) has no 'val'"
.format(type(slidermin)))
if slidermax is not None and not hasattr(slidermax, 'val'):
raise ValueError("Argument slidermax ({}) has no 'val'"
.format(type(slidermax)))
self.closedmin = closedmin
self.closedmax = closedmax
self.slidermin = slidermin
self.slidermax = slidermax
self.drag_active = False
self.valmin = valmin
self.valmax = valmax
self.valstep = valstep
valinit = self._value_in_bounds(valinit)
if valinit is None:
valinit = valmin
self.val = valinit
self.valinit = valinit
self.poly = ax.axvspan(valmin, valinit, 0, 1, **kwargs)
self.vline = ax.axvline(valinit, 0, 1, color='r', lw=1)
self.valfmt = valfmt
ax.set_yticks([])
ax.set_xlim((valmin, valmax))
ax.set_xticks([])
ax.set_navigate(False)
self.connect_event('button_press_event', self._update)
self.connect_event('button_release_event', self._update)
if dragging:
self.connect_event('motion_notify_event', self._update)
self.label = ax.text(-0.02, 0.5, label, transform=ax.transAxes,
verticalalignment='center',
horizontalalignment='right')
self.valtext = ax.text(1.02, 0.5, valfmt % valinit,
transform=ax.transAxes,
verticalalignment='center',
horizontalalignment='left')
self.cnt = 0
self.observers = {}
self.set_val(valinit)
def _value_in_bounds(self, val):
""" Makes sure self.val is with given bounds."""
if self.valstep:
val = np.round((val - self.valmin)/self.valstep)*self.valstep
val += self.valmin
if val <= self.valmin:
if not self.closedmin:
return
val = self.valmin
elif val >= self.valmax:
if not self.closedmax:
return
val = self.valmax
if self.slidermin is not None and val <= self.slidermin.val:
if not self.closedmin:
return
val = self.slidermin.val
if self.slidermax is not None and val >= self.slidermax.val:
if not self.closedmax:
return
val = self.slidermax.val
return val
def _update(self, event):
"""update the slider position"""
if self.ignore(event):
return
if event.button != 1:
return
if event.name == 'button_press_event' and event.inaxes == self.ax:
self.drag_active = True
event.canvas.grab_mouse(self.ax)
if not self.drag_active:
return
elif ((event.name == 'button_release_event') or
(event.name == 'button_press_event' and
event.inaxes != self.ax)):
self.drag_active = False
event.canvas.release_mouse(self.ax)
return
val = self._value_in_bounds(event.xdata)
if (val is not None) and (val != self.val):
self.set_val(val)
def set_val(self, val):
"""
Set slider value to *val*
Parameters
----------
val : float
"""
xy = self.poly.xy
xy[2] = val, 1
xy[3] = val, 0
self.poly.xy = xy
self.valtext.set_text(self.valfmt % val)
if self.drawon:
self.ax.figure.canvas.draw_idle()
self.val = val
if not self.eventson:
return
for cid, func in six.iteritems(self.observers):
func(val)
def on_changed(self, func):
"""
When the slider value is changed call *func* with the new
slider value
Parameters
----------
func : callable
Function to call when slider is changed.
The function must accept a single float as its arguments.
Returns
-------
cid : int
Connection id (which can be used to disconnect *func*)
"""
cid = self.cnt
self.observers[cid] = func
self.cnt += 1
return cid
def disconnect(self, cid):
"""
Remove the observer with connection id *cid*
Parameters
----------
cid : int
Connection id of the observer to be removed
"""
try:
del self.observers[cid]
except KeyError:
pass
def reset(self):
"""Reset the slider to the initial value"""
if (self.val != self.valinit):
self.set_val(self.valinit)
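# Illustrative usage sketch (not part of the original module): a Slider that
# snaps to 0.1 steps via ``valstep`` and rescales a sine curve when moved.
# The axes layout and value range are arbitrary choices for this sketch.
def _example_slider():
    import numpy as np
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    fig.subplots_adjust(bottom=0.25)              # leave room for the slider
    x = np.linspace(0, 2 * np.pi, 200)
    line, = ax.plot(x, np.sin(x))
    sax = fig.add_axes([0.2, 0.1, 0.6, 0.03])
    freq = Slider(sax, 'freq', 0.1, 5.0, valinit=1.0, valstep=0.1)
    def update(val):
        line.set_ydata(np.sin(val * x))
        fig.canvas.draw_idle()
    freq.on_changed(update)
    plt.show()
    return freq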
class CheckButtons(AxesWidget):
"""
A GUI neutral set of check buttons.
For the check buttons to remain responsive you must keep a
reference to this object.
The following attributes are exposed
*ax*
The :class:`matplotlib.axes.Axes` instance the buttons are
located in
*labels*
List of :class:`matplotlib.text.Text` instances
*lines*
List of (line1, line2) tuples for the x's in the check boxes.
These lines exist for each box, but have ``set_visible(False)``
when its box is not checked.
*rectangles*
List of :class:`matplotlib.patches.Rectangle` instances
Connect to the CheckButtons with the :meth:`on_clicked` method
"""
def __init__(self, ax, labels, actives):
"""
Add check buttons to :class:`matplotlib.axes.Axes` instance *ax*
*labels*
A len(buttons) list of labels as strings
*actives*
A len(buttons) list of booleans indicating whether
the button is active
"""
AxesWidget.__init__(self, ax)
ax.set_xticks([])
ax.set_yticks([])
ax.set_navigate(False)
if len(labels) > 1:
dy = 1. / (len(labels) + 1)
ys = np.linspace(1 - dy, dy, len(labels))
else:
dy = 0.25
ys = [0.5]
cnt = 0
axcolor = ax.get_facecolor()
self.labels = []
self.lines = []
self.rectangles = []
lineparams = {'color': 'k', 'linewidth': 1.25,
'transform': ax.transAxes, 'solid_capstyle': 'butt'}
for y, label in zip(ys, labels):
t = ax.text(0.25, y, label, transform=ax.transAxes,
horizontalalignment='left',
verticalalignment='center')
w, h = dy / 2., dy / 2.
x, y = 0.05, y - h / 2.
p = Rectangle(xy=(x, y), width=w, height=h, edgecolor='black',
facecolor=axcolor, transform=ax.transAxes)
l1 = Line2D([x, x + w], [y + h, y], **lineparams)
l2 = Line2D([x, x + w], [y, y + h], **lineparams)
l1.set_visible(actives[cnt])
l2.set_visible(actives[cnt])
self.labels.append(t)
self.rectangles.append(p)
self.lines.append((l1, l2))
ax.add_patch(p)
ax.add_line(l1)
ax.add_line(l2)
cnt += 1
self.connect_event('button_press_event', self._clicked)
self.cnt = 0
self.observers = {}
def _clicked(self, event):
if self.ignore(event):
return
if event.button != 1:
return
if event.inaxes != self.ax:
return
for i, (p, t) in enumerate(zip(self.rectangles, self.labels)):
if (t.get_window_extent().contains(event.x, event.y) or
p.get_window_extent().contains(event.x, event.y)):
self.set_active(i)
break
else:
return
def set_active(self, index):
"""
Directly (de)activate a check button by index.
*index* is an index into the original label list
that this object was constructed with.
Raises ValueError if *index* is invalid.
Callbacks will be triggered if :attr:`eventson` is True.
"""
if index < 0 or index >= len(self.labels):
raise ValueError("Invalid CheckButton index: %d" % index)
l1, l2 = self.lines[index]
l1.set_visible(not l1.get_visible())
l2.set_visible(not l2.get_visible())
if self.drawon:
self.ax.figure.canvas.draw()
if not self.eventson:
return
for cid, func in six.iteritems(self.observers):
func(self.labels[index].get_text())
def get_status(self):
"""
returns a tuple of the status (True/False) of all of the check buttons
"""
return [l1.get_visible() for (l1, l2) in self.lines]
def on_clicked(self, func):
"""
When the button is clicked, call *func* with button label
A connection id is returned which can be used to disconnect
"""
cid = self.cnt
self.observers[cid] = func
self.cnt += 1
return cid
def disconnect(self, cid):
"""remove the observer with connection id *cid*"""
try:
del self.observers[cid]
except KeyError:
pass
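# Illustrative usage sketch (not part of the original module): toggling line
# visibility from a CheckButtons widget; get_status() reports the current
# state of every box. Labels and data are arbitrary.
def _example_check_buttons():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    raw, = ax.plot([0, 1], [0, 1], label='raw')
    fit, = ax.plot([0, 1], [1, 0], label='fit')
    cax = fig.add_axes([0.02, 0.4, 0.15, 0.15])
    check = CheckButtons(cax, ['raw', 'fit'], [True, True])
    lines = {'raw': raw, 'fit': fit}
    def toggle(label):
        lines[label].set_visible(not lines[label].get_visible())
        fig.canvas.draw_idle()
    check.on_clicked(toggle)
    plt.show()
    return check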
class TextBox(AxesWidget):
"""
A GUI neutral text input box.
For the text box to remain responsive you must keep a reference to it.
The following attributes are accessible:
*ax*
The :class:`matplotlib.axes.Axes` the button renders into.
*label*
A :class:`matplotlib.text.Text` instance.
*color*
The color of the text box when not hovering.
*hovercolor*
The color of the text box when hovering.
Call :meth:`on_text_change` to be updated whenever the text changes.
Call :meth:`on_submit` to be updated whenever the user hits enter or
leaves the text entry field.
"""
def __init__(self, ax, label, initial='',
color='.95', hovercolor='1', label_pad=.01):
"""
Parameters
----------
ax : matplotlib.axes.Axes
The :class:`matplotlib.axes.Axes` instance the button
will be placed into.
label : str
Label for this text box. Accepts string.
initial : str
Initial value in the text box
color : color
The color of the box
hovercolor : color
The color of the box when the mouse is over it
label_pad : float
the distance between the label and the right side of the textbox
"""
AxesWidget.__init__(self, ax)
self.DIST_FROM_LEFT = .05
self.params_to_disable = [key for key in rcParams if u'keymap' in key]
self.text = initial
self.label = ax.text(-label_pad, 0.5, label,
verticalalignment='center',
horizontalalignment='right',
transform=ax.transAxes)
self.text_disp = self._make_text_disp(self.text)
self.cnt = 0
self.change_observers = {}
self.submit_observers = {}
# If these lines are removed, the cursor won't appear the first
# time the box is clicked:
self.ax.set_xlim(0, 1)
self.ax.set_ylim(0, 1)
self.cursor_index = 0
# Because this is initialized, _render_cursor
# can assume that cursor exists.
self.cursor = self.ax.vlines(0, 0, 0)
self.cursor.set_visible(False)
self.connect_event('button_press_event', self._click)
self.connect_event('button_release_event', self._release)
self.connect_event('motion_notify_event', self._motion)
self.connect_event('key_press_event', self._keypress)
self.connect_event('resize_event', self._resize)
ax.set_navigate(False)
ax.set_facecolor(color)
ax.set_xticks([])
ax.set_yticks([])
self.color = color
self.hovercolor = hovercolor
self._lastcolor = color
self.capturekeystrokes = False
def _make_text_disp(self, string):
return self.ax.text(self.DIST_FROM_LEFT, 0.5, string,
verticalalignment='center',
horizontalalignment='left',
transform=self.ax.transAxes)
def _rendercursor(self):
# this is a hack to figure out where the cursor should go.
# we draw the text up to where the cursor should go, measure
# and save its dimensions, draw the real text, then put the cursor
# at the saved dimensions
widthtext = self.text[:self.cursor_index]
no_text = False
if(widthtext == "" or widthtext == " " or widthtext == "  "):
no_text = widthtext == ""
widthtext = ","
wt_disp = self._make_text_disp(widthtext)
self.ax.figure.canvas.draw()
bb = wt_disp.get_window_extent()
inv = self.ax.transData.inverted()
bb = inv.transform(bb)
wt_disp.set_visible(False)
if no_text:
bb[1, 0] = bb[0, 0]
# hack done
self.cursor.set_visible(False)
self.cursor = self.ax.vlines(bb[1, 0], bb[0, 1], bb[1, 1])
self.ax.figure.canvas.draw()
def _notify_submit_observers(self):
for cid, func in six.iteritems(self.submit_observers):
func(self.text)
def _release(self, event):
if self.ignore(event):
return
if event.canvas.mouse_grabber != self.ax:
return
event.canvas.release_mouse(self.ax)
def _keypress(self, event):
if self.ignore(event):
return
if self.capturekeystrokes:
key = event.key
if(len(key) == 1):
self.text = (self.text[:self.cursor_index] + key +
self.text[self.cursor_index:])
self.cursor_index += 1
elif key == "right":
if self.cursor_index != len(self.text):
self.cursor_index += 1
elif key == "left":
if self.cursor_index != 0:
self.cursor_index -= 1
elif key == "home":
self.cursor_index = 0
elif key == "end":
self.cursor_index = len(self.text)
elif(key == "backspace"):
if self.cursor_index != 0:
self.text = (self.text[:self.cursor_index - 1] +
self.text[self.cursor_index:])
self.cursor_index -= 1
elif(key == "delete"):
if self.cursor_index != len(self.text):
self.text = (self.text[:self.cursor_index] +
self.text[self.cursor_index + 1:])
self.text_disp.remove()
self.text_disp = self._make_text_disp(self.text)
self._rendercursor()
self._notify_change_observers()
if key == "enter":
self._notify_submit_observers()
def set_val(self, val):
newval = str(val)
if self.text == newval:
return
self.text = newval
self.text_disp.remove()
self.text_disp = self._make_text_disp(self.text)
self._rendercursor()
self._notify_change_observers()
self._notify_submit_observers()
def _notify_change_observers(self):
for cid, func in six.iteritems(self.change_observers):
func(self.text)
def begin_typing(self, x):
self.capturekeystrokes = True
# disable command keys so that the user can type without
# command keys causing figure to be saved, etc
self.reset_params = {}
for key in self.params_to_disable:
self.reset_params[key] = rcParams[key]
rcParams[key] = []
def stop_typing(self):
notifysubmit = False
# because _notify_submit_observers might throw an error in the
# user's code, we only want to call it once we've already done
# our cleanup.
if self.capturekeystrokes:
# since the user is no longer typing,
# reactivate the standard command keys
for key in self.params_to_disable:
rcParams[key] = self.reset_params[key]
notifysubmit = True
self.capturekeystrokes = False
self.cursor.set_visible(False)
self.ax.figure.canvas.draw()
if notifysubmit:
self._notify_submit_observers()
def position_cursor(self, x):
# now, we have to figure out where the cursor goes.
# approximate it based on assuming all characters the same length
if len(self.text) == 0:
self.cursor_index = 0
else:
bb = self.text_disp.get_window_extent()
trans = self.ax.transData
inv = self.ax.transData.inverted()
bb = trans.transform(inv.transform(bb))
text_start = bb[0, 0]
text_end = bb[1, 0]
ratio = (x - text_start) / (text_end - text_start)
if ratio < 0:
ratio = 0
if ratio > 1:
ratio = 1
self.cursor_index = int(len(self.text) * ratio)
self._rendercursor()
def _click(self, event):
if self.ignore(event):
return
if event.inaxes != self.ax:
self.stop_typing()
return
if not self.eventson:
return
if event.canvas.mouse_grabber != self.ax:
event.canvas.grab_mouse(self.ax)
if not self.capturekeystrokes:
self.begin_typing(event.x)
self.position_cursor(event.x)
def _resize(self, event):
self.stop_typing()
def _motion(self, event):
if self.ignore(event):
return
if event.inaxes == self.ax:
c = self.hovercolor
else:
c = self.color
if c != self._lastcolor:
self.ax.set_facecolor(c)
self._lastcolor = c
if self.drawon:
self.ax.figure.canvas.draw()
def on_text_change(self, func):
"""
When the text changes, call this *func* with event.
A connection id is returned which can be used to disconnect.
"""
cid = self.cnt
self.change_observers[cid] = func
self.cnt += 1
return cid
def on_submit(self, func):
"""
When the user hits enter or leaves the submission box, call this
*func* with event.
A connection id is returned which can be used to disconnect.
"""
cid = self.cnt
self.submit_observers[cid] = func
self.cnt += 1
return cid
def disconnect(self, cid):
"""remove the observer with connection id *cid*"""
for reg in (self.change_observers, self.submit_observers):
try:
del reg[cid]
except KeyError:
pass
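# Illustrative usage sketch (not part of the original module): an on_submit
# callback that pushes the typed text into the axes title. The label,
# initial value and axes placement are arbitrary.
def _example_text_box():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    fig.subplots_adjust(bottom=0.2)
    tax = fig.add_axes([0.25, 0.05, 0.5, 0.075])
    box = TextBox(tax, 'title', initial='hello')
    def submit(text):
        ax.set_title(text)
        fig.canvas.draw_idle()
    box.on_submit(submit)
    plt.show()
    return box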
class RadioButtons(AxesWidget):
"""
A GUI neutral radio button.
For the buttons to remain responsive
you must keep a reference to this object.
The following attributes are exposed:
*ax*
The :class:`matplotlib.axes.Axes` instance the buttons are in
*activecolor*
The color of the button when clicked
*labels*
A list of :class:`matplotlib.text.Text` instances
*circles*
A list of :class:`matplotlib.patches.Circle` instances
*value_selected*
A string listing the current value selected
Connect to the RadioButtons with the :meth:`on_clicked` method
"""
def __init__(self, ax, labels, active=0, activecolor='blue'):
"""
Add radio buttons to :class:`matplotlib.axes.Axes` instance *ax*
*labels*
A len(buttons) list of labels as strings
*active*
The index into labels for the button that is active
*activecolor*
The color of the button when clicked
"""
AxesWidget.__init__(self, ax)
self.activecolor = activecolor
self.value_selected = None
ax.set_xticks([])
ax.set_yticks([])
ax.set_navigate(False)
dy = 1. / (len(labels) + 1)
ys = np.linspace(1 - dy, dy, len(labels))
cnt = 0
axcolor = ax.get_facecolor()
self.labels = []
self.circles = []
for y, label in zip(ys, labels):
t = ax.text(0.25, y, label, transform=ax.transAxes,
horizontalalignment='left',
verticalalignment='center')
if cnt == active:
self.value_selected = label
facecolor = activecolor
else:
facecolor = axcolor
p = Circle(xy=(0.15, y), radius=0.05, edgecolor='black',
facecolor=facecolor, transform=ax.transAxes)
self.labels.append(t)
self.circles.append(p)
ax.add_patch(p)
cnt += 1
self.connect_event('button_press_event', self._clicked)
self.cnt = 0
self.observers = {}
def _clicked(self, event):
if self.ignore(event):
return
if event.button != 1:
return
if event.inaxes != self.ax:
return
xy = self.ax.transAxes.inverted().transform_point((event.x, event.y))
pclicked = np.array([xy[0], xy[1]])
def inside(p):
pcirc = np.array([p.center[0], p.center[1]])
d = pclicked - pcirc
return np.sqrt(np.dot(d, d)) < p.radius
for i, (p, t) in enumerate(zip(self.circles, self.labels)):
if t.get_window_extent().contains(event.x, event.y) or inside(p):
self.set_active(i)
break
else:
return
def set_active(self, index):
"""
Trigger which radio button to make active.
*index* is an index into the original label list
that this object was constructed with.
Raise ValueError if the index is invalid.
Callbacks will be triggered if :attr:`eventson` is True.
"""
if index < 0 or index >= len(self.labels):
raise ValueError("Invalid RadioButton index: %d" % index)
self.value_selected = self.labels[index].get_text()
for i, p in enumerate(self.circles):
if i == index:
color = self.activecolor
else:
color = self.ax.get_facecolor()
p.set_facecolor(color)
if self.drawon:
self.ax.figure.canvas.draw()
if not self.eventson:
return
for cid, func in six.iteritems(self.observers):
func(self.labels[index].get_text())
def on_clicked(self, func):
"""
When the button is clicked, call *func* with button label
A connection id is returned which can be used to disconnect
"""
cid = self.cnt
self.observers[cid] = func
self.cnt += 1
return cid
def disconnect(self, cid):
"""remove the observer with connection id *cid*"""
try:
del self.observers[cid]
except KeyError:
pass
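# Illustrative usage sketch (not part of the original module): recoloring a
# line from a RadioButtons group; ``value_selected`` tracks the active label.
# The colors used as labels are arbitrary.
def _example_radio_buttons():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    line, = ax.plot([0, 1], [0, 1], color='red')
    rax = fig.add_axes([0.02, 0.6, 0.18, 0.25])
    radio = RadioButtons(rax, ['red', 'green', 'blue'], active=0)
    def recolor(label):
        line.set_color(label)
        fig.canvas.draw_idle()
    radio.on_clicked(recolor)
    plt.show()
    return radio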
class SubplotTool(Widget):
"""
A tool to adjust the subplot params of a :class:`matplotlib.figure.Figure`.
"""
def __init__(self, targetfig, toolfig):
"""
*targetfig*
The figure instance to adjust.
*toolfig*
The figure instance to embed the subplot tool into. If
*None*, a default figure will be created. If you are using
this from the GUI
"""
# FIXME: The docstring seems to just abruptly end without...
self.targetfig = targetfig
toolfig.subplots_adjust(left=0.2, right=0.9)
class toolbarfmt:
def __init__(self, slider):
self.slider = slider
def __call__(self, x, y):
fmt = '%s=%s' % (self.slider.label.get_text(),
self.slider.valfmt)
return fmt % x
self.axleft = toolfig.add_subplot(711)
self.axleft.set_title('Click on slider to adjust subplot param')
self.axleft.set_navigate(False)
self.sliderleft = Slider(self.axleft, 'left',
0, 1, targetfig.subplotpars.left,
closedmax=False)
self.sliderleft.on_changed(self.funcleft)
self.axbottom = toolfig.add_subplot(712)
self.axbottom.set_navigate(False)
self.sliderbottom = Slider(self.axbottom,
'bottom', 0, 1,
targetfig.subplotpars.bottom,
closedmax=False)
self.sliderbottom.on_changed(self.funcbottom)
self.axright = toolfig.add_subplot(713)
self.axright.set_navigate(False)
self.sliderright = Slider(self.axright, 'right', 0, 1,
targetfig.subplotpars.right,
closedmin=False)
self.sliderright.on_changed(self.funcright)
self.axtop = toolfig.add_subplot(714)
self.axtop.set_navigate(False)
self.slidertop = Slider(self.axtop, 'top', 0, 1,
targetfig.subplotpars.top,
closedmin=False)
self.slidertop.on_changed(self.functop)
self.axwspace = toolfig.add_subplot(715)
self.axwspace.set_navigate(False)
self.sliderwspace = Slider(self.axwspace, 'wspace',
0, 1, targetfig.subplotpars.wspace,
closedmax=False)
self.sliderwspace.on_changed(self.funcwspace)
self.axhspace = toolfig.add_subplot(716)
self.axhspace.set_navigate(False)
self.sliderhspace = Slider(self.axhspace, 'hspace',
0, 1, targetfig.subplotpars.hspace,
closedmax=False)
self.sliderhspace.on_changed(self.funchspace)
# constraints
self.sliderleft.slidermax = self.sliderright
self.sliderright.slidermin = self.sliderleft
self.sliderbottom.slidermax = self.slidertop
self.slidertop.slidermin = self.sliderbottom
bax = toolfig.add_axes([0.8, 0.05, 0.15, 0.075])
self.buttonreset = Button(bax, 'Reset')
sliders = (self.sliderleft, self.sliderbottom, self.sliderright,
self.slidertop, self.sliderwspace, self.sliderhspace,)
def func(event):
thisdrawon = self.drawon
self.drawon = False
# store the drawon state of each slider
bs = []
for slider in sliders:
bs.append(slider.drawon)
slider.drawon = False
# reset the slider to the initial position
for slider in sliders:
slider.reset()
# reset drawon
for slider, b in zip(sliders, bs):
slider.drawon = b
# draw the canvas
self.drawon = thisdrawon
if self.drawon:
toolfig.canvas.draw()
self.targetfig.canvas.draw()
# during reset there can be a temporary invalid state
# depending on the order of the reset so we turn off
# validation for the resetting
validate = toolfig.subplotpars.validate
toolfig.subplotpars.validate = False
self.buttonreset.on_clicked(func)
toolfig.subplotpars.validate = validate
def funcleft(self, val):
self.targetfig.subplots_adjust(left=val)
if self.drawon:
self.targetfig.canvas.draw()
def funcright(self, val):
self.targetfig.subplots_adjust(right=val)
if self.drawon:
self.targetfig.canvas.draw()
def funcbottom(self, val):
self.targetfig.subplots_adjust(bottom=val)
if self.drawon:
self.targetfig.canvas.draw()
def functop(self, val):
self.targetfig.subplots_adjust(top=val)
if self.drawon:
self.targetfig.canvas.draw()
def funcwspace(self, val):
self.targetfig.subplots_adjust(wspace=val)
if self.drawon:
self.targetfig.canvas.draw()
def funchspace(self, val):
self.targetfig.subplots_adjust(hspace=val)
if self.drawon:
self.targetfig.canvas.draw()
class Cursor(AxesWidget):
"""
A horizontal and vertical line that spans the axes and moves with
the pointer. You can turn off the hline or vline respectively with
the following attributes:
*horizOn*
Controls the visibility of the horizontal line
*vertOn*
        Controls the visibility of the vertical line
and the visibility of the cursor itself with the *visible* attribute.
For the cursor to remain responsive you must keep a reference to
it.
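    Example usage (a minimal sketch; assumes an interactive backend, and the
    plotted data are arbitrary)::
        import matplotlib.pyplot as plt
        from matplotlib.widgets import Cursor
        fig, ax = plt.subplots()
        ax.plot([0, 1, 2], [0, 1, 0])
        # keep a reference so the cursor stays responsive
        cursor = Cursor(ax, useblit=True, color='red', linewidth=1)
        plt.show()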
"""
def __init__(self, ax, horizOn=True, vertOn=True, useblit=False,
**lineprops):
"""
Add a cursor to *ax*. If ``useblit=True``, use the backend-dependent
blitting features for faster updates. *lineprops* is a dictionary of
line properties.
"""
AxesWidget.__init__(self, ax)
self.connect_event('motion_notify_event', self.onmove)
self.connect_event('draw_event', self.clear)
self.visible = True
self.horizOn = horizOn
self.vertOn = vertOn
self.useblit = useblit and self.canvas.supports_blit
if self.useblit:
lineprops['animated'] = True
self.lineh = ax.axhline(ax.get_ybound()[0], visible=False, **lineprops)
self.linev = ax.axvline(ax.get_xbound()[0], visible=False, **lineprops)
self.background = None
self.needclear = False
def clear(self, event):
"""clear the cursor"""
if self.ignore(event):
return
if self.useblit:
self.background = self.canvas.copy_from_bbox(self.ax.bbox)
self.linev.set_visible(False)
self.lineh.set_visible(False)
def onmove(self, event):
"""on mouse motion draw the cursor if visible"""
if self.ignore(event):
return
if not self.canvas.widgetlock.available(self):
return
if event.inaxes != self.ax:
self.linev.set_visible(False)
self.lineh.set_visible(False)
if self.needclear:
self.canvas.draw()
self.needclear = False
return
self.needclear = True
if not self.visible:
return
self.linev.set_xdata((event.xdata, event.xdata))
self.lineh.set_ydata((event.ydata, event.ydata))
self.linev.set_visible(self.visible and self.vertOn)
self.lineh.set_visible(self.visible and self.horizOn)
self._update()
def _update(self):
if self.useblit:
if self.background is not None:
self.canvas.restore_region(self.background)
self.ax.draw_artist(self.linev)
self.ax.draw_artist(self.lineh)
self.canvas.blit(self.ax.bbox)
else:
self.canvas.draw_idle()
return False
class MultiCursor(Widget):
"""
Provide a vertical (default) and/or horizontal line cursor shared between
multiple axes.
For the cursor to remain responsive you must keep a reference to
it.
Example usage::
from matplotlib.widgets import MultiCursor
from pylab import figure, show, np
t = np.arange(0.0, 2.0, 0.01)
s1 = np.sin(2*np.pi*t)
s2 = np.sin(4*np.pi*t)
fig = figure()
ax1 = fig.add_subplot(211)
ax1.plot(t, s1)
ax2 = fig.add_subplot(212, sharex=ax1)
ax2.plot(t, s2)
multi = MultiCursor(fig.canvas, (ax1, ax2), color='r', lw=1,
horizOn=False, vertOn=True)
show()
"""
def __init__(self, canvas, axes, useblit=True, horizOn=False, vertOn=True,
**lineprops):
self.canvas = canvas
self.axes = axes
self.horizOn = horizOn
self.vertOn = vertOn
xmin, xmax = axes[-1].get_xlim()
ymin, ymax = axes[-1].get_ylim()
xmid = 0.5 * (xmin + xmax)
ymid = 0.5 * (ymin + ymax)
self.visible = True
self.useblit = useblit and self.canvas.supports_blit
self.background = None
self.needclear = False
if self.useblit:
lineprops['animated'] = True
if vertOn:
self.vlines = [ax.axvline(xmid, visible=False, **lineprops)
for ax in axes]
else:
self.vlines = []
if horizOn:
self.hlines = [ax.axhline(ymid, visible=False, **lineprops)
for ax in axes]
else:
self.hlines = []
self.connect()
def connect(self):
"""connect events"""
self._cidmotion = self.canvas.mpl_connect('motion_notify_event',
self.onmove)
self._ciddraw = self.canvas.mpl_connect('draw_event', self.clear)
def disconnect(self):
"""disconnect events"""
self.canvas.mpl_disconnect(self._cidmotion)
self.canvas.mpl_disconnect(self._ciddraw)
def clear(self, event):
"""clear the cursor"""
if self.ignore(event):
return
if self.useblit:
self.background = (
self.canvas.copy_from_bbox(self.canvas.figure.bbox))
for line in self.vlines + self.hlines:
line.set_visible(False)
def onmove(self, event):
if self.ignore(event):
return
if event.inaxes is None:
return
if not self.canvas.widgetlock.available(self):
return
self.needclear = True
if not self.visible:
return
if self.vertOn:
for line in self.vlines:
line.set_xdata((event.xdata, event.xdata))
line.set_visible(self.visible)
if self.horizOn:
for line in self.hlines:
line.set_ydata((event.ydata, event.ydata))
line.set_visible(self.visible)
self._update()
def _update(self):
if self.useblit:
if self.background is not None:
self.canvas.restore_region(self.background)
if self.vertOn:
for ax, line in zip(self.axes, self.vlines):
ax.draw_artist(line)
if self.horizOn:
for ax, line in zip(self.axes, self.hlines):
ax.draw_artist(line)
self.canvas.blit(self.canvas.figure.bbox)
else:
self.canvas.draw_idle()
class _SelectorWidget(AxesWidget):
def __init__(self, ax, onselect, useblit=False, button=None,
state_modifier_keys=None):
AxesWidget.__init__(self, ax)
self.visible = True
self.onselect = onselect
self.useblit = useblit and self.canvas.supports_blit
self.connect_default_events()
self.state_modifier_keys = dict(move=' ', clear='escape',
square='shift', center='control')
self.state_modifier_keys.update(state_modifier_keys or {})
self.background = None
self.artists = []
if isinstance(button, int):
self.validButtons = [button]
else:
self.validButtons = button
# will save the data (position at mouseclick)
self.eventpress = None
# will save the data (pos. at mouserelease)
self.eventrelease = None
self._prev_event = None
self.state = set()
def set_active(self, active):
AxesWidget.set_active(self, active)
if active:
self.update_background(None)
def update_background(self, event):
"""force an update of the background"""
# If you add a call to `ignore` here, you'll want to check edge case:
# `release` can call a draw event even when `ignore` is True.
if self.useblit:
self.background = self.canvas.copy_from_bbox(self.ax.bbox)
def connect_default_events(self):
"""Connect the major canvas events to methods."""
self.connect_event('motion_notify_event', self.onmove)
self.connect_event('button_press_event', self.press)
self.connect_event('button_release_event', self.release)
self.connect_event('draw_event', self.update_background)
self.connect_event('key_press_event', self.on_key_press)
self.connect_event('key_release_event', self.on_key_release)
self.connect_event('scroll_event', self.on_scroll)
def ignore(self, event):
"""return *True* if *event* should be ignored"""
if not self.active or not self.ax.get_visible():
return True
# If canvas was locked
if not self.canvas.widgetlock.available(self):
return True
if not hasattr(event, 'button'):
event.button = None
# Only do rectangle selection if event was triggered
# with a desired button
if self.validButtons is not None:
if event.button not in self.validButtons:
return True
# If no button was pressed yet ignore the event if it was out
# of the axes
if self.eventpress is None:
return event.inaxes != self.ax
# If a button was pressed, check if the release-button is the
# same.
if event.button == self.eventpress.button:
return False
        # Otherwise the event was triggered with a different button than the
        # press (or is outside the axes), so ignore it.
return (event.inaxes != self.ax or
event.button != self.eventpress.button)
def update(self):
"""draw using newfangled blit or oldfangled draw depending on
useblit
"""
if not self.ax.get_visible():
return False
if self.useblit:
if self.background is not None:
self.canvas.restore_region(self.background)
for artist in self.artists:
self.ax.draw_artist(artist)
self.canvas.blit(self.ax.bbox)
else:
self.canvas.draw_idle()
return False
def _get_data(self, event):
"""Get the xdata and ydata for event, with limits"""
if event.xdata is None:
return None, None
x0, x1 = self.ax.get_xbound()
y0, y1 = self.ax.get_ybound()
xdata = max(x0, event.xdata)
xdata = min(x1, xdata)
ydata = max(y0, event.ydata)
ydata = min(y1, ydata)
return xdata, ydata
def _clean_event(self, event):
"""Clean up an event
Use prev event if there is no xdata
Limit the xdata and ydata to the axes limits
Set the prev event
"""
if event.xdata is None:
event = self._prev_event
else:
event = copy.copy(event)
event.xdata, event.ydata = self._get_data(event)
self._prev_event = event
return event
def press(self, event):
"""Button press handler and validator"""
if not self.ignore(event):
event = self._clean_event(event)
self.eventpress = event
self._prev_event = event
key = event.key or ''
key = key.replace('ctrl', 'control')
# move state is locked in on a button press
if key == self.state_modifier_keys['move']:
self.state.add('move')
self._press(event)
return True
return False
def _press(self, event):
"""Button press handler"""
pass
def release(self, event):
"""Button release event handler and validator"""
if not self.ignore(event) and self.eventpress:
event = self._clean_event(event)
self.eventrelease = event
self._release(event)
self.eventpress = None
self.eventrelease = None
self.state.discard('move')
return True
return False
def _release(self, event):
"""Button release event handler"""
pass
def onmove(self, event):
"""Cursor move event handler and validator"""
if not self.ignore(event) and self.eventpress:
event = self._clean_event(event)
self._onmove(event)
return True
return False
def _onmove(self, event):
"""Cursor move event handler"""
pass
def on_scroll(self, event):
"""Mouse scroll event handler and validator"""
if not self.ignore(event):
self._on_scroll(event)
def _on_scroll(self, event):
"""Mouse scroll event handler"""
pass
def on_key_press(self, event):
"""Key press event handler and validator for all selection widgets"""
if self.active:
key = event.key or ''
key = key.replace('ctrl', 'control')
if key == self.state_modifier_keys['clear']:
for artist in self.artists:
artist.set_visible(False)
self.update()
return
for (state, modifier) in self.state_modifier_keys.items():
if modifier in key:
self.state.add(state)
self._on_key_press(event)
def _on_key_press(self, event):
"""Key press event handler - use for widget-specific key press actions.
"""
pass
def on_key_release(self, event):
"""Key release event handler and validator"""
if self.active:
key = event.key or ''
for (state, modifier) in self.state_modifier_keys.items():
if modifier in key:
self.state.discard(state)
self._on_key_release(event)
def _on_key_release(self, event):
"""Key release event handler"""
pass
def set_visible(self, visible):
""" Set the visibility of our artists """
self.visible = visible
for artist in self.artists:
artist.set_visible(visible)
class SpanSelector(_SelectorWidget):
"""
Visually select a min/max range on a single axis and call a function with
those values.
To guarantee that the selector remains responsive, keep a reference to it.
In order to turn off the SpanSelector, set `span_selector.active=False`. To
turn it back on, set `span_selector.active=True`.
Parameters
----------
ax : :class:`matplotlib.axes.Axes` object
onselect : func(min, max), min/max are floats
direction : "horizontal" or "vertical"
The axis along which to draw the span selector
minspan : float, default is None
If selection is less than *minspan*, do not call *onselect*
useblit : bool, default is False
If True, use the backend-dependent blitting features for faster
canvas updates.
rectprops : dict, default is None
Dictionary of :class:`matplotlib.patches.Patch` properties
onmove_callback : func(min, max), min/max are floats, default is None
Called on mouse move while the span is being selected
span_stays : bool, default is False
If True, the span stays visible after the mouse is released
button : int or list of ints
Determines which mouse buttons activate the span selector
1 = left mouse button\n
2 = center mouse button (scroll wheel)\n
3 = right mouse button\n
Examples
--------
>>> import matplotlib.pyplot as plt
>>> import matplotlib.widgets as mwidgets
>>> fig, ax = plt.subplots()
>>> ax.plot([1, 2, 3], [10, 50, 100])
>>> def onselect(vmin, vmax):
print(vmin, vmax)
>>> rectprops = dict(facecolor='blue', alpha=0.5)
>>> span = mwidgets.SpanSelector(ax, onselect, 'horizontal',
rectprops=rectprops)
>>> fig.show()
See also: :ref:`sphx_glr_gallery_widgets_span_selector.py`
"""
def __init__(self, ax, onselect, direction, minspan=None, useblit=False,
rectprops=None, onmove_callback=None, span_stays=False,
button=None):
_SelectorWidget.__init__(self, ax, onselect, useblit=useblit,
button=button)
if rectprops is None:
rectprops = dict(facecolor='red', alpha=0.5)
rectprops['animated'] = self.useblit
if direction not in ['horizontal', 'vertical']:
raise ValueError("direction must be 'horizontal' or 'vertical'")
self.direction = direction
self.rect = None
self.pressv = None
self.rectprops = rectprops
self.onmove_callback = onmove_callback
self.minspan = minspan
self.span_stays = span_stays
# Needed when dragging out of axes
self.prev = (0, 0)
# Reset canvas so that `new_axes` connects events.
self.canvas = None
self.new_axes(ax)
def new_axes(self, ax):
"""Set SpanSelector to operate on a new Axes"""
self.ax = ax
if self.canvas is not ax.figure.canvas:
if self.canvas is not None:
self.disconnect_events()
self.canvas = ax.figure.canvas
self.connect_default_events()
if self.direction == 'horizontal':
trans = blended_transform_factory(self.ax.transData,
self.ax.transAxes)
w, h = 0, 1
else:
trans = blended_transform_factory(self.ax.transAxes,
self.ax.transData)
w, h = 1, 0
self.rect = Rectangle((0, 0), w, h,
transform=trans,
visible=False,
**self.rectprops)
if self.span_stays:
self.stay_rect = Rectangle((0, 0), w, h,
transform=trans,
visible=False,
**self.rectprops)
self.stay_rect.set_animated(False)
self.ax.add_patch(self.stay_rect)
self.ax.add_patch(self.rect)
self.artists = [self.rect]
def ignore(self, event):
"""return *True* if *event* should be ignored"""
return _SelectorWidget.ignore(self, event) or not self.visible
def _press(self, event):
"""on button press event"""
self.rect.set_visible(self.visible)
if self.span_stays:
self.stay_rect.set_visible(False)
# really force a draw so that the stay rect is not in
# the blit background
if self.useblit:
self.canvas.draw()
xdata, ydata = self._get_data(event)
if self.direction == 'horizontal':
self.pressv = xdata
else:
self.pressv = ydata
return False
def _release(self, event):
"""on button release event"""
if self.pressv is None:
return
self.buttonDown = False
self.rect.set_visible(False)
if self.span_stays:
self.stay_rect.set_x(self.rect.get_x())
self.stay_rect.set_y(self.rect.get_y())
self.stay_rect.set_width(self.rect.get_width())
self.stay_rect.set_height(self.rect.get_height())
self.stay_rect.set_visible(True)
self.canvas.draw_idle()
vmin = self.pressv
xdata, ydata = self._get_data(event)
if self.direction == 'horizontal':
vmax = xdata or self.prev[0]
else:
vmax = ydata or self.prev[1]
if vmin > vmax:
vmin, vmax = vmax, vmin
span = vmax - vmin
if self.minspan is not None and span < self.minspan:
return
self.onselect(vmin, vmax)
self.pressv = None
return False
def _onmove(self, event):
"""on motion notify event"""
if self.pressv is None:
return
x, y = self._get_data(event)
if x is None:
return
self.prev = x, y
if self.direction == 'horizontal':
v = x
else:
v = y
minv, maxv = v, self.pressv
if minv > maxv:
minv, maxv = maxv, minv
if self.direction == 'horizontal':
self.rect.set_x(minv)
self.rect.set_width(maxv - minv)
else:
self.rect.set_y(minv)
self.rect.set_height(maxv - minv)
if self.onmove_callback is not None:
vmin = self.pressv
xdata, ydata = self._get_data(event)
if self.direction == 'horizontal':
vmax = xdata or self.prev[0]
else:
vmax = ydata or self.prev[1]
if vmin > vmax:
vmin, vmax = vmax, vmin
self.onmove_callback(vmin, vmax)
self.update()
return False
class ToolHandles(object):
"""Control handles for canvas tools.
Parameters
----------
ax : :class:`matplotlib.axes.Axes`
Matplotlib axes where tool handles are displayed.
x, y : 1D arrays
Coordinates of control handles.
marker : str
Shape of marker used to display handle. See `matplotlib.pyplot.plot`.
marker_props : dict
Additional marker properties. See :class:`matplotlib.lines.Line2D`.
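    Example (a minimal sketch; ToolHandles is normally created internally by
    the selector widgets, and the handle coordinates below are arbitrary)::
        import matplotlib.pyplot as plt
        from matplotlib.widgets import ToolHandles
        fig, ax = plt.subplots()
        handles = ToolHandles(ax, x=[0.25, 0.75], y=[0.5, 0.5], useblit=False)
        handles.set_visible(True)
        idx, dist = handles.closest(100, 100)  # nearest handle and its pixel distance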
"""
def __init__(self, ax, x, y, marker='o', marker_props=None, useblit=True):
self.ax = ax
props = dict(marker=marker, markersize=7, mfc='w', ls='none',
alpha=0.5, visible=False, label='_nolegend_')
props.update(marker_props if marker_props is not None else {})
self._markers = Line2D(x, y, animated=useblit, **props)
self.ax.add_line(self._markers)
self.artist = self._markers
@property
def x(self):
return self._markers.get_xdata()
@property
def y(self):
return self._markers.get_ydata()
def set_data(self, pts, y=None):
"""Set x and y positions of handles"""
if y is not None:
x = pts
pts = np.array([x, y])
self._markers.set_data(pts)
def set_visible(self, val):
self._markers.set_visible(val)
def set_animated(self, val):
self._markers.set_animated(val)
def closest(self, x, y):
"""Return index and pixel distance to closest index."""
pts = np.transpose((self.x, self.y))
# Transform data coordinates to pixel coordinates.
pts = self.ax.transData.transform(pts)
diff = pts - ((x, y))
if diff.ndim == 2:
dist = np.sqrt(np.sum(diff ** 2, axis=1))
return np.argmin(dist), np.min(dist)
else:
return 0, np.sqrt(np.sum(diff ** 2))
class RectangleSelector(_SelectorWidget):
"""
Select a rectangular region of an axes.
For the cursor to remain responsive you must keep a reference to
it.
Example usage::
from matplotlib.widgets import RectangleSelector
from pylab import *
def onselect(eclick, erelease):
'eclick and erelease are matplotlib events at press and release'
print(' startposition : (%f, %f)' % (eclick.xdata, eclick.ydata))
print(' endposition : (%f, %f)' % (erelease.xdata, erelease.ydata))
print(' used button : ', eclick.button)
def toggle_selector(event):
print(' Key pressed.')
if event.key in ['Q', 'q'] and toggle_selector.RS.active:
print(' RectangleSelector deactivated.')
toggle_selector.RS.set_active(False)
if event.key in ['A', 'a'] and not toggle_selector.RS.active:
print(' RectangleSelector activated.')
toggle_selector.RS.set_active(True)
x = arange(100)/(99.0)
y = sin(x)
        fig = figure()
ax = subplot(111)
ax.plot(x,y)
toggle_selector.RS = RectangleSelector(ax, onselect, drawtype='line')
connect('key_press_event', toggle_selector)
show()
"""
_shape_klass = Rectangle
def __init__(self, ax, onselect, drawtype='box',
minspanx=None, minspany=None, useblit=False,
lineprops=None, rectprops=None, spancoords='data',
button=None, maxdist=10, marker_props=None,
interactive=False, state_modifier_keys=None):
"""
Create a selector in *ax*. When a selection is made, clear
the span and call onselect with::
onselect(pos_1, pos_2)
and clear the drawn box/line. The ``pos_1`` and ``pos_2`` are
arrays of length 2 containing the x- and y-coordinate.
If *minspanx* is not *None* then events smaller than *minspanx*
in x direction are ignored (it's the same for y).
The rectangle is drawn with *rectprops*; default::
rectprops = dict(facecolor='red', edgecolor = 'black',
alpha=0.2, fill=True)
The line is drawn with *lineprops*; default::
lineprops = dict(color='black', linestyle='-',
linewidth = 2, alpha=0.5)
Use *drawtype* if you want the mouse to draw a line,
a box or nothing between click and actual position by setting
``drawtype = 'line'``, ``drawtype='box'`` or ``drawtype = 'none'``.
Drawing a line would result in a line from vertex A to vertex C in
a rectangle ABCD.
*spancoords* is one of 'data' or 'pixels'. If 'data', *minspanx*
        and *minspany* will be interpreted in the same coordinates as
the x and y axis. If 'pixels', they are in pixels.
*button* is a list of integers indicating which mouse buttons should
be used for rectangle selection. You can also specify a single
integer if only a single button is desired. Default is *None*,
which does not limit which button can be used.
Note, typically:
1 = left mouse button
2 = center mouse button (scroll wheel)
3 = right mouse button
        *interactive* will draw a set of handles and allow you to interact
with the widget after it is drawn.
*state_modifier_keys* are keyboard modifiers that affect the behavior
of the widget.
The defaults are:
dict(move=' ', clear='escape', square='shift', center='ctrl')
Keyboard modifiers, which:
'move': Move the existing shape.
'clear': Clear the current shape.
'square': Makes the shape square.
'center': Make the initial point the center of the shape.
'square' and 'center' can be combined.
"""
_SelectorWidget.__init__(self, ax, onselect, useblit=useblit,
button=button,
state_modifier_keys=state_modifier_keys)
self.to_draw = None
self.visible = True
self.interactive = interactive
if drawtype == 'none':
drawtype = 'line' # draw a line but make it
self.visible = False # invisible
if drawtype == 'box':
if rectprops is None:
rectprops = dict(facecolor='red', edgecolor='black',
alpha=0.2, fill=True)
rectprops['animated'] = self.useblit
self.rectprops = rectprops
self.to_draw = self._shape_klass((0, 0), 0, 1, visible=False,
**self.rectprops)
self.ax.add_patch(self.to_draw)
if drawtype == 'line':
if lineprops is None:
lineprops = dict(color='black', linestyle='-',
linewidth=2, alpha=0.5)
lineprops['animated'] = self.useblit
self.lineprops = lineprops
self.to_draw = Line2D([0, 0], [0, 0], visible=False,
**self.lineprops)
self.ax.add_line(self.to_draw)
self.minspanx = minspanx
self.minspany = minspany
if spancoords not in ('data', 'pixels'):
raise ValueError("'spancoords' must be 'data' or 'pixels'")
self.spancoords = spancoords
self.drawtype = drawtype
self.maxdist = maxdist
if rectprops is None:
props = dict(mec='r')
else:
props = dict(mec=rectprops.get('edgecolor', 'r'))
self._corner_order = ['NW', 'NE', 'SE', 'SW']
xc, yc = self.corners
self._corner_handles = ToolHandles(self.ax, xc, yc, marker_props=props,
useblit=self.useblit)
self._edge_order = ['W', 'N', 'E', 'S']
xe, ye = self.edge_centers
self._edge_handles = ToolHandles(self.ax, xe, ye, marker='s',
marker_props=props,
useblit=self.useblit)
xc, yc = self.center
self._center_handle = ToolHandles(self.ax, [xc], [yc], marker='s',
marker_props=props,
useblit=self.useblit)
self.active_handle = None
self.artists = [self.to_draw, self._center_handle.artist,
self._corner_handles.artist,
self._edge_handles.artist]
if not self.interactive:
self.artists = [self.to_draw]
self._extents_on_press = None
def _press(self, event):
"""on button press event"""
        # make the drawn box/line visible and get the click coordinates,
        # button, ...
if self.interactive and self.to_draw.get_visible():
self._set_active_handle(event)
else:
self.active_handle = None
if self.active_handle is None or not self.interactive:
# Clear previous rectangle before drawing new rectangle.
self.update()
self.set_visible(self.visible)
def _release(self, event):
"""on button release event"""
if not self.interactive:
self.to_draw.set_visible(False)
# update the eventpress and eventrelease with the resulting extents
x1, x2, y1, y2 = self.extents
self.eventpress.xdata = x1
self.eventpress.ydata = y1
xy1 = self.ax.transData.transform_point([x1, y1])
self.eventpress.x, self.eventpress.y = xy1
self.eventrelease.xdata = x2
self.eventrelease.ydata = y2
xy2 = self.ax.transData.transform_point([x2, y2])
self.eventrelease.x, self.eventrelease.y = xy2
if self.spancoords == 'data':
xmin, ymin = self.eventpress.xdata, self.eventpress.ydata
xmax, ymax = self.eventrelease.xdata, self.eventrelease.ydata
            # calculate dimensions of box or line and get the values in the
            # right order
elif self.spancoords == 'pixels':
xmin, ymin = self.eventpress.x, self.eventpress.y
xmax, ymax = self.eventrelease.x, self.eventrelease.y
else:
raise ValueError('spancoords must be "data" or "pixels"')
if xmin > xmax:
xmin, xmax = xmax, xmin
if ymin > ymax:
ymin, ymax = ymax, ymin
spanx = xmax - xmin
spany = ymax - ymin
xproblems = self.minspanx is not None and spanx < self.minspanx
yproblems = self.minspany is not None and spany < self.minspany
# check if drawn distance (if it exists) is not too small in
# either x or y-direction
if self.drawtype != 'none' and (xproblems or yproblems):
for artist in self.artists:
artist.set_visible(False)
self.update()
return
# call desired function
self.onselect(self.eventpress, self.eventrelease)
self.update()
return False
def _onmove(self, event):
"""on motion notify event if box/line is wanted"""
# resize an existing shape
if self.active_handle and not self.active_handle == 'C':
x1, x2, y1, y2 = self._extents_on_press
if self.active_handle in ['E', 'W'] + self._corner_order:
x2 = event.xdata
if self.active_handle in ['N', 'S'] + self._corner_order:
y2 = event.ydata
# move existing shape
elif (('move' in self.state or self.active_handle == 'C')
and self._extents_on_press is not None):
x1, x2, y1, y2 = self._extents_on_press
dx = event.xdata - self.eventpress.xdata
dy = event.ydata - self.eventpress.ydata
x1 += dx
x2 += dx
y1 += dy
y2 += dy
# new shape
else:
center = [self.eventpress.xdata, self.eventpress.ydata]
center_pix = [self.eventpress.x, self.eventpress.y]
dx = (event.xdata - center[0]) / 2.
dy = (event.ydata - center[1]) / 2.
# square shape
if 'square' in self.state:
dx_pix = abs(event.x - center_pix[0])
dy_pix = abs(event.y - center_pix[1])
if not dx_pix:
return
maxd = max(abs(dx_pix), abs(dy_pix))
if abs(dx_pix) < maxd:
dx *= maxd / (abs(dx_pix) + 1e-6)
if abs(dy_pix) < maxd:
dy *= maxd / (abs(dy_pix) + 1e-6)
# from center
if 'center' in self.state:
dx *= 2
dy *= 2
# from corner
else:
center[0] += dx
center[1] += dy
x1, x2, y1, y2 = (center[0] - dx, center[0] + dx,
center[1] - dy, center[1] + dy)
self.extents = x1, x2, y1, y2
@property
def _rect_bbox(self):
if self.drawtype == 'box':
x0 = self.to_draw.get_x()
y0 = self.to_draw.get_y()
width = self.to_draw.get_width()
height = self.to_draw.get_height()
return x0, y0, width, height
else:
x, y = self.to_draw.get_data()
x0, x1 = min(x), max(x)
y0, y1 = min(y), max(y)
return x0, y0, x1 - x0, y1 - y0
@property
def corners(self):
"""Corners of rectangle from lower left, moving clockwise."""
x0, y0, width, height = self._rect_bbox
xc = x0, x0 + width, x0 + width, x0
yc = y0, y0, y0 + height, y0 + height
return xc, yc
@property
def edge_centers(self):
"""Midpoint of rectangle edges from left, moving clockwise."""
x0, y0, width, height = self._rect_bbox
w = width / 2.
h = height / 2.
xe = x0, x0 + w, x0 + width, x0 + w
ye = y0 + h, y0, y0 + h, y0 + height
return xe, ye
@property
def center(self):
"""Center of rectangle"""
x0, y0, width, height = self._rect_bbox
return x0 + width / 2., y0 + height / 2.
@property
def extents(self):
"""Return (xmin, xmax, ymin, ymax)."""
x0, y0, width, height = self._rect_bbox
xmin, xmax = sorted([x0, x0 + width])
ymin, ymax = sorted([y0, y0 + height])
return xmin, xmax, ymin, ymax
@extents.setter
def extents(self, extents):
# Update displayed shape
self.draw_shape(extents)
# Update displayed handles
self._corner_handles.set_data(*self.corners)
self._edge_handles.set_data(*self.edge_centers)
self._center_handle.set_data(*self.center)
self.set_visible(self.visible)
self.update()
def draw_shape(self, extents):
x0, x1, y0, y1 = extents
xmin, xmax = sorted([x0, x1])
ymin, ymax = sorted([y0, y1])
xlim = sorted(self.ax.get_xlim())
ylim = sorted(self.ax.get_ylim())
xmin = max(xlim[0], xmin)
ymin = max(ylim[0], ymin)
xmax = min(xmax, xlim[1])
ymax = min(ymax, ylim[1])
if self.drawtype == 'box':
self.to_draw.set_x(xmin)
self.to_draw.set_y(ymin)
self.to_draw.set_width(xmax - xmin)
self.to_draw.set_height(ymax - ymin)
elif self.drawtype == 'line':
self.to_draw.set_data([xmin, xmax], [ymin, ymax])
def _set_active_handle(self, event):
"""Set active handle based on the location of the mouse event"""
# Note: event.xdata/ydata in data coordinates, event.x/y in pixels
c_idx, c_dist = self._corner_handles.closest(event.x, event.y)
e_idx, e_dist = self._edge_handles.closest(event.x, event.y)
m_idx, m_dist = self._center_handle.closest(event.x, event.y)
if 'move' in self.state:
self.active_handle = 'C'
self._extents_on_press = self.extents
# Set active handle as closest handle, if mouse click is close enough.
elif m_dist < self.maxdist * 2:
self.active_handle = 'C'
elif c_dist > self.maxdist and e_dist > self.maxdist:
self.active_handle = None
return
elif c_dist < e_dist:
self.active_handle = self._corner_order[c_idx]
else:
self.active_handle = self._edge_order[e_idx]
# Save coordinates of rectangle at the start of handle movement.
x1, x2, y1, y2 = self.extents
# Switch variables so that only x2 and/or y2 are updated on move.
if self.active_handle in ['W', 'SW', 'NW']:
x1, x2 = x2, event.xdata
if self.active_handle in ['N', 'NW', 'NE']:
y1, y2 = y2, event.ydata
self._extents_on_press = x1, x2, y1, y2
@property
def geometry(self):
"""
Returns numpy.ndarray of shape (2,5) containing
x (``RectangleSelector.geometry[1,:]``) and
y (``RectangleSelector.geometry[0,:]``)
coordinates of the four corners of the rectangle starting
and ending in the top left corner.
"""
if hasattr(self.to_draw, 'get_verts'):
xfm = self.ax.transData.inverted()
y, x = xfm.transform(self.to_draw.get_verts()).T
return np.array([x, y])
else:
return np.array(self.to_draw.get_data())
class EllipseSelector(RectangleSelector):
"""
Select an elliptical region of an axes.
For the cursor to remain responsive you must keep a reference to
it.
Example usage::
from matplotlib.widgets import EllipseSelector
from pylab import *
def onselect(eclick, erelease):
'eclick and erelease are matplotlib events at press and release'
print(' startposition : (%f, %f)' % (eclick.xdata, eclick.ydata))
print(' endposition : (%f, %f)' % (erelease.xdata, erelease.ydata))
print(' used button : ', eclick.button)
def toggle_selector(event):
print(' Key pressed.')
if event.key in ['Q', 'q'] and toggle_selector.ES.active:
print(' EllipseSelector deactivated.')
                toggle_selector.ES.set_active(False)
if event.key in ['A', 'a'] and not toggle_selector.ES.active:
print(' EllipseSelector activated.')
toggle_selector.ES.set_active(True)
x = arange(100)/(99.0)
y = sin(x)
        fig = figure()
ax = subplot(111)
ax.plot(x,y)
toggle_selector.ES = EllipseSelector(ax, onselect, drawtype='line')
connect('key_press_event', toggle_selector)
show()
"""
_shape_klass = Ellipse
def draw_shape(self, extents):
x1, x2, y1, y2 = extents
xmin, xmax = sorted([x1, x2])
ymin, ymax = sorted([y1, y2])
center = [x1 + (x2 - x1) / 2., y1 + (y2 - y1) / 2.]
a = (xmax - xmin) / 2.
b = (ymax - ymin) / 2.
if self.drawtype == 'box':
self.to_draw.center = center
self.to_draw.width = 2 * a
self.to_draw.height = 2 * b
else:
rad = np.deg2rad(np.arange(31) * 12)
x = a * np.cos(rad) + center[0]
y = b * np.sin(rad) + center[1]
self.to_draw.set_data(x, y)
@property
def _rect_bbox(self):
if self.drawtype == 'box':
x, y = self.to_draw.center
width = self.to_draw.width
height = self.to_draw.height
return x - width / 2., y - height / 2., width, height
else:
x, y = self.to_draw.get_data()
x0, x1 = min(x), max(x)
y0, y1 = min(y), max(y)
return x0, y0, x1 - x0, y1 - y0
class LassoSelector(_SelectorWidget):
"""
Selection curve of an arbitrary shape.
For the selector to remain responsive you must keep a reference to it.
The selected path can be used in conjunction with `~.Path.contains_point`
to select data points from an image.
In contrast to `Lasso`, `LassoSelector` is written with an interface
similar to `RectangleSelector` and `SpanSelector`, and will continue to
interact with the axes until disconnected.
Example usage::
ax = subplot(111)
ax.plot(x,y)
def onselect(verts):
print(verts)
lasso = LassoSelector(ax, onselect)
Parameters
----------
ax : :class:`~matplotlib.axes.Axes`
The parent axes for the widget.
onselect : function
Whenever the lasso is released, the *onselect* function is called and
passed the vertices of the selected path.
button : List[Int], optional
A list of integers indicating which mouse buttons should be used for
rectangle selection. You can also specify a single integer if only a
single button is desired. Default is ``None``, which does not limit
which button can be used.
Note, typically:
- 1 = left mouse button
- 2 = center mouse button (scroll wheel)
- 3 = right mouse button
"""
def __init__(self, ax, onselect=None, useblit=True, lineprops=None,
button=None):
_SelectorWidget.__init__(self, ax, onselect, useblit=useblit,
button=button)
self.verts = None
if lineprops is None:
lineprops = dict()
if useblit:
lineprops['animated'] = True
self.line = Line2D([], [], **lineprops)
self.line.set_visible(False)
self.ax.add_line(self.line)
self.artists = [self.line]
def onpress(self, event):
self.press(event)
def _press(self, event):
self.verts = [self._get_data(event)]
self.line.set_visible(True)
def onrelease(self, event):
self.release(event)
def _release(self, event):
if self.verts is not None:
self.verts.append(self._get_data(event))
self.onselect(self.verts)
self.line.set_data([[], []])
self.line.set_visible(False)
self.verts = None
def _onmove(self, event):
if self.verts is None:
return
self.verts.append(self._get_data(event))
self.line.set_data(list(zip(*self.verts)))
self.update()
class PolygonSelector(_SelectorWidget):
"""Select a polygon region of an axes.
Place vertices with each mouse click, and make the selection by completing
the polygon (clicking on the first vertex). Hold the *ctrl* key and click
and drag a vertex to reposition it (the *ctrl* key is not necessary if the
polygon has already been completed). Hold the *shift* key and click and
drag anywhere in the axes to move all vertices. Press the *esc* key to
start a new polygon.
For the selector to remain responsive you must keep a reference to
it.
Parameters
----------
ax : :class:`~matplotlib.axes.Axes`
The parent axes for the widget.
onselect : function
When a polygon is completed or modified after completion,
the `onselect` function is called and passed a list of the vertices as
``(xdata, ydata)`` tuples.
useblit : bool, optional
lineprops : dict, optional
The line for the sides of the polygon is drawn with the properties
given by `lineprops`. The default is ``dict(color='k', linestyle='-',
linewidth=2, alpha=0.5)``.
markerprops : dict, optional
The markers for the vertices of the polygon are drawn with the
properties given by `markerprops`. The default is ``dict(marker='o',
markersize=7, mec='k', mfc='k', alpha=0.5)``.
vertex_select_radius : float, optional
A vertex is selected (to complete the polygon or to move a vertex)
if the mouse click is within `vertex_select_radius` pixels of the
vertex. The default radius is 15 pixels.
See Also
--------
:ref:`sphx_glr_gallery_widgets_polygon_selector_demo.py`
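    Examples
    --------
    A minimal sketch (the plotted data are arbitrary; keep a reference to the
    selector so it stays responsive)::
        import matplotlib.pyplot as plt
        from matplotlib.widgets import PolygonSelector
        fig, ax = plt.subplots()
        ax.plot([0, 1, 2], [0, 1, 0])
        def onselect(verts):
            print(verts)  # list of (xdata, ydata) vertex tuples
        selector = PolygonSelector(ax, onselect)
        plt.show()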
"""
def __init__(self, ax, onselect, useblit=False,
lineprops=None, markerprops=None, vertex_select_radius=15):
# The state modifiers 'move', 'square', and 'center' are expected by
# _SelectorWidget but are not supported by PolygonSelector
# Note: could not use the existing 'move' state modifier in-place of
# 'move_all' because _SelectorWidget automatically discards 'move'
# from the state on button release.
state_modifier_keys = dict(clear='escape', move_vertex='control',
move_all='shift', move='not-applicable',
square='not-applicable',
center='not-applicable')
_SelectorWidget.__init__(self, ax, onselect, useblit=useblit,
state_modifier_keys=state_modifier_keys)
self._xs, self._ys = [0], [0]
self._polygon_completed = False
if lineprops is None:
lineprops = dict(color='k', linestyle='-', linewidth=2, alpha=0.5)
lineprops['animated'] = self.useblit
self.line = Line2D(self._xs, self._ys, **lineprops)
self.ax.add_line(self.line)
if markerprops is None:
markerprops = dict(mec='k', mfc=lineprops.get('color', 'k'))
self._polygon_handles = ToolHandles(self.ax, self._xs, self._ys,
useblit=self.useblit,
marker_props=markerprops)
self._active_handle_idx = -1
self.vertex_select_radius = vertex_select_radius
self.artists = [self.line, self._polygon_handles.artist]
self.set_visible(True)
def _press(self, event):
"""Button press event handler"""
# Check for selection of a tool handle.
if ((self._polygon_completed or 'move_vertex' in self.state)
and len(self._xs) > 0):
h_idx, h_dist = self._polygon_handles.closest(event.x, event.y)
if h_dist < self.vertex_select_radius:
self._active_handle_idx = h_idx
# Save the vertex positions at the time of the press event (needed to
# support the 'move_all' state modifier).
self._xs_at_press, self._ys_at_press = self._xs[:], self._ys[:]
def _release(self, event):
"""Button release event handler"""
# Release active tool handle.
if self._active_handle_idx >= 0:
self._active_handle_idx = -1
# Complete the polygon.
elif (len(self._xs) > 3
and self._xs[-1] == self._xs[0]
and self._ys[-1] == self._ys[0]):
self._polygon_completed = True
# Place new vertex.
elif (not self._polygon_completed
and 'move_all' not in self.state
and 'move_vertex' not in self.state):
self._xs.insert(-1, event.xdata)
self._ys.insert(-1, event.ydata)
if self._polygon_completed:
self.onselect(self.verts)
def onmove(self, event):
"""Cursor move event handler and validator"""
# Method overrides _SelectorWidget.onmove because the polygon selector
# needs to process the move callback even if there is no button press.
        # _SelectorWidget.onmove includes logic to ignore the move event if
# eventpress is None.
if not self.ignore(event):
event = self._clean_event(event)
self._onmove(event)
return True
return False
def _onmove(self, event):
"""Cursor move event handler"""
# Move the active vertex (ToolHandle).
if self._active_handle_idx >= 0:
idx = self._active_handle_idx
self._xs[idx], self._ys[idx] = event.xdata, event.ydata
# Also update the end of the polygon line if the first vertex is
# the active handle and the polygon is completed.
if idx == 0 and self._polygon_completed:
self._xs[-1], self._ys[-1] = event.xdata, event.ydata
# Move all vertices.
elif 'move_all' in self.state and self.eventpress:
dx = event.xdata - self.eventpress.xdata
dy = event.ydata - self.eventpress.ydata
for k in range(len(self._xs)):
self._xs[k] = self._xs_at_press[k] + dx
self._ys[k] = self._ys_at_press[k] + dy
# Do nothing if completed or waiting for a move.
elif (self._polygon_completed
or 'move_vertex' in self.state or 'move_all' in self.state):
return
# Position pending vertex.
else:
# Calculate distance to the start vertex.
x0, y0 = self.line.get_transform().transform((self._xs[0],
self._ys[0]))
v0_dist = np.sqrt((x0 - event.x) ** 2 + (y0 - event.y) ** 2)
# Lock on to the start vertex if near it and ready to complete.
if len(self._xs) > 3 and v0_dist < self.vertex_select_radius:
self._xs[-1], self._ys[-1] = self._xs[0], self._ys[0]
else:
self._xs[-1], self._ys[-1] = event.xdata, event.ydata
self._draw_polygon()
def _on_key_press(self, event):
"""Key press event handler"""
# Remove the pending vertex if entering the 'move_vertex' or
# 'move_all' mode
if (not self._polygon_completed
and ('move_vertex' in self.state or 'move_all' in self.state)):
self._xs, self._ys = self._xs[:-1], self._ys[:-1]
self._draw_polygon()
def _on_key_release(self, event):
"""Key release event handler"""
# Add back the pending vertex if leaving the 'move_vertex' or
# 'move_all' mode (by checking the released key)
if (not self._polygon_completed
and
(event.key == self.state_modifier_keys.get('move_vertex')
or event.key == self.state_modifier_keys.get('move_all'))):
self._xs.append(event.xdata)
self._ys.append(event.ydata)
self._draw_polygon()
# Reset the polygon if the released key is the 'clear' key.
elif event.key == self.state_modifier_keys.get('clear'):
event = self._clean_event(event)
self._xs, self._ys = [event.xdata], [event.ydata]
self._polygon_completed = False
self.set_visible(True)
def _draw_polygon(self):
"""Redraw the polygon based on the new vertex positions."""
self.line.set_data(self._xs, self._ys)
# Only show one tool handle at the start and end vertex of the polygon
# if the polygon is completed or the user is locked on to the start
# vertex.
if (self._polygon_completed
or (len(self._xs) > 3
and self._xs[-1] == self._xs[0]
and self._ys[-1] == self._ys[0])):
self._polygon_handles.set_data(self._xs[:-1], self._ys[:-1])
else:
self._polygon_handles.set_data(self._xs, self._ys)
self.update()
@property
def verts(self):
"""Get the polygon vertices.
Returns
-------
list
A list of the vertices of the polygon as ``(xdata, ydata)`` tuples.
"""
return list(zip(self._xs[:-1], self._ys[:-1]))
class Lasso(AxesWidget):
"""Selection curve of an arbitrary shape.
The selected path can be used in conjunction with
:func:`~matplotlib.path.Path.contains_point` to select data points
from an image.
Unlike :class:`LassoSelector`, this must be initialized with a starting
point `xy`, and the `Lasso` events are destroyed upon release.
Parameters
----------
ax : `~matplotlib.axes.Axes`
The parent axes for the widget.
xy : array
Coordinates of the start of the lasso.
callback : callable
Whenever the lasso is released, the `callback` function is called and
passed the vertices of the selected path.
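    Example (a minimal sketch; the Lasso is normally created inside a
    ``button_press_event`` callback so that *xy* is the press position)::
        import matplotlib.pyplot as plt
        from matplotlib.widgets import Lasso
        fig, ax = plt.subplots()
        ax.plot([0, 1, 2], [0, 1, 0])
        def onselect(verts):
            print(verts)
        def onpress(event):
            if event.inaxes is ax:
                # keep a reference so the widget survives until release
                ax._lasso = Lasso(ax, (event.xdata, event.ydata), onselect)
        fig.canvas.mpl_connect('button_press_event', onpress)
        plt.show()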
"""
def __init__(self, ax, xy, callback=None, useblit=True):
AxesWidget.__init__(self, ax)
self.useblit = useblit and self.canvas.supports_blit
if self.useblit:
self.background = self.canvas.copy_from_bbox(self.ax.bbox)
x, y = xy
self.verts = [(x, y)]
self.line = Line2D([x], [y], linestyle='-', color='black', lw=2)
self.ax.add_line(self.line)
self.callback = callback
self.connect_event('button_release_event', self.onrelease)
self.connect_event('motion_notify_event', self.onmove)
def onrelease(self, event):
if self.ignore(event):
return
if self.verts is not None:
self.verts.append((event.xdata, event.ydata))
if len(self.verts) > 2:
self.callback(self.verts)
self.ax.lines.remove(self.line)
self.verts = None
self.disconnect_events()
def onmove(self, event):
if self.ignore(event):
return
if self.verts is None:
return
if event.inaxes != self.ax:
return
if event.button != 1:
return
self.verts.append((event.xdata, event.ydata))
self.line.set_data(list(zip(*self.verts)))
if self.useblit:
self.canvas.restore_region(self.background)
self.ax.draw_artist(self.line)
self.canvas.blit(self.ax.bbox)
else:
self.canvas.draw_idle()
| 93,692 | 32.236254 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/table.py |
"""
Place a table below the x-axis at location loc.
The table consists of a grid of cells.
The grid need not be rectangular and can have holes.
Cells are added by specifying their row and column.
For the purposes of positioning, the cell at (0, 0) is
assumed to be at the top left and the cell at (max_row, max_col)
is assumed to be at bottom right.
You can add additional cells outside this range to have convenient
ways of positioning more interesting grids.
Author : John Gill <jng@europe.renre.com>
Copyright : 2004 John Gill and John Hunter
License : matplotlib license
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
import warnings
from . import artist
from .artist import Artist, allow_rasterization
from .patches import Rectangle
from matplotlib import docstring
from .text import Text
from .transforms import Bbox
from matplotlib.path import Path
class Cell(Rectangle):
"""
A cell is a Rectangle with some associated text.
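    Example (a minimal sketch; cells are normally created for you by
    :meth:`Table.add_cell`, and the sizes below are arbitrary)::
        from matplotlib.table import Cell
        cell = Cell((0, 0), width=0.2, height=0.1,
                    text='value', loc='center')
        cell.set_fontsize(8)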
"""
PAD = 0.1 # padding between text and rectangle
def __init__(self, xy, width, height,
edgecolor='k', facecolor='w',
fill=True,
text='',
loc=None,
fontproperties=None
):
# Call base
Rectangle.__init__(self, xy, width=width, height=height,
edgecolor=edgecolor, facecolor=facecolor)
self.set_clip_on(False)
# Create text object
if loc is None:
loc = 'right'
self._loc = loc
self._text = Text(x=xy[0], y=xy[1], text=text,
fontproperties=fontproperties)
self._text.set_clip_on(False)
def set_transform(self, trans):
Rectangle.set_transform(self, trans)
# the text does not get the transform!
self.stale = True
def set_figure(self, fig):
Rectangle.set_figure(self, fig)
self._text.set_figure(fig)
def get_text(self):
        'Return the cell Text instance'
return self._text
def set_fontsize(self, size):
self._text.set_fontsize(size)
self.stale = True
def get_fontsize(self):
'Return the cell fontsize'
return self._text.get_fontsize()
def auto_set_font_size(self, renderer):
""" Shrink font size until text fits. """
fontsize = self.get_fontsize()
required = self.get_required_width(renderer)
while fontsize > 1 and required > self.get_width():
fontsize -= 1
self.set_fontsize(fontsize)
required = self.get_required_width(renderer)
return fontsize
@allow_rasterization
def draw(self, renderer):
if not self.get_visible():
return
# draw the rectangle
Rectangle.draw(self, renderer)
# position the text
self._set_text_position(renderer)
self._text.draw(renderer)
self.stale = False
def _set_text_position(self, renderer):
""" Set text up so it draws in the right place.
Currently support 'left', 'center' and 'right'
"""
bbox = self.get_window_extent(renderer)
l, b, w, h = bbox.bounds
# draw in center vertically
self._text.set_verticalalignment('center')
y = b + (h / 2.0)
# now position horizontally
if self._loc == 'center':
self._text.set_horizontalalignment('center')
x = l + (w / 2.0)
elif self._loc == 'left':
self._text.set_horizontalalignment('left')
x = l + (w * self.PAD)
else:
self._text.set_horizontalalignment('right')
x = l + (w * (1.0 - self.PAD))
self._text.set_position((x, y))
def get_text_bounds(self, renderer):
""" Get text bounds in axes co-ordinates. """
bbox = self._text.get_window_extent(renderer)
bboxa = bbox.inverse_transformed(self.get_data_transform())
return bboxa.bounds
def get_required_width(self, renderer):
""" Get width required for this cell. """
l, b, w, h = self.get_text_bounds(renderer)
return w * (1.0 + (2.0 * self.PAD))
def set_text_props(self, **kwargs):
'update the text properties with kwargs'
self._text.update(kwargs)
self.stale = True
class CustomCell(Cell):
"""
A subclass of Cell where the sides may be visibly toggled.
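    Example (a minimal sketch; the sizes are arbitrary)::
        from matplotlib.table import CustomCell
        cell = CustomCell((0, 0), width=0.2, height=0.1, text='x',
                          visible_edges='horizontal')
        cell.visible_edges   # -> 'BT' (bottom and top edges only)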
"""
_edges = 'BRTL'
_edge_aliases = {'open': '',
'closed': _edges, # default
'horizontal': 'BT',
'vertical': 'RL'
}
def __init__(self, *args, **kwargs):
visible_edges = kwargs.pop('visible_edges')
Cell.__init__(self, *args, **kwargs)
self.visible_edges = visible_edges
@property
def visible_edges(self):
return self._visible_edges
@visible_edges.setter
def visible_edges(self, value):
if value is None:
self._visible_edges = self._edges
elif value in self._edge_aliases:
self._visible_edges = self._edge_aliases[value]
else:
for edge in value:
if edge not in self._edges:
raise ValueError('Invalid edge param {}, must only be one '
'of {} or string of {}'.format(
value,
", ".join(self._edge_aliases),
", ".join(self._edges)))
self._visible_edges = value
self.stale = True
def get_path(self):
'Return a path where the edges specified by _visible_edges are drawn'
codes = [Path.MOVETO]
for edge in self._edges:
if edge in self._visible_edges:
codes.append(Path.LINETO)
else:
codes.append(Path.MOVETO)
if Path.MOVETO not in codes[1:]: # All sides are visible
codes[-1] = Path.CLOSEPOLY
return Path(
[[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0], [0.0, 0.0]],
codes,
readonly=True
)
class Table(Artist):
"""
Create a table of cells.
Table can have (optional) row and column headers.
Each entry in the table can be either text or patches.
Column widths and row heights for the table can be specified.
Return value is a sequence of text, line and patch instances that make
    up the table.
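    Example (a minimal sketch; most users will want the :func:`table` factory
    function below instead, and the cell sizes here are arbitrary)::
        import matplotlib.pyplot as plt
        from matplotlib.table import Table
        fig, ax = plt.subplots()
        ax.set_axis_off()
        tab = Table(ax, loc='center')
        tab.add_cell(0, 0, width=0.3, height=0.1, text='a')
        tab.add_cell(0, 1, width=0.3, height=0.1, text='b')
        ax.add_table(tab)
        plt.show()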
"""
codes = {'best': 0,
'upper right': 1, # default
'upper left': 2,
'lower left': 3,
'lower right': 4,
'center left': 5,
'center right': 6,
'lower center': 7,
'upper center': 8,
'center': 9,
'top right': 10,
'top left': 11,
'bottom left': 12,
'bottom right': 13,
'right': 14,
'left': 15,
'top': 16,
'bottom': 17,
}
FONTSIZE = 10
AXESPAD = 0.02 # the border between the axes and table edge
def __init__(self, ax, loc=None, bbox=None, **kwargs):
Artist.__init__(self)
if isinstance(loc, six.string_types) and loc not in self.codes:
warnings.warn('Unrecognized location %s. Falling back on '
'bottom; valid locations are\n%s\t' %
(loc, '\n\t'.join(self.codes)))
loc = 'bottom'
if isinstance(loc, six.string_types):
loc = self.codes.get(loc, 1)
self.set_figure(ax.figure)
self._axes = ax
self._loc = loc
self._bbox = bbox
# use axes coords
self.set_transform(ax.transAxes)
self._texts = []
self._cells = {}
self._edges = None
self._autoRows = []
self._autoColumns = []
self._autoFontsize = True
self.update(kwargs)
self.set_clip_on(False)
def add_cell(self, row, col, *args, **kwargs):
"""
Add a cell to the table.
Parameters
----------
row : int
Row index
col : int
Column index
Returns
-------
`CustomCell`: Automatically created cell
"""
xy = (0, 0)
cell = CustomCell(xy, visible_edges=self.edges, *args, **kwargs)
self[row, col] = cell
return cell
def __setitem__(self, position, cell):
"""
Set a customcell in a given position
"""
if not isinstance(cell, CustomCell):
raise TypeError('Table only accepts CustomCell')
try:
row, col = position[0], position[1]
except Exception:
raise KeyError('Only tuples length 2 are accepted as coordinates')
cell.set_figure(self.figure)
cell.set_transform(self.get_transform())
cell.set_clip_on(False)
self._cells[row, col] = cell
self.stale = True
def __getitem__(self, position):
"""
        Retrieve a custom cell from a given position
"""
try:
row, col = position[0], position[1]
except Exception:
raise KeyError('Only tuples length 2 are accepted as coordinates')
return self._cells[row, col]
@property
def edges(self):
return self._edges
@edges.setter
def edges(self, value):
self._edges = value
self.stale = True
def _approx_text_height(self):
return (self.FONTSIZE / 72.0 * self.figure.dpi /
self._axes.bbox.height * 1.2)
@allow_rasterization
def draw(self, renderer):
# Need a renderer to do hit tests on mouseevent; assume the last one
# will do
if renderer is None:
renderer = self.figure._cachedRenderer
if renderer is None:
raise RuntimeError('No renderer defined')
if not self.get_visible():
return
renderer.open_group('table')
self._update_positions(renderer)
for key in sorted(self._cells):
self._cells[key].draw(renderer)
renderer.close_group('table')
self.stale = False
def _get_grid_bbox(self, renderer):
"""Get a bbox, in axes co-ordinates for the cells.
Only include those in the range (0,0) to (maxRow, maxCol)"""
boxes = [cell.get_window_extent(renderer)
for (row, col), cell in six.iteritems(self._cells)
if row >= 0 and col >= 0]
bbox = Bbox.union(boxes)
return bbox.inverse_transformed(self.get_transform())
def contains(self, mouseevent):
"""Test whether the mouse event occurred in the table.
Returns T/F, {}
"""
if callable(self._contains):
return self._contains(self, mouseevent)
# TODO: Return index of the cell containing the cursor so that the user
# doesn't have to bind to each one individually.
renderer = self.figure._cachedRenderer
if renderer is not None:
boxes = [cell.get_window_extent(renderer)
for (row, col), cell in six.iteritems(self._cells)
if row >= 0 and col >= 0]
bbox = Bbox.union(boxes)
return bbox.contains(mouseevent.x, mouseevent.y), {}
else:
return False, {}
def get_children(self):
'Return the Artists contained by the table'
return list(six.itervalues(self._cells))
get_child_artists = get_children # backward compatibility
def get_window_extent(self, renderer):
'Return the bounding box of the table in window coords'
boxes = [cell.get_window_extent(renderer)
for cell in six.itervalues(self._cells)]
return Bbox.union(boxes)
def _do_cell_alignment(self):
""" Calculate row heights and column widths.
Position cells accordingly.
"""
# Calculate row/column widths
widths = {}
heights = {}
for (row, col), cell in six.iteritems(self._cells):
height = heights.setdefault(row, 0.0)
heights[row] = max(height, cell.get_height())
width = widths.setdefault(col, 0.0)
widths[col] = max(width, cell.get_width())
# work out left position for each column
xpos = 0
lefts = {}
for col in sorted(widths):
lefts[col] = xpos
xpos += widths[col]
ypos = 0
bottoms = {}
for row in sorted(heights, reverse=True):
bottoms[row] = ypos
ypos += heights[row]
# set cell positions
for (row, col), cell in six.iteritems(self._cells):
cell.set_x(lefts[col])
cell.set_y(bottoms[row])
def auto_set_column_width(self, col):
""" Given column indexs in either List, Tuple or int. Will be able to
automatically set the columns into optimal sizes.
Here is the example of the input, which triger automatic adjustment on
columns to optimal size by given index numbers.
-1: the row labling
0: the 1st column
1: the 2nd column
Args:
col(List): list of indexs
>>>table.auto_set_column_width([-1,0,1])
col(Tuple): tuple of indexs
>>>table.auto_set_column_width((-1,0,1))
col(int): index integer
>>>table.auto_set_column_width(-1)
>>>table.auto_set_column_width(0)
>>>table.auto_set_column_width(1)
"""
# check for col possibility on iteration
try:
iter(col)
except (TypeError, AttributeError):
self._autoColumns.append(col)
else:
for cell in col:
self._autoColumns.append(cell)
self.stale = True
def _auto_set_column_width(self, col, renderer):
""" Automagically set width for column.
"""
cells = [key for key in self._cells if key[1] == col]
# find max width
width = 0
for cell in cells:
c = self._cells[cell]
width = max(c.get_required_width(renderer), width)
# Now set the widths
for cell in cells:
self._cells[cell].set_width(width)
def auto_set_font_size(self, value=True):
""" Automatically set font size. """
self._autoFontsize = value
self.stale = True
def _auto_set_font_size(self, renderer):
if len(self._cells) == 0:
return
fontsize = list(six.itervalues(self._cells))[0].get_fontsize()
cells = []
for key, cell in six.iteritems(self._cells):
# ignore auto-sized columns
if key[1] in self._autoColumns:
continue
size = cell.auto_set_font_size(renderer)
fontsize = min(fontsize, size)
cells.append(cell)
# now set all fontsizes equal
for cell in six.itervalues(self._cells):
cell.set_fontsize(fontsize)
def scale(self, xscale, yscale):
""" Scale column widths by xscale and row heights by yscale. """
for c in six.itervalues(self._cells):
c.set_width(c.get_width() * xscale)
c.set_height(c.get_height() * yscale)
def set_fontsize(self, size):
"""
Set the fontsize of the cell text
ACCEPTS: a float in points
"""
for cell in six.itervalues(self._cells):
cell.set_fontsize(size)
self.stale = True
def _offset(self, ox, oy):
'Move all the artists by ox,oy (axes coords)'
for c in six.itervalues(self._cells):
x, y = c.get_x(), c.get_y()
c.set_x(x + ox)
c.set_y(y + oy)
def _update_positions(self, renderer):
# called from renderer to allow more precise estimates of
# widths and heights with get_window_extent
# Do any auto width setting
for col in self._autoColumns:
self._auto_set_column_width(col, renderer)
if self._autoFontsize:
self._auto_set_font_size(renderer)
# Align all the cells
self._do_cell_alignment()
bbox = self._get_grid_bbox(renderer)
l, b, w, h = bbox.bounds
if self._bbox is not None:
# Position according to bbox
rl, rb, rw, rh = self._bbox
self.scale(rw / w, rh / h)
ox = rl - l
oy = rb - b
self._do_cell_alignment()
else:
# Position using loc
(BEST, UR, UL, LL, LR, CL, CR, LC, UC, C,
TR, TL, BL, BR, R, L, T, B) = xrange(len(self.codes))
# defaults for center
ox = (0.5 - w / 2) - l
oy = (0.5 - h / 2) - b
if self._loc in (UL, LL, CL): # left
ox = self.AXESPAD - l
if self._loc in (BEST, UR, LR, R, CR): # right
ox = 1 - (l + w + self.AXESPAD)
if self._loc in (BEST, UR, UL, UC): # upper
oy = 1 - (b + h + self.AXESPAD)
if self._loc in (LL, LR, LC): # lower
oy = self.AXESPAD - b
if self._loc in (LC, UC, C): # center x
ox = (0.5 - w / 2) - l
if self._loc in (CL, CR, C): # center y
oy = (0.5 - h / 2) - b
if self._loc in (TL, BL, L): # out left
ox = - (l + w)
if self._loc in (TR, BR, R): # out right
ox = 1.0 - l
if self._loc in (TR, TL, T): # out top
oy = 1.0 - b
if self._loc in (BL, BR, B): # out bottom
oy = - (b + h)
self._offset(ox, oy)
def get_celld(self):
'return a dict of cells in the table'
return self._cells
def table(ax,
cellText=None, cellColours=None,
cellLoc='right', colWidths=None,
rowLabels=None, rowColours=None, rowLoc='left',
colLabels=None, colColours=None, colLoc='center',
loc='bottom', bbox=None, edges='closed',
**kwargs):
"""
TABLE(cellText=None, cellColours=None,
cellLoc='right', colWidths=None,
rowLabels=None, rowColours=None, rowLoc='left',
colLabels=None, colColours=None, colLoc='center',
loc='bottom', bbox=None, edges='closed')
Factory function to generate a Table instance.
Thanks to John Gill for providing the class and table.
"""
if cellColours is None and cellText is None:
raise ValueError('At least one argument from "cellColours" or '
'"cellText" must be provided to create a table.')
# Check we have some cellText
if cellText is None:
# assume just colours are needed
rows = len(cellColours)
cols = len(cellColours[0])
cellText = [[''] * cols] * rows
rows = len(cellText)
cols = len(cellText[0])
for row in cellText:
if len(row) != cols:
raise ValueError("Each row in 'cellText' must have {} columns"
.format(cols))
if cellColours is not None:
if len(cellColours) != rows:
raise ValueError("'cellColours' must have {} rows".format(rows))
for row in cellColours:
if len(row) != cols:
raise ValueError("Each row in 'cellColours' must have {} "
"columns".format(cols))
else:
cellColours = ['w' * cols] * rows
# Set colwidths if not given
if colWidths is None:
colWidths = [1.0 / cols] * cols
# Fill in missing information for column
# and row labels
rowLabelWidth = 0
if rowLabels is None:
if rowColours is not None:
rowLabels = [''] * rows
rowLabelWidth = colWidths[0]
elif rowColours is None:
rowColours = 'w' * rows
if rowLabels is not None:
if len(rowLabels) != rows:
raise ValueError("'rowLabels' must be of length {0}".format(rows))
# If we have column labels, need to shift
# the text and colour arrays down 1 row
offset = 1
if colLabels is None:
if colColours is not None:
colLabels = [''] * cols
else:
offset = 0
elif colColours is None:
colColours = 'w' * cols
# Set up cell colours if not given
if cellColours is None:
cellColours = ['w' * cols] * rows
# Now create the table
table = Table(ax, loc, bbox, **kwargs)
table.edges = edges
height = table._approx_text_height()
# Add the cells
for row in xrange(rows):
for col in xrange(cols):
table.add_cell(row + offset, col,
width=colWidths[col], height=height,
text=cellText[row][col],
facecolor=cellColours[row][col],
loc=cellLoc)
# Do column labels
if colLabels is not None:
for col in xrange(cols):
table.add_cell(0, col,
width=colWidths[col], height=height,
text=colLabels[col], facecolor=colColours[col],
loc=colLoc)
# Do row labels
if rowLabels is not None:
for row in xrange(rows):
table.add_cell(row + offset, -1,
width=rowLabelWidth or 1e-15, height=height,
text=rowLabels[row], facecolor=rowColours[row],
loc=rowLoc)
if rowLabelWidth == 0:
table.auto_set_column_width(-1)
ax.add_table(table)
return table
docstring.interpd.update(Table=artist.kwdoc(Table))
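# Hedged usage sketch (added for illustration; not part of the original
# module): one way the `table` factory above is typically driven through
# Axes.table, which forwards its arguments here.  The data values and labels
# are invented for the example.
def _example_table_usage():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.plot([0, 1, 2], [3, 5, 2])
    tab = ax.table(cellText=[['3', '5', '2']],
                   rowLabels=['value'],
                   colLabels=['a', 'b', 'c'],
                   loc='bottom')
    tab.auto_set_font_size(False)
    tab.set_fontsize(9)
    # Let the row-label column (-1) and the data columns size themselves.
    tab.auto_set_column_width([-1, 0, 1, 2])
    return fig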
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tight_layout.py
"""
This module provides routines to adjust subplot params so that subplots are
nicely fit in the figure. In doing so, only axis labels, tick labels, axes
titles and offsetboxes that are anchored to axes are currently considered.
Internally, it assumes that the margins (left_margin, etc.) which are
differences between ax.get_tightbbox and ax.bbox are independent of axes
position. This may fail if Axes.adjustable is datalim. Also, this will fail
for some cases (for example, left or right margin is affected by xlabel).
"""
import warnings
import matplotlib
from matplotlib.transforms import TransformedBbox, Bbox
from matplotlib.font_manager import FontProperties
rcParams = matplotlib.rcParams
def _get_left(tight_bbox, axes_bbox):
return axes_bbox.xmin - tight_bbox.xmin
def _get_right(tight_bbox, axes_bbox):
return tight_bbox.xmax - axes_bbox.xmax
def _get_bottom(tight_bbox, axes_bbox):
return axes_bbox.ymin - tight_bbox.ymin
def _get_top(tight_bbox, axes_bbox):
return tight_bbox.ymax - axes_bbox.ymax
def auto_adjust_subplotpars(
fig, renderer, nrows_ncols, num1num2_list, subplot_list,
ax_bbox_list=None, pad=1.08, h_pad=None, w_pad=None, rect=None):
"""
Return a dict of subplot parameters to adjust spacing between subplots.
Note that this function ignores geometry information of subplot
itself, but uses what is given by the *nrows_ncols* and *num1num2_list*
parameters. Also, the results could be incorrect if some subplots have
``adjustable=datalim``.
Parameters
----------
nrows_ncols : Tuple[int, int]
Number of rows and number of columns of the grid.
num1num2_list : List[int]
List of numbers specifying the area occupied by the subplot
subplot_list : list of subplots
List of subplots that will be used to calculate optimal subplot_params.
pad : float
Padding between the figure edge and the edges of subplots, as a
fraction of the font size.
h_pad, w_pad : float
Padding (height/width) between edges of adjacent subplots, as a
fraction of the font size. Defaults to *pad*.
rect : Tuple[float, float, float, float]
[left, bottom, right, top] in normalized (0, 1) figure coordinates.
"""
rows, cols = nrows_ncols
font_size_inches = (
FontProperties(size=rcParams["font.size"]).get_size_in_points() / 72)
pad_inches = pad * font_size_inches
if h_pad is not None:
vpad_inches = h_pad * font_size_inches
else:
vpad_inches = pad_inches
if w_pad is not None:
hpad_inches = w_pad * font_size_inches
else:
hpad_inches = pad_inches
if len(num1num2_list) != len(subplot_list) or len(subplot_list) == 0:
raise ValueError
if rect is None:
margin_left = margin_bottom = margin_right = margin_top = None
else:
margin_left, margin_bottom, _right, _top = rect
if _right:
margin_right = 1 - _right
else:
margin_right = None
if _top:
margin_top = 1 - _top
else:
margin_top = None
vspaces = [[] for i in range((rows + 1) * cols)]
hspaces = [[] for i in range(rows * (cols + 1))]
union = Bbox.union
if ax_bbox_list is None:
ax_bbox_list = []
for subplots in subplot_list:
ax_bbox = union([ax.get_position(original=True)
for ax in subplots])
ax_bbox_list.append(ax_bbox)
for subplots, ax_bbox, (num1, num2) in zip(subplot_list,
ax_bbox_list,
num1num2_list):
if all([not ax.get_visible() for ax in subplots]):
continue
tight_bbox_raw = union([ax.get_tightbbox(renderer) for ax in subplots
if ax.get_visible()])
tight_bbox = TransformedBbox(tight_bbox_raw,
fig.transFigure.inverted())
row1, col1 = divmod(num1, cols)
if num2 is None:
# left
hspaces[row1 * (cols + 1) + col1].append(
_get_left(tight_bbox, ax_bbox))
# right
hspaces[row1 * (cols + 1) + (col1 + 1)].append(
_get_right(tight_bbox, ax_bbox))
# top
vspaces[row1 * cols + col1].append(
_get_top(tight_bbox, ax_bbox))
# bottom
vspaces[(row1 + 1) * cols + col1].append(
_get_bottom(tight_bbox, ax_bbox))
else:
row2, col2 = divmod(num2, cols)
for row_i in range(row1, row2 + 1):
# left
hspaces[row_i * (cols + 1) + col1].append(
_get_left(tight_bbox, ax_bbox))
# right
hspaces[row_i * (cols + 1) + (col2 + 1)].append(
_get_right(tight_bbox, ax_bbox))
for col_i in range(col1, col2 + 1):
# top
vspaces[row1 * cols + col_i].append(
_get_top(tight_bbox, ax_bbox))
# bottom
vspaces[(row2 + 1) * cols + col_i].append(
_get_bottom(tight_bbox, ax_bbox))
fig_width_inch, fig_height_inch = fig.get_size_inches()
# margins can be negative for axes with aspect applied. And we
# append + [0] to make minimum margins 0
if not margin_left:
margin_left = max([sum(s) for s in hspaces[::cols + 1]] + [0])
margin_left += pad_inches / fig_width_inch
if not margin_right:
margin_right = max([sum(s) for s in hspaces[cols::cols + 1]] + [0])
margin_right += pad_inches / fig_width_inch
if not margin_top:
margin_top = max([sum(s) for s in vspaces[:cols]] + [0])
margin_top += pad_inches / fig_height_inch
if not margin_bottom:
margin_bottom = max([sum(s) for s in vspaces[-cols:]] + [0])
margin_bottom += pad_inches / fig_height_inch
kwargs = dict(left=margin_left,
right=1 - margin_right,
bottom=margin_bottom,
top=1 - margin_top)
if cols > 1:
hspace = (
max(sum(s)
for i in range(rows)
for s in hspaces[i * (cols + 1) + 1:(i + 1) * (cols + 1) - 1])
+ hpad_inches / fig_width_inch)
h_axes = (1 - margin_right - margin_left - hspace * (cols - 1)) / cols
kwargs["wspace"] = hspace / h_axes
if rows > 1:
vspace = (max(sum(s) for s in vspaces[cols:-cols])
+ vpad_inches / fig_height_inch)
v_axes = (1 - margin_top - margin_bottom - vspace * (rows - 1)) / rows
kwargs["hspace"] = vspace / v_axes
return kwargs
def get_renderer(fig):
if fig._cachedRenderer:
renderer = fig._cachedRenderer
else:
canvas = fig.canvas
if canvas and hasattr(canvas, "get_renderer"):
renderer = canvas.get_renderer()
else:
# not sure if this can happen
warnings.warn("tight_layout : falling back to Agg renderer")
from matplotlib.backends.backend_agg import FigureCanvasAgg
canvas = FigureCanvasAgg(fig)
renderer = canvas.get_renderer()
return renderer
def get_subplotspec_list(axes_list, grid_spec=None):
"""Return a list of subplotspec from the given list of axes.
For an instance of axes that does not support subplotspec, None is inserted
in the list.
If grid_spec is given, None is inserted for those not from the given
grid_spec.
"""
subplotspec_list = []
for ax in axes_list:
axes_or_locator = ax.get_axes_locator()
if axes_or_locator is None:
axes_or_locator = ax
if hasattr(axes_or_locator, "get_subplotspec"):
subplotspec = axes_or_locator.get_subplotspec()
subplotspec = subplotspec.get_topmost_subplotspec()
gs = subplotspec.get_gridspec()
if grid_spec is not None:
if gs != grid_spec:
subplotspec = None
elif gs.locally_modified_subplot_params():
subplotspec = None
else:
subplotspec = None
subplotspec_list.append(subplotspec)
return subplotspec_list
def get_tight_layout_figure(fig, axes_list, subplotspec_list, renderer,
pad=1.08, h_pad=None, w_pad=None, rect=None):
"""
Return subplot parameters for tight-layouted-figure with specified padding.
Parameters
----------
fig : Figure
axes_list : list of Axes
subplotspec_list : list of `.SubplotSpec`
The subplotspecs of each axes.
renderer : renderer
pad : float
Padding between the figure edge and the edges of subplots, as a
fraction of the font size.
h_pad, w_pad : float
Padding (height/width) between edges of adjacent subplots. Defaults to
*pad_inches*.
rect : Tuple[float, float, float, float], optional
(left, bottom, right, top) rectangle in normalized figure coordinates
that the whole subplots area (including labels) will fit into.
Defaults to using the entire figure.
"""
subplot_list = []
nrows_list = []
ncols_list = []
ax_bbox_list = []
subplot_dict = {} # Multiple axes can share same subplot_interface (e.g.,
# axes_grid1); thus we need to join them together.
subplotspec_list2 = []
for ax, subplotspec in zip(axes_list,
subplotspec_list):
if subplotspec is None:
continue
subplots = subplot_dict.setdefault(subplotspec, [])
if not subplots:
myrows, mycols, _, _ = subplotspec.get_geometry()
nrows_list.append(myrows)
ncols_list.append(mycols)
subplotspec_list2.append(subplotspec)
subplot_list.append(subplots)
ax_bbox_list.append(subplotspec.get_position(fig))
subplots.append(ax)
if (len(nrows_list) == 0) or (len(ncols_list) == 0):
return {}
max_nrows = max(nrows_list)
max_ncols = max(ncols_list)
num1num2_list = []
for subplotspec in subplotspec_list2:
rows, cols, num1, num2 = subplotspec.get_geometry()
div_row, mod_row = divmod(max_nrows, rows)
div_col, mod_col = divmod(max_ncols, cols)
if (mod_row != 0) or (mod_col != 0):
raise RuntimeError("")
rowNum1, colNum1 = divmod(num1, cols)
if num2 is None:
rowNum2, colNum2 = rowNum1, colNum1
else:
rowNum2, colNum2 = divmod(num2, cols)
num1num2_list.append((rowNum1 * div_row * max_ncols +
colNum1 * div_col,
((rowNum2 + 1) * div_row - 1) * max_ncols +
(colNum2 + 1) * div_col - 1))
kwargs = auto_adjust_subplotpars(fig, renderer,
nrows_ncols=(max_nrows, max_ncols),
num1num2_list=num1num2_list,
subplot_list=subplot_list,
ax_bbox_list=ax_bbox_list,
pad=pad, h_pad=h_pad, w_pad=w_pad)
if rect is not None:
# if rect is given, the whole subplots area (including
# labels) will fit into the rect instead of the
# figure. Note that the rect argument of
        # *auto_adjust_subplotpars* specifies the area that will be
        # covered by the total area of axes.bbox. Thus we call
        # auto_adjust_subplotpars twice, where the second run
        # uses adjusted rect parameters.
left, bottom, right, top = rect
if left is not None:
left += kwargs["left"]
if bottom is not None:
bottom += kwargs["bottom"]
if right is not None:
right -= (1 - kwargs["right"])
if top is not None:
top -= (1 - kwargs["top"])
#if h_pad is None: h_pad = pad
#if w_pad is None: w_pad = pad
kwargs = auto_adjust_subplotpars(fig, renderer,
nrows_ncols=(max_nrows, max_ncols),
num1num2_list=num1num2_list,
subplot_list=subplot_list,
ax_bbox_list=ax_bbox_list,
pad=pad, h_pad=h_pad, w_pad=w_pad,
rect=(left, bottom, right, top))
return kwargs
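# Hedged usage sketch (added for illustration; not part of the original
# module): the dict returned above is intended for Figure.subplots_adjust,
# which is essentially what Figure.tight_layout does with it.  The figure
# content here is arbitrary.
def _example_manual_tight_layout():
    import matplotlib.pyplot as plt
    fig, axs = plt.subplots(2, 2)
    for ax in axs.flat:
        ax.plot([0, 1], [0, 1])
        ax.set_xlabel("x label")
    renderer = get_renderer(fig)
    kwargs = get_tight_layout_figure(
        fig, fig.axes, get_subplotspec_list(fig.axes), renderer, pad=1.08)
    if kwargs:
        fig.subplots_adjust(**kwargs)
    return fig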
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/scale.py
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
from numpy import ma
from matplotlib import cbook, docstring, rcParams
from matplotlib.ticker import (
NullFormatter, ScalarFormatter, LogFormatterSciNotation, LogitFormatter,
NullLocator, LogLocator, AutoLocator, AutoMinorLocator,
SymmetricalLogLocator, LogitLocator)
from matplotlib.transforms import Transform, IdentityTransform
class ScaleBase(object):
"""
The base class for all scales.
Scales are separable transformations, working on a single dimension.
Any subclasses will want to override:
- :attr:`name`
- :meth:`get_transform`
- :meth:`set_default_locators_and_formatters`
And optionally:
- :meth:`limit_range_for_scale`
"""
def get_transform(self):
"""
Return the :class:`~matplotlib.transforms.Transform` object
associated with this scale.
"""
raise NotImplementedError()
def set_default_locators_and_formatters(self, axis):
"""
Set the :class:`~matplotlib.ticker.Locator` and
:class:`~matplotlib.ticker.Formatter` objects on the given
axis to match this scale.
"""
raise NotImplementedError()
def limit_range_for_scale(self, vmin, vmax, minpos):
"""
Returns the range *vmin*, *vmax*, possibly limited to the
domain supported by this scale.
*minpos* should be the minimum positive value in the data.
This is used by log scales to determine a minimum value.
"""
return vmin, vmax
class LinearScale(ScaleBase):
"""
The default linear scale.
"""
name = 'linear'
def __init__(self, axis, **kwargs):
pass
def set_default_locators_and_formatters(self, axis):
"""
Set the locators and formatters to reasonable defaults for
linear scaling.
"""
axis.set_major_locator(AutoLocator())
axis.set_major_formatter(ScalarFormatter())
axis.set_minor_formatter(NullFormatter())
# update the minor locator for x and y axis based on rcParams
if rcParams['xtick.minor.visible']:
axis.set_minor_locator(AutoMinorLocator())
else:
axis.set_minor_locator(NullLocator())
def get_transform(self):
"""
The transform for linear scaling is just the
:class:`~matplotlib.transforms.IdentityTransform`.
"""
return IdentityTransform()
class LogTransformBase(Transform):
input_dims = 1
output_dims = 1
is_separable = True
has_inverse = True
def __init__(self, nonpos='clip'):
Transform.__init__(self)
self._clip = {"clip": True, "mask": False}[nonpos]
def transform_non_affine(self, a):
# Ignore invalid values due to nans being passed to the transform
with np.errstate(divide="ignore", invalid="ignore"):
out = np.log(a)
out /= np.log(self.base)
if self._clip:
# SVG spec says that conforming viewers must support values up
# to 3.4e38 (C float); however experiments suggest that
# Inkscape (which uses cairo for rendering) runs into cairo's
# 24-bit limit (which is apparently shared by Agg).
            # Ghostscript (used for pdf rendering) appears to overflow even
# earlier, with the max value around 2 ** 15 for the tests to
# pass. On the other hand, in practice, we want to clip beyond
# np.log10(np.nextafter(0, 1)) ~ -323
# so 1000 seems safe.
out[a <= 0] = -1000
return out
def __str__(self):
return "{}({!r})".format(
type(self).__name__, "clip" if self._clip else "mask")
class InvertedLogTransformBase(Transform):
input_dims = 1
output_dims = 1
is_separable = True
has_inverse = True
def transform_non_affine(self, a):
return ma.power(self.base, a)
def __str__(self):
return "{}()".format(type(self).__name__)
class Log10Transform(LogTransformBase):
base = 10.0
def inverted(self):
return InvertedLog10Transform()
class InvertedLog10Transform(InvertedLogTransformBase):
base = 10.0
def inverted(self):
return Log10Transform()
class Log2Transform(LogTransformBase):
base = 2.0
def inverted(self):
return InvertedLog2Transform()
class InvertedLog2Transform(InvertedLogTransformBase):
base = 2.0
def inverted(self):
return Log2Transform()
class NaturalLogTransform(LogTransformBase):
base = np.e
def inverted(self):
return InvertedNaturalLogTransform()
class InvertedNaturalLogTransform(InvertedLogTransformBase):
base = np.e
def inverted(self):
return NaturalLogTransform()
class LogTransform(LogTransformBase):
def __init__(self, base, nonpos='clip'):
LogTransformBase.__init__(self, nonpos)
self.base = base
def inverted(self):
return InvertedLogTransform(self.base)
class InvertedLogTransform(InvertedLogTransformBase):
def __init__(self, base):
InvertedLogTransformBase.__init__(self)
self.base = base
def inverted(self):
return LogTransform(self.base)
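# Hedged usage sketch (added for illustration; not part of the original
# module): a round trip through LogTransform / InvertedLogTransform for an
# arbitrary base, showing how 'clip' maps non-positive input to -1000.
def _example_log_transform_roundtrip():
    tr = LogTransform(base=5.0, nonpos='clip')
    a = np.array([-1.0, 0.0, 1.0, 5.0, 25.0])
    out = tr.transform_non_affine(a)                     # [-1000, -1000, 0, 1, 2]
    back = tr.inverted().transform_non_affine(out[2:])   # [1, 5, 25]
    return out, back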
class LogScale(ScaleBase):
"""
A standard logarithmic scale. Care is taken so non-positive
values are not plotted.
For computational efficiency (to push as much as possible to Numpy
C code in the common cases), this scale provides different
transforms depending on the base of the logarithm:
- base 10 (:class:`Log10Transform`)
- base 2 (:class:`Log2Transform`)
- base e (:class:`NaturalLogTransform`)
- arbitrary base (:class:`LogTransform`)
"""
name = 'log'
# compatibility shim
LogTransformBase = LogTransformBase
Log10Transform = Log10Transform
InvertedLog10Transform = InvertedLog10Transform
Log2Transform = Log2Transform
InvertedLog2Transform = InvertedLog2Transform
NaturalLogTransform = NaturalLogTransform
InvertedNaturalLogTransform = InvertedNaturalLogTransform
LogTransform = LogTransform
InvertedLogTransform = InvertedLogTransform
def __init__(self, axis, **kwargs):
"""
*basex*/*basey*:
The base of the logarithm
*nonposx*/*nonposy*: ['mask' | 'clip' ]
non-positive values in *x* or *y* can be masked as
invalid, or clipped to a very small positive number
*subsx*/*subsy*:
Where to place the subticks between each major tick.
Should be a sequence of integers. For example, in a log10
scale: ``[2, 3, 4, 5, 6, 7, 8, 9]``
will place 8 logarithmically spaced minor ticks between
each major tick.
"""
if axis.axis_name == 'x':
base = kwargs.pop('basex', 10.0)
subs = kwargs.pop('subsx', None)
nonpos = kwargs.pop('nonposx', 'clip')
else:
base = kwargs.pop('basey', 10.0)
subs = kwargs.pop('subsy', None)
nonpos = kwargs.pop('nonposy', 'clip')
if len(kwargs):
raise ValueError(("provided too many kwargs, can only pass "
"{'basex', 'subsx', nonposx'} or "
"{'basey', 'subsy', nonposy'}. You passed ") +
"{!r}".format(kwargs))
if nonpos not in ['mask', 'clip']:
raise ValueError("nonposx, nonposy kwarg must be 'mask' or 'clip'")
if base <= 0 or base == 1:
raise ValueError('The log base cannot be <= 0 or == 1')
if base == 10.0:
self._transform = self.Log10Transform(nonpos)
elif base == 2.0:
self._transform = self.Log2Transform(nonpos)
elif base == np.e:
self._transform = self.NaturalLogTransform(nonpos)
else:
self._transform = self.LogTransform(base, nonpos)
self.base = base
self.subs = subs
def set_default_locators_and_formatters(self, axis):
"""
Set the locators and formatters to specialized versions for
log scaling.
"""
axis.set_major_locator(LogLocator(self.base))
axis.set_major_formatter(LogFormatterSciNotation(self.base))
axis.set_minor_locator(LogLocator(self.base, self.subs))
axis.set_minor_formatter(
LogFormatterSciNotation(self.base,
labelOnlyBase=(self.subs is not None)))
def get_transform(self):
"""
Return a :class:`~matplotlib.transforms.Transform` instance
appropriate for the given logarithm base.
"""
return self._transform
def limit_range_for_scale(self, vmin, vmax, minpos):
"""
Limit the domain to positive values.
"""
if not np.isfinite(minpos):
minpos = 1e-300 # This value should rarely if ever
# end up with a visible effect.
return (minpos if vmin <= 0 else vmin,
minpos if vmax <= 0 else vmax)
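# Hedged usage sketch (added for illustration; not part of the original
# module): LogScale is normally selected through Axes.set_xscale/set_yscale
# with the basex/basey and nonposx/nonposy keywords documented in __init__
# above.  The plotted data is arbitrary.
def _example_log_scale_usage():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.plot([1, 2, 3, 4], [1, 10, 100, 1000])
    ax.set_yscale('log', basey=10, nonposy='clip')
    return fig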
class SymmetricalLogTransform(Transform):
input_dims = 1
output_dims = 1
is_separable = True
has_inverse = True
def __init__(self, base, linthresh, linscale):
Transform.__init__(self)
self.base = base
self.linthresh = linthresh
self.linscale = linscale
self._linscale_adj = (linscale / (1.0 - self.base ** -1))
self._log_base = np.log(base)
def transform_non_affine(self, a):
sign = np.sign(a)
masked = ma.masked_inside(a,
-self.linthresh,
self.linthresh,
copy=False)
log = sign * self.linthresh * (
self._linscale_adj +
ma.log(np.abs(masked) / self.linthresh) / self._log_base)
if masked.mask.any():
return ma.where(masked.mask, a * self._linscale_adj, log)
else:
return log
def inverted(self):
return InvertedSymmetricalLogTransform(self.base, self.linthresh,
self.linscale)
class InvertedSymmetricalLogTransform(Transform):
input_dims = 1
output_dims = 1
is_separable = True
has_inverse = True
def __init__(self, base, linthresh, linscale):
Transform.__init__(self)
symlog = SymmetricalLogTransform(base, linthresh, linscale)
self.base = base
self.linthresh = linthresh
self.invlinthresh = symlog.transform(linthresh)
self.linscale = linscale
self._linscale_adj = (linscale / (1.0 - self.base ** -1))
def transform_non_affine(self, a):
sign = np.sign(a)
masked = ma.masked_inside(a, -self.invlinthresh,
self.invlinthresh, copy=False)
exp = sign * self.linthresh * (
ma.power(self.base, (sign * (masked / self.linthresh))
- self._linscale_adj))
if masked.mask.any():
return ma.where(masked.mask, a / self._linscale_adj, exp)
else:
return exp
def inverted(self):
return SymmetricalLogTransform(self.base,
self.linthresh, self.linscale)
class SymmetricalLogScale(ScaleBase):
"""
The symmetrical logarithmic scale is logarithmic in both the
positive and negative directions from the origin.
Since the values close to zero tend toward infinity, there is a
need to have a range around zero that is linear. The parameter
*linthresh* allows the user to specify the size of this range
(-*linthresh*, *linthresh*).
"""
name = 'symlog'
# compatibility shim
SymmetricalLogTransform = SymmetricalLogTransform
InvertedSymmetricalLogTransform = InvertedSymmetricalLogTransform
def __init__(self, axis, **kwargs):
"""
*basex*/*basey*:
The base of the logarithm
*linthreshx*/*linthreshy*:
A single float which defines the range (-*x*, *x*), within
which the plot is linear. This avoids having the plot go to
infinity around zero.
*subsx*/*subsy*:
Where to place the subticks between each major tick.
Should be a sequence of integers. For example, in a log10
scale: ``[2, 3, 4, 5, 6, 7, 8, 9]``
will place 8 logarithmically spaced minor ticks between
each major tick.
*linscalex*/*linscaley*:
This allows the linear range (-*linthresh* to *linthresh*)
to be stretched relative to the logarithmic range. Its
value is the number of decades to use for each half of the
linear range. For example, when *linscale* == 1.0 (the
default), the space used for the positive and negative
halves of the linear range will be equal to one decade in
the logarithmic range.
"""
if axis.axis_name == 'x':
base = kwargs.pop('basex', 10.0)
linthresh = kwargs.pop('linthreshx', 2.0)
subs = kwargs.pop('subsx', None)
linscale = kwargs.pop('linscalex', 1.0)
else:
base = kwargs.pop('basey', 10.0)
linthresh = kwargs.pop('linthreshy', 2.0)
subs = kwargs.pop('subsy', None)
linscale = kwargs.pop('linscaley', 1.0)
if base <= 1.0:
raise ValueError("'basex/basey' must be larger than 1")
if linthresh <= 0.0:
raise ValueError("'linthreshx/linthreshy' must be positive")
if linscale <= 0.0:
raise ValueError("'linscalex/linthreshy' must be positive")
self._transform = self.SymmetricalLogTransform(base,
linthresh,
linscale)
self.base = base
self.linthresh = linthresh
self.linscale = linscale
self.subs = subs
def set_default_locators_and_formatters(self, axis):
"""
Set the locators and formatters to specialized versions for
symmetrical log scaling.
"""
axis.set_major_locator(SymmetricalLogLocator(self.get_transform()))
axis.set_major_formatter(LogFormatterSciNotation(self.base))
axis.set_minor_locator(SymmetricalLogLocator(self.get_transform(),
self.subs))
axis.set_minor_formatter(NullFormatter())
def get_transform(self):
"""
Return a :class:`SymmetricalLogTransform` instance.
"""
return self._transform
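# Hedged usage sketch (added for illustration; not part of the original
# module): a symlog axis with an explicit linear window around zero, using the
# linthreshy/linscaley keywords documented in __init__ above.  Data is made up.
def _example_symlog_usage():
    import matplotlib.pyplot as plt
    x = np.linspace(-50, 50, 201)
    fig, ax = plt.subplots()
    ax.plot(x, x ** 3)
    ax.set_yscale('symlog', basey=10, linthreshy=10.0, linscaley=1.0)
    return fig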
class LogitTransform(Transform):
input_dims = 1
output_dims = 1
is_separable = True
has_inverse = True
def __init__(self, nonpos='mask'):
Transform.__init__(self)
self._nonpos = nonpos
self._clip = {"clip": True, "mask": False}[nonpos]
def transform_non_affine(self, a):
"""logit transform (base 10), masked or clipped"""
with np.errstate(divide="ignore", invalid="ignore"):
out = np.log10(a / (1 - a))
if self._clip: # See LogTransform for choice of clip value.
out[a <= 0] = -1000
out[1 <= a] = 1000
return out
def inverted(self):
return LogisticTransform(self._nonpos)
def __str__(self):
return "{}({!r})".format(type(self).__name__,
"clip" if self._clip else "mask")
class LogisticTransform(Transform):
input_dims = 1
output_dims = 1
is_separable = True
has_inverse = True
def __init__(self, nonpos='mask'):
Transform.__init__(self)
self._nonpos = nonpos
def transform_non_affine(self, a):
"""logistic transform (base 10)"""
return 1.0 / (1 + 10**(-a))
def inverted(self):
return LogitTransform(self._nonpos)
def __str__(self):
return "{}({!r})".format(type(self).__name__, self._nonpos)
class LogitScale(ScaleBase):
"""
Logit scale for data between zero and one, both excluded.
This scale is similar to a log scale close to zero and to one, and almost
linear around 0.5. It maps the interval ]0, 1[ onto ]-infty, +infty[.
"""
name = 'logit'
def __init__(self, axis, nonpos='mask'):
"""
*nonpos*: ['mask' | 'clip' ]
values beyond ]0, 1[ can be masked as invalid, or clipped to a number
very close to 0 or 1
"""
if nonpos not in ['mask', 'clip']:
raise ValueError("nonposx, nonposy kwarg must be 'mask' or 'clip'")
self._transform = LogitTransform(nonpos)
def get_transform(self):
"""
Return a :class:`LogitTransform` instance.
"""
return self._transform
def set_default_locators_and_formatters(self, axis):
# ..., 0.01, 0.1, 0.5, 0.9, 0.99, ...
axis.set_major_locator(LogitLocator())
axis.set_major_formatter(LogitFormatter())
axis.set_minor_locator(LogitLocator(minor=True))
axis.set_minor_formatter(LogitFormatter())
def limit_range_for_scale(self, vmin, vmax, minpos):
"""
Limit the domain to values between 0 and 1 (excluded).
"""
if not np.isfinite(minpos):
minpos = 1e-7 # This value should rarely if ever
# end up with a visible effect.
return (minpos if vmin <= 0 else vmin,
1 - minpos if vmax >= 1 else vmax)
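# Hedged usage sketch (added for illustration; not part of the original
# module): the 'logit' scale for probability-like data strictly inside (0, 1).
# The sample values are arbitrary.
def _example_logit_scale_usage():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.plot([0, 1, 2, 3], [0.01, 0.2, 0.8, 0.99])
    ax.set_yscale('logit')
    return fig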
_scale_mapping = {
'linear': LinearScale,
'log': LogScale,
'symlog': SymmetricalLogScale,
'logit': LogitScale,
}
def get_scale_names():
return sorted(_scale_mapping)
def scale_factory(scale, axis, **kwargs):
"""
Return a scale class by name.
ACCEPTS: [ %(names)s ]
"""
    if scale is None:
        scale = 'linear'
    scale = scale.lower()
if scale not in _scale_mapping:
raise ValueError("Unknown scale type '%s'" % scale)
return _scale_mapping[scale](axis, **kwargs)
scale_factory.__doc__ = cbook.dedent(scale_factory.__doc__) % \
{'names': " | ".join(get_scale_names())}
def register_scale(scale_class):
"""
Register a new kind of scale.
*scale_class* must be a subclass of :class:`ScaleBase`.
"""
_scale_mapping[scale_class.name] = scale_class
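# Hedged sketch of the subclassing pattern described in ScaleBase (added for
# illustration; not part of the original module): a minimal square-root scale
# that could be made available through register_scale.  The class name, the
# 'sqrt_example' key and the transform details are all invented.
class _SqrtScaleExample(ScaleBase):
    name = 'sqrt_example'
    class _SqrtTransform(Transform):
        input_dims = output_dims = 1
        is_separable = has_inverse = True
        def transform_non_affine(self, a):
            return np.sqrt(np.clip(a, 0, None))
        def inverted(self):
            return _SqrtScaleExample._SquareTransform()
    class _SquareTransform(Transform):
        input_dims = output_dims = 1
        is_separable = has_inverse = True
        def transform_non_affine(self, a):
            return np.power(a, 2)
        def inverted(self):
            return _SqrtScaleExample._SqrtTransform()
    def __init__(self, axis, **kwargs):
        pass
    def get_transform(self):
        return self._SqrtTransform()
    def set_default_locators_and_formatters(self, axis):
        axis.set_major_locator(AutoLocator())
        axis.set_major_formatter(ScalarFormatter())
        axis.set_minor_locator(NullLocator())
        axis.set_minor_formatter(NullFormatter())
    def limit_range_for_scale(self, vmin, vmax, minpos):
        return max(vmin, 0.0), vmax
# register_scale(_SqrtScaleExample) would expose ax.set_yscale('sqrt_example');
# the call is left out so this example has no side effect at import time.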
def get_scale_docs():
"""
Helper function for generating docstrings related to scales.
"""
docs = []
for name in get_scale_names():
scale_class = _scale_mapping[name]
docs.append(" '%s'" % name)
docs.append("")
class_docs = cbook.dedent(scale_class.__init__.__doc__)
class_docs = "".join([" %s\n" %
x for x in class_docs.split("\n")])
docs.append(class_docs)
docs.append("")
return "\n".join(docs)
docstring.interpd.update(
scale=' | '.join([repr(x) for x in get_scale_names()]),
scale_docs=get_scale_docs().rstrip(),
)
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/streamplot.py
"""
Streamline plotting for 2D vector fields.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
import numpy as np
import matplotlib
import matplotlib.cm as cm
import matplotlib.colors as mcolors
import matplotlib.collections as mcollections
import matplotlib.lines as mlines
import matplotlib.patches as patches
__all__ = ['streamplot']
def streamplot(axes, x, y, u, v, density=1, linewidth=None, color=None,
cmap=None, norm=None, arrowsize=1, arrowstyle='-|>',
minlength=0.1, transform=None, zorder=None, start_points=None,
maxlength=4.0, integration_direction='both'):
"""Draws streamlines of a vector flow.
*x*, *y* : 1d arrays
an *evenly spaced* grid.
*u*, *v* : 2d arrays
x and y-velocities. Number of rows should match length of y, and
the number of columns should match x.
*density* : float or 2-tuple
Controls the closeness of streamlines. When `density = 1`, the domain
is divided into a 30x30 grid---*density* linearly scales this grid.
Each cell in the grid can have, at most, one traversing streamline.
For different densities in each direction, use [density_x, density_y].
*linewidth* : numeric or 2d array
vary linewidth when given a 2d array with the same shape as velocities.
*color* : matplotlib color code, or 2d array
Streamline color. When given an array with the same shape as
velocities, *color* values are converted to colors using *cmap*.
*cmap* : :class:`~matplotlib.colors.Colormap`
Colormap used to plot streamlines and arrows. Only necessary when using
an array input for *color*.
*norm* : :class:`~matplotlib.colors.Normalize`
Normalize object used to scale luminance data to 0, 1. If None, stretch
(min, max) to (0, 1). Only necessary when *color* is an array.
*arrowsize* : float
Factor scale arrow size.
*arrowstyle* : str
Arrow style specification.
See :class:`~matplotlib.patches.FancyArrowPatch`.
*minlength* : float
Minimum length of streamline in axes coordinates.
*start_points*: Nx2 array
Coordinates of starting points for the streamlines.
In data coordinates, the same as the ``x`` and ``y`` arrays.
*zorder* : int
any number
*maxlength* : float
Maximum length of streamline in axes coordinates.
*integration_direction* : ['forward', 'backward', 'both']
Integrate the streamline in forward, backward or both directions.
Returns:
*stream_container* : StreamplotSet
Container object with attributes
- lines: `matplotlib.collections.LineCollection` of streamlines
- arrows: collection of `matplotlib.patches.FancyArrowPatch`
objects representing arrows half-way along stream
lines.
This container will probably change in the future to allow changes
to the colormap, alpha, etc. for both lines and arrows, but these
changes should be backward compatible.
"""
grid = Grid(x, y)
mask = StreamMask(density)
dmap = DomainMap(grid, mask)
if zorder is None:
zorder = mlines.Line2D.zorder
# default to data coordinates
if transform is None:
transform = axes.transData
if color is None:
color = axes._get_lines.get_next_color()
if linewidth is None:
linewidth = matplotlib.rcParams['lines.linewidth']
line_kw = {}
arrow_kw = dict(arrowstyle=arrowstyle, mutation_scale=10 * arrowsize)
if integration_direction not in ['both', 'forward', 'backward']:
errstr = ("Integration direction '%s' not recognised. "
"Expected 'both', 'forward' or 'backward'." %
integration_direction)
raise ValueError(errstr)
if integration_direction == 'both':
maxlength /= 2.
use_multicolor_lines = isinstance(color, np.ndarray)
if use_multicolor_lines:
if color.shape != grid.shape:
raise ValueError(
"If 'color' is given, must have the shape of 'Grid(x,y)'")
line_colors = []
color = np.ma.masked_invalid(color)
else:
line_kw['color'] = color
arrow_kw['color'] = color
if isinstance(linewidth, np.ndarray):
if linewidth.shape != grid.shape:
raise ValueError(
"If 'linewidth' is given, must have the shape of 'Grid(x,y)'")
line_kw['linewidth'] = []
else:
line_kw['linewidth'] = linewidth
arrow_kw['linewidth'] = linewidth
line_kw['zorder'] = zorder
arrow_kw['zorder'] = zorder
## Sanity checks.
if u.shape != grid.shape or v.shape != grid.shape:
raise ValueError("'u' and 'v' must be of shape 'Grid(x,y)'")
u = np.ma.masked_invalid(u)
v = np.ma.masked_invalid(v)
integrate = get_integrator(u, v, dmap, minlength, maxlength,
integration_direction)
trajectories = []
if start_points is None:
for xm, ym in _gen_starting_points(mask.shape):
if mask[ym, xm] == 0:
xg, yg = dmap.mask2grid(xm, ym)
t = integrate(xg, yg)
if t is not None:
trajectories.append(t)
else:
sp2 = np.asanyarray(start_points, dtype=float).copy()
# Check if start_points are outside the data boundaries
for xs, ys in sp2:
if not (grid.x_origin <= xs <= grid.x_origin + grid.width
and grid.y_origin <= ys <= grid.y_origin + grid.height):
raise ValueError("Starting point ({}, {}) outside of data "
"boundaries".format(xs, ys))
# Convert start_points from data to array coords
# Shift the seed points from the bottom left of the data so that
# data2grid works properly.
sp2[:, 0] -= grid.x_origin
sp2[:, 1] -= grid.y_origin
for xs, ys in sp2:
xg, yg = dmap.data2grid(xs, ys)
t = integrate(xg, yg)
if t is not None:
trajectories.append(t)
if use_multicolor_lines:
if norm is None:
norm = mcolors.Normalize(color.min(), color.max())
if cmap is None:
cmap = cm.get_cmap(matplotlib.rcParams['image.cmap'])
else:
cmap = cm.get_cmap(cmap)
streamlines = []
arrows = []
for t in trajectories:
tgx = np.array(t[0])
tgy = np.array(t[1])
# Rescale from grid-coordinates to data-coordinates.
tx, ty = dmap.grid2data(*np.array(t))
tx += grid.x_origin
ty += grid.y_origin
points = np.transpose([tx, ty]).reshape(-1, 1, 2)
streamlines.extend(np.hstack([points[:-1], points[1:]]))
# Add arrows half way along each trajectory.
s = np.cumsum(np.sqrt(np.diff(tx) ** 2 + np.diff(ty) ** 2))
n = np.searchsorted(s, s[-1] / 2.)
arrow_tail = (tx[n], ty[n])
arrow_head = (np.mean(tx[n:n + 2]), np.mean(ty[n:n + 2]))
if isinstance(linewidth, np.ndarray):
line_widths = interpgrid(linewidth, tgx, tgy)[:-1]
line_kw['linewidth'].extend(line_widths)
arrow_kw['linewidth'] = line_widths[n]
if use_multicolor_lines:
color_values = interpgrid(color, tgx, tgy)[:-1]
line_colors.append(color_values)
arrow_kw['color'] = cmap(norm(color_values[n]))
p = patches.FancyArrowPatch(
arrow_tail, arrow_head, transform=transform, **arrow_kw)
axes.add_patch(p)
arrows.append(p)
lc = mcollections.LineCollection(
streamlines, transform=transform, **line_kw)
lc.sticky_edges.x[:] = [grid.x_origin, grid.x_origin + grid.width]
lc.sticky_edges.y[:] = [grid.y_origin, grid.y_origin + grid.height]
if use_multicolor_lines:
lc.set_array(np.ma.hstack(line_colors))
lc.set_cmap(cmap)
lc.set_norm(norm)
axes.add_collection(lc)
axes.autoscale_view()
ac = matplotlib.collections.PatchCollection(arrows)
stream_container = StreamplotSet(lc, ac)
return stream_container
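# Hedged usage sketch (added for illustration; not part of the original
# module): driving streamplot through the Axes wrapper with a simple
# circulating flow on an evenly spaced grid, colouring lines by speed.
# The vector field is invented.
def _example_streamplot_usage():
    import matplotlib.pyplot as plt
    y, x = np.mgrid[-3:3:100j, -3:3:100j]
    u, v = -y, x
    speed = np.sqrt(u ** 2 + v ** 2)
    fig, ax = plt.subplots()
    ax.streamplot(x, y, u, v, color=speed, cmap='viridis',
                  density=1.5, linewidth=1)
    return fig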
class StreamplotSet(object):
def __init__(self, lines, arrows, **kwargs):
self.lines = lines
self.arrows = arrows
# Coordinate definitions
# ========================
class DomainMap(object):
"""Map representing different coordinate systems.
Coordinate definitions:
* axes-coordinates goes from 0 to 1 in the domain.
* data-coordinates are specified by the input x-y coordinates.
* grid-coordinates goes from 0 to N and 0 to M for an N x M grid,
where N and M match the shape of the input data.
* mask-coordinates goes from 0 to N and 0 to M for an N x M mask,
where N and M are user-specified to control the density of streamlines.
This class also has methods for adding trajectories to the StreamMask.
Before adding a trajectory, run `start_trajectory` to keep track of regions
crossed by a given trajectory. Later, if you decide the trajectory is bad
(e.g., if the trajectory is very short) just call `undo_trajectory`.
"""
def __init__(self, grid, mask):
self.grid = grid
self.mask = mask
# Constants for conversion between grid- and mask-coordinates
self.x_grid2mask = (mask.nx - 1) / grid.nx
self.y_grid2mask = (mask.ny - 1) / grid.ny
self.x_mask2grid = 1. / self.x_grid2mask
self.y_mask2grid = 1. / self.y_grid2mask
self.x_data2grid = 1. / grid.dx
self.y_data2grid = 1. / grid.dy
def grid2mask(self, xi, yi):
"""Return nearest space in mask-coords from given grid-coords."""
return (int((xi * self.x_grid2mask) + 0.5),
int((yi * self.y_grid2mask) + 0.5))
def mask2grid(self, xm, ym):
return xm * self.x_mask2grid, ym * self.y_mask2grid
def data2grid(self, xd, yd):
return xd * self.x_data2grid, yd * self.y_data2grid
def grid2data(self, xg, yg):
return xg / self.x_data2grid, yg / self.y_data2grid
def start_trajectory(self, xg, yg):
xm, ym = self.grid2mask(xg, yg)
self.mask._start_trajectory(xm, ym)
def reset_start_point(self, xg, yg):
xm, ym = self.grid2mask(xg, yg)
self.mask._current_xy = (xm, ym)
def update_trajectory(self, xg, yg):
if not self.grid.within_grid(xg, yg):
raise InvalidIndexError
xm, ym = self.grid2mask(xg, yg)
self.mask._update_trajectory(xm, ym)
def undo_trajectory(self):
self.mask._undo_trajectory()
class Grid(object):
"""Grid of data."""
def __init__(self, x, y):
if x.ndim == 1:
pass
elif x.ndim == 2:
x_row = x[0, :]
if not np.allclose(x_row, x):
raise ValueError("The rows of 'x' must be equal")
x = x_row
else:
raise ValueError("'x' can have at maximum 2 dimensions")
if y.ndim == 1:
pass
elif y.ndim == 2:
y_col = y[:, 0]
if not np.allclose(y_col, y.T):
raise ValueError("The columns of 'y' must be equal")
y = y_col
else:
raise ValueError("'y' can have at maximum 2 dimensions")
self.nx = len(x)
self.ny = len(y)
self.dx = x[1] - x[0]
self.dy = y[1] - y[0]
self.x_origin = x[0]
self.y_origin = y[0]
self.width = x[-1] - x[0]
self.height = y[-1] - y[0]
@property
def shape(self):
return self.ny, self.nx
def within_grid(self, xi, yi):
"""Return True if point is a valid index of grid."""
# Note that xi/yi can be floats; so, for example, we can't simply check
# `xi < self.nx` since `xi` can be `self.nx - 1 < xi < self.nx`
return xi >= 0 and xi <= self.nx - 1 and yi >= 0 and yi <= self.ny - 1
class StreamMask(object):
"""Mask to keep track of discrete regions crossed by streamlines.
The resolution of this grid determines the approximate spacing between
trajectories. Streamlines are only allowed to pass through zeroed cells:
When a streamline enters a cell, that cell is set to 1, and no new
streamlines are allowed to enter.
"""
def __init__(self, density):
if np.isscalar(density):
if density <= 0:
raise ValueError("If a scalar, 'density' must be positive")
self.nx = self.ny = int(30 * density)
else:
if len(density) != 2:
raise ValueError("'density' can have at maximum 2 dimensions")
self.nx = int(30 * density[0])
self.ny = int(30 * density[1])
self._mask = np.zeros((self.ny, self.nx))
self.shape = self._mask.shape
self._current_xy = None
def __getitem__(self, *args):
return self._mask.__getitem__(*args)
def _start_trajectory(self, xm, ym):
"""Start recording streamline trajectory"""
self._traj = []
self._update_trajectory(xm, ym)
def _undo_trajectory(self):
"""Remove current trajectory from mask"""
for t in self._traj:
self._mask.__setitem__(t, 0)
def _update_trajectory(self, xm, ym):
"""Update current trajectory position in mask.
If the new position has already been filled, raise `InvalidIndexError`.
"""
if self._current_xy != (xm, ym):
if self[ym, xm] == 0:
self._traj.append((ym, xm))
self._mask[ym, xm] = 1
self._current_xy = (xm, ym)
else:
raise InvalidIndexError
class InvalidIndexError(Exception):
pass
class TerminateTrajectory(Exception):
pass
# Integrator definitions
#========================
def get_integrator(u, v, dmap, minlength, maxlength, integration_direction):
# rescale velocity onto grid-coordinates for integrations.
u, v = dmap.data2grid(u, v)
# speed (path length) will be in axes-coordinates
u_ax = u / dmap.grid.nx
v_ax = v / dmap.grid.ny
speed = np.ma.sqrt(u_ax ** 2 + v_ax ** 2)
def forward_time(xi, yi):
ds_dt = interpgrid(speed, xi, yi)
if ds_dt == 0:
raise TerminateTrajectory()
dt_ds = 1. / ds_dt
ui = interpgrid(u, xi, yi)
vi = interpgrid(v, xi, yi)
return ui * dt_ds, vi * dt_ds
def backward_time(xi, yi):
dxi, dyi = forward_time(xi, yi)
return -dxi, -dyi
def integrate(x0, y0):
"""Return x, y grid-coordinates of trajectory based on starting point.
Integrate both forward and backward in time from starting point in
grid coordinates.
Integration is terminated when a trajectory reaches a domain boundary
or when it crosses into an already occupied cell in the StreamMask. The
resulting trajectory is None if it is shorter than `minlength`.
"""
stotal, x_traj, y_traj = 0., [], []
try:
dmap.start_trajectory(x0, y0)
except InvalidIndexError:
return None
if integration_direction in ['both', 'backward']:
s, xt, yt = _integrate_rk12(x0, y0, dmap, backward_time, maxlength)
stotal += s
x_traj += xt[::-1]
y_traj += yt[::-1]
if integration_direction in ['both', 'forward']:
dmap.reset_start_point(x0, y0)
s, xt, yt = _integrate_rk12(x0, y0, dmap, forward_time, maxlength)
if len(x_traj) > 0:
xt = xt[1:]
yt = yt[1:]
stotal += s
x_traj += xt
y_traj += yt
if stotal > minlength:
return x_traj, y_traj
else: # reject short trajectories
dmap.undo_trajectory()
return None
return integrate
def _integrate_rk12(x0, y0, dmap, f, maxlength):
"""2nd-order Runge-Kutta algorithm with adaptive step size.
This method is also referred to as the improved Euler's method, or Heun's
method. This method is favored over higher-order methods because:
1. To get decent looking trajectories and to sample every mask cell
on the trajectory we need a small timestep, so a lower order
solver doesn't hurt us unless the data is *very* high resolution.
In fact, for cases where the user inputs
data smaller or of similar grid size to the mask grid, the higher
order corrections are negligible because of the very fast linear
interpolation used in `interpgrid`.
2. For high resolution input data (i.e. beyond the mask
resolution), we must reduce the timestep. Therefore, an adaptive
timestep is more suited to the problem as this would be very hard
to judge automatically otherwise.
This integrator is about 1.5 - 2x as fast as both the RK4 and RK45
solvers in most setups on my machine. I would recommend removing the
other two to keep things simple.
"""
# This error is below that needed to match the RK4 integrator. It
# is set for visual reasons -- too low and corners start
# appearing ugly and jagged. Can be tuned.
maxerror = 0.003
# This limit is important (for all integrators) to avoid the
# trajectory skipping some mask cells. We could relax this
# condition if we use the code which is commented out below to
# increment the location gradually. However, due to the efficient
# nature of the interpolation, this doesn't boost speed by much
# for quite a bit of complexity.
maxds = min(1. / dmap.mask.nx, 1. / dmap.mask.ny, 0.1)
ds = maxds
stotal = 0
xi = x0
yi = y0
xf_traj = []
yf_traj = []
while dmap.grid.within_grid(xi, yi):
xf_traj.append(xi)
yf_traj.append(yi)
try:
k1x, k1y = f(xi, yi)
k2x, k2y = f(xi + ds * k1x,
yi + ds * k1y)
except IndexError:
# Out of the domain on one of the intermediate integration steps.
# Take an Euler step to the boundary to improve neatness.
ds, xf_traj, yf_traj = _euler_step(xf_traj, yf_traj, dmap, f)
stotal += ds
break
except TerminateTrajectory:
break
dx1 = ds * k1x
dy1 = ds * k1y
dx2 = ds * 0.5 * (k1x + k2x)
dy2 = ds * 0.5 * (k1y + k2y)
nx, ny = dmap.grid.shape
# Error is normalized to the axes coordinates
error = np.sqrt(((dx2 - dx1) / nx) ** 2 + ((dy2 - dy1) / ny) ** 2)
# Only save step if within error tolerance
if error < maxerror:
xi += dx2
yi += dy2
try:
dmap.update_trajectory(xi, yi)
except InvalidIndexError:
break
if (stotal + ds) > maxlength:
break
stotal += ds
# recalculate stepsize based on step error
if error == 0:
ds = maxds
else:
ds = min(maxds, 0.85 * ds * (maxerror / error) ** 0.5)
return stotal, xf_traj, yf_traj
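# Hedged illustration (added; not part of the original module) of the adaptive
# Heun/RK12 idea used by _integrate_rk12 above, stripped of the grid/mask
# bookkeeping and applied to the scalar ODE dy/dt = -y.  All names and
# tolerances here are ad hoc.
def _example_rk12_scalar(y0=1.0, t_end=5.0, maxerror=1e-4, maxds=0.1):
    f = lambda yy: -yy
    t, y, ds = 0.0, y0, maxds
    out = [(t, y)]
    while t < t_end:
        k1 = f(y)
        k2 = f(y + ds * k1)
        dy1 = ds * k1                # 1st-order (Euler) estimate
        dy2 = ds * 0.5 * (k1 + k2)   # 2nd-order (Heun) estimate
        error = abs(dy2 - dy1)
        if error < maxerror:         # accept the higher-order step
            t += ds
            y += dy2
            out.append((t, y))
        # grow/shrink the step from the error estimate, as in the code above
        if error == 0:
            ds = maxds
        else:
            ds = min(maxds, 0.85 * ds * (maxerror / error) ** 0.5)
    return out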
def _euler_step(xf_traj, yf_traj, dmap, f):
"""Simple Euler integration step that extends streamline to boundary."""
ny, nx = dmap.grid.shape
xi = xf_traj[-1]
yi = yf_traj[-1]
cx, cy = f(xi, yi)
if cx == 0:
dsx = np.inf
elif cx < 0:
dsx = xi / -cx
else:
dsx = (nx - 1 - xi) / cx
if cy == 0:
dsy = np.inf
elif cy < 0:
dsy = yi / -cy
else:
dsy = (ny - 1 - yi) / cy
ds = min(dsx, dsy)
xf_traj.append(xi + cx * ds)
yf_traj.append(yi + cy * ds)
return ds, xf_traj, yf_traj
# Utility functions
# ========================
def interpgrid(a, xi, yi):
"""Fast 2D, linear interpolation on an integer grid"""
Ny, Nx = np.shape(a)
if isinstance(xi, np.ndarray):
x = xi.astype(int)
y = yi.astype(int)
# Check that xn, yn don't exceed max index
xn = np.clip(x + 1, 0, Nx - 1)
yn = np.clip(y + 1, 0, Ny - 1)
else:
x = int(xi)
y = int(yi)
# conditional is faster than clipping for integers
if x == (Nx - 2):
xn = x
else:
xn = x + 1
if y == (Ny - 2):
yn = y
else:
yn = y + 1
a00 = a[y, x]
a01 = a[y, xn]
a10 = a[yn, x]
a11 = a[yn, xn]
xt = xi - x
yt = yi - y
a0 = a00 * (1 - xt) + a01 * xt
a1 = a10 * (1 - xt) + a11 * xt
ai = a0 * (1 - yt) + a1 * yt
if not isinstance(xi, np.ndarray):
if np.ma.is_masked(ai):
raise TerminateTrajectory
return ai
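# Hedged illustration (added; not part of the original module): interpgrid is
# plain bilinear interpolation in integer grid coordinates.  The 3x3 array of
# values below is invented.
def _example_interpgrid():
    a = np.array([[0., 1., 2.],
                  [3., 4., 5.],
                  [6., 7., 8.]])
    centre = interpgrid(a, 0.5, 0.5)   # 2.0, the mean of the four nearest cells
    exact = interpgrid(a, 1.0, 1.0)    # 4.0, an exact grid point
    return centre, exact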
def _gen_starting_points(shape):
"""Yield starting points for streamlines.
Trying points on the boundary first gives higher quality streamlines.
This algorithm starts with a point on the mask corner and spirals inward.
This algorithm is inefficient, but fast compared to rest of streamplot.
"""
ny, nx = shape
xfirst = 0
yfirst = 1
xlast = nx - 1
ylast = ny - 1
x, y = 0, 0
i = 0
direction = 'right'
for i in xrange(nx * ny):
yield x, y
if direction == 'right':
x += 1
if x >= xlast:
xlast -= 1
direction = 'up'
elif direction == 'up':
y += 1
if y >= ylast:
ylast -= 1
direction = 'left'
elif direction == 'left':
x -= 1
if x <= xfirst:
xfirst += 1
direction = 'down'
elif direction == 'down':
y -= 1
if y <= yfirst:
yfirst += 1
direction = 'right'
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backend_bases.py
"""
Abstract base classes define the primitives that renderers and
graphics contexts must implement to serve as a matplotlib backend
:class:`RendererBase`
An abstract base class to handle drawing/rendering operations.
:class:`FigureCanvasBase`
The abstraction layer that separates the
:class:`matplotlib.figure.Figure` from the backend specific
details like a user interface drawing area
:class:`GraphicsContextBase`
An abstract base class that provides color, line styles, etc...
:class:`Event`
The base class for all of the matplotlib event
handling. Derived classes such as :class:`KeyEvent` and
:class:`MouseEvent` store the meta data like keys and buttons
pressed, x and y locations in pixel and
:class:`~matplotlib.axes.Axes` coordinates.
:class:`ShowBase`
The base class for the Show class of each interactive backend;
the 'show' callable is then set to Show.__call__, inherited from
ShowBase.
:class:`ToolContainerBase`
The base class for the Toolbar class of each interactive backend.
:class:`StatusbarBase`
The base class for the messaging area.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
from contextlib import contextmanager
from functools import partial
import importlib
import io
import os
import sys
import time
import warnings
from weakref import WeakKeyDictionary
import numpy as np
from matplotlib import (
backend_tools as tools, cbook, colors, textpath, tight_bbox, transforms,
widgets, get_backend, is_interactive, rcParams)
from matplotlib._pylab_helpers import Gcf
from matplotlib.transforms import Bbox, TransformedBbox, Affine2D
from matplotlib.path import Path
try:
from PIL import Image
_has_pil = True
del Image
except ImportError:
_has_pil = False
_default_filetypes = {
'ps': 'Postscript',
'eps': 'Encapsulated Postscript',
'pdf': 'Portable Document Format',
'pgf': 'PGF code for LaTeX',
'png': 'Portable Network Graphics',
'raw': 'Raw RGBA bitmap',
'rgba': 'Raw RGBA bitmap',
'svg': 'Scalable Vector Graphics',
'svgz': 'Scalable Vector Graphics'
}
_default_backends = {
'ps': 'matplotlib.backends.backend_ps',
'eps': 'matplotlib.backends.backend_ps',
'pdf': 'matplotlib.backends.backend_pdf',
'pgf': 'matplotlib.backends.backend_pgf',
'png': 'matplotlib.backends.backend_agg',
'raw': 'matplotlib.backends.backend_agg',
'rgba': 'matplotlib.backends.backend_agg',
'svg': 'matplotlib.backends.backend_svg',
'svgz': 'matplotlib.backends.backend_svg',
}
def register_backend(format, backend, description=None):
"""
Register a backend for saving to a given file format.
Parameters
----------
format : str
File extension
backend : module string or canvas class
Backend for handling file output
description : str, optional
Description of the file type. Defaults to an empty string
"""
if description is None:
description = ''
_default_backends[format] = backend
_default_filetypes[format] = description
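# Hedged usage sketch (added for illustration; not part of the original
# module): registering a backend module for an extra file-format key and
# reading the resolved canvas class back.  The 'myformat' key is invented;
# whether a canvas can actually print that format is up to the canvas itself.
def _example_register_backend():
    register_backend('myformat', 'matplotlib.backends.backend_agg',
                     'Hypothetical format handled by the Agg canvas')
    return get_registered_canvas_class('myformat')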
def get_registered_canvas_class(format):
"""
Return the registered default canvas for given file format.
Handles deferred import of required backend.
"""
if format not in _default_backends:
return None
backend_class = _default_backends[format]
if isinstance(backend_class, six.string_types):
backend_class = importlib.import_module(backend_class).FigureCanvas
_default_backends[format] = backend_class
return backend_class
class _Backend(object):
# A backend can be defined by using the following pattern:
#
# @_Backend.export
# class FooBackend(_Backend):
# # override the attributes and methods documented below.
# The following attributes and methods must be overridden by subclasses.
# The `FigureCanvas` and `FigureManager` classes must be defined.
FigureCanvas = None
FigureManager = None
# The following methods must be left as None for non-interactive backends.
# For interactive backends, `trigger_manager_draw` should be a function
# taking a manager as argument and triggering a canvas draw, and `mainloop`
# should be a function taking no argument and starting the backend main
# loop.
trigger_manager_draw = None
mainloop = None
# The following methods will be automatically defined and exported, but
# can be overridden.
@classmethod
def new_figure_manager(cls, num, *args, **kwargs):
"""Create a new figure manager instance.
"""
# This import needs to happen here due to circular imports.
from matplotlib.figure import Figure
fig_cls = kwargs.pop('FigureClass', Figure)
fig = fig_cls(*args, **kwargs)
return cls.new_figure_manager_given_figure(num, fig)
@classmethod
def new_figure_manager_given_figure(cls, num, figure):
"""Create a new figure manager instance for the given figure.
"""
canvas = cls.FigureCanvas(figure)
manager = cls.FigureManager(canvas, num)
return manager
@classmethod
def draw_if_interactive(cls):
if cls.trigger_manager_draw is not None and is_interactive():
manager = Gcf.get_active()
if manager:
cls.trigger_manager_draw(manager)
@classmethod
def show(cls, block=None):
"""Show all figures.
`show` blocks by calling `mainloop` if *block* is ``True``, or if it
is ``None`` and we are neither in IPython's ``%pylab`` mode, nor in
`interactive` mode.
"""
if cls.mainloop is None:
return
managers = Gcf.get_all_fig_managers()
if not managers:
return
for manager in managers:
manager.show()
if block is None:
# Hack: Are we in IPython's pylab mode?
from matplotlib import pyplot
try:
# IPython versions >= 0.10 tack the _needmain attribute onto
# pyplot.show, and always set it to False, when in %pylab mode.
ipython_pylab = not pyplot.show._needmain
except AttributeError:
ipython_pylab = False
block = not ipython_pylab and not is_interactive()
# TODO: The above is a hack to get the WebAgg backend working with
# ipython's `%pylab` mode until proper integration is implemented.
if get_backend() == "WebAgg":
block = True
if block:
cls.mainloop()
# This method is the one actually exporting the required methods.
@staticmethod
def export(cls):
for name in ["FigureCanvas",
"FigureManager",
"new_figure_manager",
"new_figure_manager_given_figure",
"draw_if_interactive",
"show"]:
setattr(sys.modules[cls.__module__], name, getattr(cls, name))
# For back-compatibility, generate a shim `Show` class.
class Show(ShowBase):
def mainloop(self):
return cls.mainloop()
setattr(sys.modules[cls.__module__], "Show", Show)
return cls
class ShowBase(_Backend):
"""
Simple base class to generate a show() callable in backends.
Subclass must override mainloop() method.
"""
def __call__(self, block=None):
return self.show(block=block)
class RendererBase(object):
"""An abstract base class to handle drawing/rendering operations.
The following methods must be implemented in the backend for full
functionality (though just implementing :meth:`draw_path` alone would
give a highly capable backend):
* :meth:`draw_path`
* :meth:`draw_image`
* :meth:`draw_gouraud_triangle`
The following methods *should* be implemented in the backend for
optimization reasons:
* :meth:`draw_text`
* :meth:`draw_markers`
* :meth:`draw_path_collection`
* :meth:`draw_quad_mesh`
"""
def __init__(self):
self._texmanager = None
self._text2path = textpath.TextToPath()
def open_group(self, s, gid=None):
"""
Open a grouping element with label *s*. If *gid* is given, use
*gid* as the id of the group. Is only currently used by
:mod:`~matplotlib.backends.backend_svg`.
"""
def close_group(self, s):
"""
Close a grouping element with label *s*
Is only currently used by :mod:`~matplotlib.backends.backend_svg`
"""
def draw_path(self, gc, path, transform, rgbFace=None):
"""
Draws a :class:`~matplotlib.path.Path` instance using the
given affine transform.
"""
raise NotImplementedError
def draw_markers(self, gc, marker_path, marker_trans, path,
trans, rgbFace=None):
"""
Draws a marker at each of the vertices in path. This includes
all vertices, including control points on curves. To avoid
that behavior, those vertices should be removed before calling
this function.
This provides a fallback implementation of draw_markers that
makes multiple calls to :meth:`draw_path`. Some backends may
want to override this method in order to draw the marker only
once and reuse it multiple times.
Parameters
----------
gc : `GraphicsContextBase`
The graphics context
marker_trans : `matplotlib.transforms.Transform`
An affine transform applied to the marker.
trans : `matplotlib.transforms.Transform`
An affine transform applied to the path.
"""
for vertices, codes in path.iter_segments(trans, simplify=False):
if len(vertices):
x, y = vertices[-2:]
self.draw_path(gc, marker_path,
marker_trans +
transforms.Affine2D().translate(x, y),
rgbFace)
def draw_path_collection(self, gc, master_transform, paths, all_transforms,
offsets, offsetTrans, facecolors, edgecolors,
linewidths, linestyles, antialiaseds, urls,
offset_position):
"""
Draws a collection of paths selecting drawing properties from
the lists *facecolors*, *edgecolors*, *linewidths*,
*linestyles* and *antialiaseds*. *offsets* is a list of
offsets to apply to each of the paths. The offsets in
*offsets* are first transformed by *offsetTrans* before being
applied. *offset_position* may be either "screen" or "data"
depending on the space that the offsets are in.
This provides a fallback implementation of
:meth:`draw_path_collection` that makes multiple calls to
:meth:`draw_path`. Some backends may want to override this in
order to render each set of path data only once, and then
reference that path multiple times with the different offsets,
colors, styles etc. The generator methods
:meth:`_iter_collection_raw_paths` and
:meth:`_iter_collection` are provided to help with (and
standardize) the implementation across backends. It is highly
recommended to use those generators, so that changes to the
behavior of :meth:`draw_path_collection` can be made globally.
"""
path_ids = []
for path, transform in self._iter_collection_raw_paths(
master_transform, paths, all_transforms):
path_ids.append((path, transforms.Affine2D(transform)))
for xo, yo, path_id, gc0, rgbFace in self._iter_collection(
gc, master_transform, all_transforms, path_ids, offsets,
offsetTrans, facecolors, edgecolors, linewidths, linestyles,
antialiaseds, urls, offset_position):
path, transform = path_id
transform = transforms.Affine2D(
transform.get_matrix()).translate(xo, yo)
self.draw_path(gc0, path, transform, rgbFace)
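    # Example (an illustrative sketch, not part of matplotlib): a backend that
    # wants to define each raw path once and then reference it per offset could
    # override this method using the two helper generators; ``self._define``
    # and ``self._use`` are hypothetical backend primitives.
    #
    #     def draw_path_collection(self, gc, master_transform, paths,
    #                              all_transforms, offsets, offsetTrans,
    #                              facecolors, edgecolors, linewidths,
    #                              linestyles, antialiaseds, urls,
    #                              offset_position):
    #         path_ids = [self._define(path, transform)
    #                     for path, transform
    #                     in self._iter_collection_raw_paths(
    #                         master_transform, paths, all_transforms)]
    #         for xo, yo, path_id, gc0, rgbFace in self._iter_collection(
    #                 gc, master_transform, all_transforms, path_ids, offsets,
    #                 offsetTrans, facecolors, edgecolors, linewidths,
    #                 linestyles, antialiaseds, urls, offset_position):
    #             self._use(path_id, xo, yo, gc0, rgbFace)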
def draw_quad_mesh(self, gc, master_transform, meshWidth, meshHeight,
coordinates, offsets, offsetTrans, facecolors,
antialiased, edgecolors):
"""
This provides a fallback implementation of
:meth:`draw_quad_mesh` that generates paths and then calls
:meth:`draw_path_collection`.
"""
from matplotlib.collections import QuadMesh
paths = QuadMesh.convert_mesh_to_paths(
meshWidth, meshHeight, coordinates)
if edgecolors is None:
edgecolors = facecolors
linewidths = np.array([gc.get_linewidth()], float)
return self.draw_path_collection(
gc, master_transform, paths, [], offsets, offsetTrans, facecolors,
edgecolors, linewidths, [], [antialiased], [None], 'screen')
def draw_gouraud_triangle(self, gc, points, colors, transform):
"""
Draw a Gouraud-shaded triangle.
Parameters
----------
points : array_like, shape=(3, 2)
Array of (x, y) points for the triangle.
colors : array_like, shape=(3, 4)
RGBA colors for each point of the triangle.
transform : `matplotlib.transforms.Transform`
An affine transform to apply to the points.
"""
raise NotImplementedError
def draw_gouraud_triangles(self, gc, triangles_array, colors_array,
transform):
"""
Draws a series of Gouraud triangles.
Parameters
----------
        triangles_array : array_like, shape=(N, 3, 2)
            Array of *N* (x, y) points for the triangles.
        colors_array : array_like, shape=(N, 3, 4)
            Array of *N* RGBA colors for each point of the triangles.
transform : `matplotlib.transforms.Transform`
An affine transform to apply to the points.
"""
transform = transform.frozen()
for tri, col in zip(triangles_array, colors_array):
self.draw_gouraud_triangle(gc, tri, col, transform)
def _iter_collection_raw_paths(self, master_transform, paths,
all_transforms):
"""
This is a helper method (along with :meth:`_iter_collection`) to make
it easier to write a space-efficient :meth:`draw_path_collection`
implementation in a backend.
This method yields all of the base path/transform
combinations, given a master transform, a list of paths and
list of transforms.
The arguments should be exactly what is passed in to
:meth:`draw_path_collection`.
The backend should take each yielded path and transform and
create an object that can be referenced (reused) later.
"""
Npaths = len(paths)
Ntransforms = len(all_transforms)
N = max(Npaths, Ntransforms)
if Npaths == 0:
return
transform = transforms.IdentityTransform()
for i in xrange(N):
path = paths[i % Npaths]
if Ntransforms:
transform = Affine2D(all_transforms[i % Ntransforms])
yield path, transform + master_transform
def _iter_collection_uses_per_path(self, paths, all_transforms,
offsets, facecolors, edgecolors):
"""
Compute how many times each raw path object returned by
_iter_collection_raw_paths would be used when calling
_iter_collection. This is intended for the backend to decide
on the tradeoff between using the paths in-line and storing
them once and reusing. Rounds up in case the number of uses
is not the same for every path.
"""
Npaths = len(paths)
if Npaths == 0 or (len(facecolors) == 0 and len(edgecolors) == 0):
return 0
Npath_ids = max(Npaths, len(all_transforms))
N = max(Npath_ids, len(offsets))
return (N + Npath_ids - 1) // Npath_ids
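    # Example (illustrative): with 2 paths, no per-path transforms and
    # 5 offsets, Npath_ids == 2 and N == 5, so each raw path is used
    # (5 + 2 - 1) // 2 == 3 times (rounded up, as documented above).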
def _iter_collection(self, gc, master_transform, all_transforms,
path_ids, offsets, offsetTrans, facecolors,
edgecolors, linewidths, linestyles,
antialiaseds, urls, offset_position):
"""
This is a helper method (along with
:meth:`_iter_collection_raw_paths`) to make it easier to write
a space-efficient :meth:`draw_path_collection` implementation in a
backend.
This method yields all of the path, offset and graphics
context combinations to draw the path collection. The caller
should already have looped over the results of
:meth:`_iter_collection_raw_paths` to draw this collection.
The arguments should be the same as that passed into
:meth:`draw_path_collection`, with the exception of
*path_ids*, which is a list of arbitrary objects that the
backend will use to reference one of the paths created in the
:meth:`_iter_collection_raw_paths` stage.
Each yielded result is of the form::
xo, yo, path_id, gc, rgbFace
where *xo*, *yo* is an offset; *path_id* is one of the elements of
*path_ids*; *gc* is a graphics context and *rgbFace* is a color to
use for filling the path.
"""
Ntransforms = len(all_transforms)
Npaths = len(path_ids)
Noffsets = len(offsets)
N = max(Npaths, Noffsets)
Nfacecolors = len(facecolors)
Nedgecolors = len(edgecolors)
Nlinewidths = len(linewidths)
Nlinestyles = len(linestyles)
Naa = len(antialiaseds)
Nurls = len(urls)
if (Nfacecolors == 0 and Nedgecolors == 0) or Npaths == 0:
return
if Noffsets:
toffsets = offsetTrans.transform(offsets)
gc0 = self.new_gc()
gc0.copy_properties(gc)
if Nfacecolors == 0:
rgbFace = None
if Nedgecolors == 0:
gc0.set_linewidth(0.0)
xo, yo = 0, 0
for i in xrange(N):
path_id = path_ids[i % Npaths]
if Noffsets:
xo, yo = toffsets[i % Noffsets]
if offset_position == 'data':
if Ntransforms:
transform = (
Affine2D(all_transforms[i % Ntransforms]) +
master_transform)
else:
transform = master_transform
xo, yo = transform.transform_point((xo, yo))
xp, yp = transform.transform_point((0, 0))
xo = -(xp - xo)
yo = -(yp - yo)
if not (np.isfinite(xo) and np.isfinite(yo)):
continue
if Nfacecolors:
rgbFace = facecolors[i % Nfacecolors]
if Nedgecolors:
if Nlinewidths:
gc0.set_linewidth(linewidths[i % Nlinewidths])
if Nlinestyles:
gc0.set_dashes(*linestyles[i % Nlinestyles])
fg = edgecolors[i % Nedgecolors]
if len(fg) == 4:
if fg[3] == 0.0:
gc0.set_linewidth(0)
else:
gc0.set_foreground(fg)
else:
gc0.set_foreground(fg)
if rgbFace is not None and len(rgbFace) == 4:
if rgbFace[3] == 0:
rgbFace = None
gc0.set_antialiased(antialiaseds[i % Naa])
if Nurls:
gc0.set_url(urls[i % Nurls])
yield xo, yo, path_id, gc0, rgbFace
gc0.restore()
def get_image_magnification(self):
"""
Get the factor by which to magnify images passed to :meth:`draw_image`.
Allows a backend to have images at a different resolution to other
artists.
"""
return 1.0
def draw_image(self, gc, x, y, im, transform=None):
"""
Draw an RGBA image.
Parameters
----------
gc : `GraphicsContextBase`
a graphics context with clipping information.
x : scalar
the distance in physical units (i.e., dots or pixels) from the left
hand side of the canvas.
y : scalar
the distance in physical units (i.e., dots or pixels) from the
bottom side of the canvas.
im : array_like, shape=(N, M, 4), dtype=np.uint8
An array of RGBA pixels.
transform : `matplotlib.transforms.Affine2DBase`
If and only if the concrete backend is written such that
:meth:`option_scale_image` returns ``True``, an affine
transformation *may* be passed to :meth:`draw_image`. It takes the
form of a :class:`~matplotlib.transforms.Affine2DBase` instance.
The translation vector of the transformation is given in physical
units (i.e., dots or pixels). Note that the transformation does not
override `x` and `y`, and has to be applied *before* translating
the result by `x` and `y` (this can be accomplished by adding `x`
and `y` to the translation vector defined by `transform`).
"""
raise NotImplementedError
def option_image_nocomposite(self):
"""
        Override this method for renderers that do not necessarily always
        want to rescale and composite raster images (like SVG, PDF, or PS).
"""
return False
def option_scale_image(self):
"""
        Override this method for renderers that support arbitrary affine
        transformations in :meth:`draw_image` (most vector backends).
"""
return False
def draw_tex(self, gc, x, y, s, prop, angle, ismath='TeX!', mtext=None):
"""
"""
self._draw_text_as_path(gc, x, y, s, prop, angle, ismath="TeX")
def draw_text(self, gc, x, y, s, prop, angle, ismath=False, mtext=None):
"""
Draw the text instance
Parameters
----------
gc : `GraphicsContextBase`
the graphics context
x : scalar
the x location of the text in display coords
y : scalar
the y location of the text baseline in display coords
s : str
the text string
prop : `matplotlib.font_manager.FontProperties`
font properties
angle : scalar
the rotation angle in degrees
mtext : `matplotlib.text.Text`
the original text object to be rendered
Notes
-----
**backend implementers note**
When you are trying to determine if you have gotten your bounding box
right (which is what enables the text layout/alignment to work
properly), it helps to change the line in text.py::
if 0: bbox_artist(self, renderer)
to if 1, and then the actual bounding box will be plotted along with
your text.
"""
self._draw_text_as_path(gc, x, y, s, prop, angle, ismath)
def _get_text_path_transform(self, x, y, s, prop, angle, ismath):
"""
        Return the text path and transform.

        Parameters
        ----------
        x, y : float
            Position of the text, in display coords.
        s : str
            The text to be converted.
        prop : `matplotlib.font_manager.FontProperties`
            The font properties.
        angle : float
            The rotation angle, in degrees.
        ismath : bool or "TeX"
            If True, use mathtext parser.  If "TeX", use *usetex* mode.
"""
text2path = self._text2path
fontsize = self.points_to_pixels(prop.get_size_in_points())
if ismath == "TeX":
verts, codes = text2path.get_text_path(prop, s, ismath=False,
usetex=True)
else:
verts, codes = text2path.get_text_path(prop, s, ismath=ismath,
usetex=False)
path = Path(verts, codes)
angle = np.deg2rad(angle)
if self.flipy():
transform = Affine2D().scale(fontsize / text2path.FONT_SCALE,
fontsize / text2path.FONT_SCALE)
transform = transform.rotate(angle).translate(x, self.height - y)
else:
transform = Affine2D().scale(fontsize / text2path.FONT_SCALE,
fontsize / text2path.FONT_SCALE)
transform = transform.rotate(angle).translate(x, y)
return path, transform
def _draw_text_as_path(self, gc, x, y, s, prop, angle, ismath):
"""
        Draw the text by converting it to paths using the textpath module.

        Parameters
        ----------
        gc : `GraphicsContextBase`
            The graphics context.
        x, y : float
            Position of the text, in display coords.
        s : str
            The text to be converted.
        prop : `matplotlib.font_manager.FontProperties`
            The font properties.
        angle : float
            The rotation angle, in degrees.
        ismath : bool or "TeX"
            If True, use mathtext parser.  If "TeX", use *usetex* mode.
"""
path, transform = self._get_text_path_transform(
x, y, s, prop, angle, ismath)
color = gc.get_rgb()
gc.set_linewidth(0.0)
self.draw_path(gc, path, transform, rgbFace=color)
def get_text_width_height_descent(self, s, prop, ismath):
"""
Get the width, height, and descent (offset from the bottom
to the baseline), in display coords, of the string *s* with
:class:`~matplotlib.font_manager.FontProperties` *prop*
"""
if ismath == 'TeX':
# todo: handle props
size = prop.get_size_in_points()
texmanager = self._text2path.get_texmanager()
fontsize = prop.get_size_in_points()
w, h, d = texmanager.get_text_width_height_descent(
s, fontsize, renderer=self)
return w, h, d
dpi = self.points_to_pixels(72)
if ismath:
dims = self._text2path.mathtext_parser.parse(s, dpi, prop)
return dims[0:3] # return width, height, descent
flags = self._text2path._get_hinting_flag()
font = self._text2path._get_font(prop)
size = prop.get_size_in_points()
font.set_size(size, dpi)
# the width and height of unrotated string
font.set_text(s, 0.0, flags=flags)
w, h = font.get_width_height()
d = font.get_descent()
w /= 64.0 # convert from subpixels
h /= 64.0
d /= 64.0
return w, h, d
def flipy(self):
"""
        Return True if small y values correspond to the top of the canvas
        (i.e., y increases downward).  This is used only for drawing text
        (:mod:`matplotlib.text`) and images (:mod:`matplotlib.image`).
"""
return True
def get_canvas_width_height(self):
'return the canvas width and height in display coords'
return 1, 1
def get_texmanager(self):
"""
return the :class:`matplotlib.texmanager.TexManager` instance
"""
if self._texmanager is None:
from matplotlib.texmanager import TexManager
self._texmanager = TexManager()
return self._texmanager
def new_gc(self):
"""
Return an instance of a :class:`GraphicsContextBase`
"""
return GraphicsContextBase()
def points_to_pixels(self, points):
"""
        Convert points to display units.

        You need to override this function (unless your backend
        doesn't have a dpi, e.g., postscript or svg).  Some imaging
        systems assume some value for pixels per inch::

            points to pixels = points * pixels_per_inch/72.0 * dpi/72.0

Parameters
----------
points : scalar or array_like
a float or a numpy array of float
Returns
-------
Points converted to pixels
"""
return points
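    # Example (a minimal sketch, not matplotlib's own code): a raster backend
    # with a ``dpi`` attribute would typically override this as
    #
    #     def points_to_pixels(self, points):
    #         return points * self.dpi / 72.0
    #
    # so that, e.g., a 12 pt linewidth on a 144 dpi canvas maps to 24 pixels.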
def strip_math(self, s):
return cbook.strip_math(s)
def start_rasterizing(self):
"""
Used in MixedModeRenderer. Switch to the raster renderer.
"""
def stop_rasterizing(self):
"""
Used in MixedModeRenderer. Switch back to the vector renderer
and draw the contents of the raster renderer as an image on
the vector renderer.
"""
def start_filter(self):
"""
Used in AggRenderer. Switch to a temporary renderer for image
filtering effects.
"""
def stop_filter(self, filter_func):
"""
Used in AggRenderer. Switch back to the original renderer.
The contents of the temporary renderer is processed with the
*filter_func* and is drawn on the original renderer as an
image.
"""
class GraphicsContextBase(object):
"""
An abstract base class that provides color, line styles, etc...
"""
def __init__(self):
self._alpha = 1.0
self._forced_alpha = False # if True, _alpha overrides A from RGBA
self._antialiased = 1 # use 0,1 not True, False for extension code
self._capstyle = 'butt'
self._cliprect = None
self._clippath = None
self._dashes = None, None
self._joinstyle = 'round'
self._linestyle = 'solid'
self._linewidth = 1
self._rgb = (0.0, 0.0, 0.0, 1.0)
self._hatch = None
self._hatch_color = colors.to_rgba(rcParams['hatch.color'])
self._hatch_linewidth = rcParams['hatch.linewidth']
self._url = None
self._gid = None
self._snap = None
self._sketch = None
def copy_properties(self, gc):
'Copy properties from gc to self'
self._alpha = gc._alpha
self._forced_alpha = gc._forced_alpha
self._antialiased = gc._antialiased
self._capstyle = gc._capstyle
self._cliprect = gc._cliprect
self._clippath = gc._clippath
self._dashes = gc._dashes
self._joinstyle = gc._joinstyle
self._linestyle = gc._linestyle
self._linewidth = gc._linewidth
self._rgb = gc._rgb
self._hatch = gc._hatch
self._hatch_color = gc._hatch_color
self._hatch_linewidth = gc._hatch_linewidth
self._url = gc._url
self._gid = gc._gid
self._snap = gc._snap
self._sketch = gc._sketch
def restore(self):
"""
Restore the graphics context from the stack - needed only
for backends that save graphics contexts on a stack
"""
def get_alpha(self):
"""
Return the alpha value used for blending - not supported on
all backends
"""
return self._alpha
def get_antialiased(self):
"Return true if the object should try to do antialiased rendering"
return self._antialiased
def get_capstyle(self):
"""
Return the capstyle as a string in ('butt', 'round', 'projecting')
"""
return self._capstyle
def get_clip_rectangle(self):
"""
Return the clip rectangle as a :class:`~matplotlib.transforms.Bbox`
instance
"""
return self._cliprect
def get_clip_path(self):
"""
Return the clip path in the form (path, transform), where path
is a :class:`~matplotlib.path.Path` instance, and transform is
an affine transform to apply to the path before clipping.
"""
if self._clippath is not None:
return self._clippath.get_transformed_path_and_affine()
return None, None
def get_dashes(self):
"""
        Return the dash information as an (offset, dash-list) tuple.

        The dash list is an even-length list that gives the ink on, ink
        off lengths in pixels.

        See p. 107 of the PostScript `BLUEBOOK
        <https://www-cdf.fnal.gov/offline/PostScript/BLUEBOOK.PDF>`_
        for more info.

        Default value is None.
"""
return self._dashes
def get_forced_alpha(self):
"""
Return whether the value given by get_alpha() should be used to
override any other alpha-channel values.
"""
return self._forced_alpha
def get_joinstyle(self):
"""
Return the line join style as one of ('miter', 'round', 'bevel')
"""
return self._joinstyle
@cbook.deprecated("2.1")
def get_linestyle(self):
"""
Return the linestyle: one of ('solid', 'dashed', 'dashdot',
'dotted').
"""
return self._linestyle
def get_linewidth(self):
"""
Return the line width in points as a scalar
"""
return self._linewidth
def get_rgb(self):
"""
returns a tuple of three or four floats from 0-1.
"""
return self._rgb
def get_url(self):
"""
returns a url if one is set, None otherwise
"""
return self._url
def get_gid(self):
"""
Return the object identifier if one is set, None otherwise.
"""
return self._gid
def get_snap(self):
"""
returns the snap setting which may be:
* True: snap vertices to the nearest pixel center
* False: leave vertices as-is
* None: (auto) If the path contains only rectilinear line
segments, round to the nearest pixel center
"""
return self._snap
def set_alpha(self, alpha):
"""
Set the alpha value used for blending - not supported on all backends.
If ``alpha=None`` (the default), the alpha components of the
foreground and fill colors will be used to set their respective
transparencies (where applicable); otherwise, ``alpha`` will override
them.
"""
if alpha is not None:
self._alpha = alpha
self._forced_alpha = True
else:
self._alpha = 1.0
self._forced_alpha = False
self.set_foreground(self._rgb, isRGBA=True)
def set_antialiased(self, b):
"""
True if object should be drawn with antialiased rendering
"""
# use 0, 1 to make life easier on extension code trying to read the gc
if b:
self._antialiased = 1
else:
self._antialiased = 0
def set_capstyle(self, cs):
"""
Set the capstyle as a string in ('butt', 'round', 'projecting')
"""
if cs in ('butt', 'round', 'projecting'):
self._capstyle = cs
else:
raise ValueError('Unrecognized cap style. Found %s' % cs)
def set_clip_rectangle(self, rectangle):
"""
Set the clip rectangle with sequence (left, bottom, width, height)
"""
self._cliprect = rectangle
def set_clip_path(self, path):
"""
Set the clip path and transformation. Path should be a
:class:`~matplotlib.transforms.TransformedPath` instance.
"""
if (path is not None
and not isinstance(path, transforms.TransformedPath)):
raise ValueError("Path should be a "
"matplotlib.transforms.TransformedPath instance")
self._clippath = path
def set_dashes(self, dash_offset, dash_list):
"""
Set the dash style for the gc.
Parameters
----------
dash_offset : float
is the offset (usually 0).
dash_list : array_like
specifies the on-off sequence as points.
``(None, None)`` specifies a solid line
"""
if dash_list is not None:
dl = np.asarray(dash_list)
if np.any(dl < 0.0):
raise ValueError(
"All values in the dash list must be positive")
self._dashes = dash_offset, dash_list
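    # Example (illustrative): a dash pattern of 3 points on, 1 point off,
    # starting at the beginning of the line, would be set with
    #
    #     gc.set_dashes(0, [3.0, 1.0])
    #
    # while ``gc.set_dashes(None, None)`` restores a solid line.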
def set_foreground(self, fg, isRGBA=False):
"""
        Set the foreground color.  fg can be a MATLAB format string, an
        HTML hex color string, an rgb or rgba unit tuple, or a float
        between 0 and 1.  In the latter case, grayscale is used.
If you know fg is rgba, set ``isRGBA=True`` for efficiency.
"""
if self._forced_alpha and isRGBA:
self._rgb = fg[:3] + (self._alpha,)
elif self._forced_alpha:
self._rgb = colors.to_rgba(fg, self._alpha)
elif isRGBA:
self._rgb = fg
else:
self._rgb = colors.to_rgba(fg)
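    # Example (illustrative): with forced alpha, the alpha channel of the
    # foreground color is overridden by the gc's alpha value:
    #
    #     gc = GraphicsContextBase()
    #     gc.set_alpha(0.5)           # forces alpha
    #     gc.set_foreground('red')    # stored as (1.0, 0.0, 0.0, 0.5)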
def set_joinstyle(self, js):
"""
Set the join style to be one of ('miter', 'round', 'bevel')
"""
if js in ('miter', 'round', 'bevel'):
self._joinstyle = js
else:
raise ValueError('Unrecognized join style. Found %s' % js)
def set_linewidth(self, w):
"""
Set the linewidth in points
"""
self._linewidth = float(w)
@cbook.deprecated("2.1")
def set_linestyle(self, style):
"""
Set the linestyle to be one of ('solid', 'dashed', 'dashdot',
'dotted'). These are defined in the rcParams
`lines.dashed_pattern`, `lines.dashdot_pattern` and
`lines.dotted_pattern`. One may also specify customized dash
styles by providing a tuple of (offset, dash pairs).
"""
self._linestyle = style
def set_url(self, url):
"""
Sets the url for links in compatible backends
"""
self._url = url
def set_gid(self, id):
"""
Sets the id.
"""
self._gid = id
def set_snap(self, snap):
"""
Sets the snap setting which may be:
* True: snap vertices to the nearest pixel center
* False: leave vertices as-is
* None: (auto) If the path contains only rectilinear line
segments, round to the nearest pixel center
"""
self._snap = snap
def set_hatch(self, hatch):
"""
Sets the hatch style for filling
"""
self._hatch = hatch
def get_hatch(self):
"""
Gets the current hatch style
"""
return self._hatch
def get_hatch_path(self, density=6.0):
"""
Returns a Path for the current hatch.
"""
hatch = self.get_hatch()
if hatch is None:
return None
return Path.hatch(hatch, density)
def get_hatch_color(self):
"""
Gets the color to use for hatching.
"""
return self._hatch_color
def set_hatch_color(self, hatch_color):
"""
sets the color to use for hatching.
"""
self._hatch_color = hatch_color
def get_hatch_linewidth(self):
"""
Gets the linewidth to use for hatching.
"""
return self._hatch_linewidth
def get_sketch_params(self):
"""
Returns the sketch parameters for the artist.
Returns
-------
sketch_params : tuple or `None`
A 3-tuple with the following elements:
* `scale`: The amplitude of the wiggle perpendicular to the
source line.
* `length`: The length of the wiggle along the line.
* `randomness`: The scale factor by which the length is
shrunken or expanded.
May return `None` if no sketch parameters were set.
"""
return self._sketch
def set_sketch_params(self, scale=None, length=None, randomness=None):
"""
Sets the sketch parameters.
Parameters
----------
scale : float, optional
The amplitude of the wiggle perpendicular to the source
            line, in pixels.  If scale is `None`, or not provided, no
            sketch filter will be applied.
length : float, optional
The length of the wiggle along the line, in pixels
(default 128)
randomness : float, optional
The scale factor by which the length is shrunken or
expanded (default 16)
"""
self._sketch = (
None if scale is None
else (scale, length or 128., randomness or 16.))
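    # Example (illustrative): the stored tuple always carries the defaults for
    # unspecified values, so
    #
    #     gc.set_sketch_params(scale=2)
    #     gc.get_sketch_params()      # -> (2, 128.0, 16.0)
    #
    # while ``gc.set_sketch_params()`` disables sketching (get returns None).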
class TimerBase(object):
'''
    A base class for providing timer events, useful for things like animations.
Backends need to implement a few specific methods in order to use their
own timing mechanisms so that the timer events are integrated into their
event loops.
Mandatory functions that must be implemented:
* `_timer_start`: Contains backend-specific code for starting
the timer
* `_timer_stop`: Contains backend-specific code for stopping
the timer
Optional overrides:
* `_timer_set_single_shot`: Code for setting the timer to
single shot operating mode, if supported by the timer
object. If not, the `Timer` class itself will store the flag
and the `_on_timer` method should be overridden to support
such behavior.
* `_timer_set_interval`: Code for setting the interval on the
timer, if there is a method for doing so on the timer
object.
* `_on_timer`: This is the internal function that any timer
object should call, which will handle the task of running
all callbacks that have been set.
Attributes
----------
interval : scalar
The time between timer events in milliseconds. Default is 1000 ms.
single_shot : bool
Boolean flag indicating whether this timer should operate as single
shot (run once and then stop). Defaults to `False`.
callbacks : List[Tuple[callable, Tuple, Dict]]
Stores list of (func, args, kwargs) tuples that will be called upon
timer events. This list can be manipulated directly, or the
functions `add_callback` and `remove_callback` can be used.
'''
def __init__(self, interval=None, callbacks=None):
        # Initialize an empty callbacks list and set up default settings if
        # necessary.
if callbacks is None:
self.callbacks = []
else:
self.callbacks = callbacks[:] # Create a copy
if interval is None:
self._interval = 1000
else:
self._interval = interval
self._single = False
# Default attribute for holding the GUI-specific timer object
self._timer = None
def __del__(self):
'Need to stop timer and possibly disconnect timer.'
self._timer_stop()
def start(self, interval=None):
'''
Start the timer object. `interval` is optional and will be used
to reset the timer interval first if provided.
'''
if interval is not None:
self._set_interval(interval)
self._timer_start()
def stop(self):
'''
Stop the timer.
'''
self._timer_stop()
def _timer_start(self):
pass
def _timer_stop(self):
pass
def _get_interval(self):
return self._interval
def _set_interval(self, interval):
# Force to int since none of the backends actually support fractional
# milliseconds, and some error or give warnings.
interval = int(interval)
self._interval = interval
self._timer_set_interval()
interval = property(_get_interval, _set_interval)
def _get_single_shot(self):
return self._single
def _set_single_shot(self, ss=True):
self._single = ss
self._timer_set_single_shot()
single_shot = property(_get_single_shot, _set_single_shot)
def add_callback(self, func, *args, **kwargs):
'''
Register `func` to be called by timer when the event fires. Any
additional arguments provided will be passed to `func`.
'''
self.callbacks.append((func, args, kwargs))
def remove_callback(self, func, *args, **kwargs):
'''
Remove `func` from list of callbacks. `args` and `kwargs` are optional
and used to distinguish between copies of the same function registered
to be called with different arguments.
'''
if args or kwargs:
self.callbacks.remove((func, args, kwargs))
else:
funcs = [c[0] for c in self.callbacks]
if func in funcs:
self.callbacks.pop(funcs.index(func))
def _timer_set_interval(self):
"""Used to set interval on underlying timer object."""
def _timer_set_single_shot(self):
"""Used to set single shot on underlying timer object."""
def _on_timer(self):
'''
        Runs all functions that have been registered as callbacks.  Functions
can return False (or 0) if they should not be called any more. If there
are no callbacks, the timer is automatically stopped.
'''
for func, args, kwargs in self.callbacks:
ret = func(*args, **kwargs)
# docstring above explains why we use `if ret == 0` here,
# instead of `if not ret`.
# This will also catch `ret == False` as `False == 0`
# but does not annoy the linters
# https://docs.python.org/3/library/stdtypes.html#boolean-values
if ret == 0:
self.callbacks.remove((func, args, kwargs))
if len(self.callbacks) == 0:
self.stop()
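# Example (illustrative sketch): the usual way to obtain a timer is through the
# canvas, which returns the appropriate TimerBase subclass for the backend;
# this assumes ``fig`` is an existing Figure on an interactive backend:
#
#     timer = fig.canvas.new_timer(interval=500)   # fire every 500 ms
#     timer.add_callback(lambda: print('tick'))
#     timer.start()
#     # ... later ...
#     timer.stop()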
class Event(object):
"""
A matplotlib event. Attach additional attributes as defined in
:meth:`FigureCanvasBase.mpl_connect`. The following attributes
are defined and shown with their default values
Attributes
----------
name : str
the event name
canvas : `FigureCanvasBase`
the backend-specific canvas instance generating the event
guiEvent
the GUI event that triggered the matplotlib event
"""
def __init__(self, name, canvas, guiEvent=None):
self.name = name
self.canvas = canvas
self.guiEvent = guiEvent
@cbook.deprecated("2.1")
class IdleEvent(Event):
"""
An event triggered by the GUI backend when it is idle -- useful
for passive animation
"""
class DrawEvent(Event):
"""
An event triggered by a draw operation on the canvas
    In most backends, callbacks subscribed to this event will be
fired after the rendering is complete but before the screen is
updated. Any extra artists drawn to the canvas's renderer will
be reflected without an explicit call to ``blit``.
.. warning ::
Calling ``canvas.draw`` and ``canvas.blit`` in these callbacks may
not be safe with all backends and may cause infinite recursion.
In addition to the :class:`Event` attributes, the following event
attributes are defined:
Attributes
----------
renderer : `RendererBase`
the renderer for the draw event
"""
def __init__(self, name, canvas, renderer):
Event.__init__(self, name, canvas)
self.renderer = renderer
class ResizeEvent(Event):
"""
An event triggered by a canvas resize
In addition to the :class:`Event` attributes, the following event
attributes are defined:
Attributes
----------
width : scalar
width of the canvas in pixels
height : scalar
height of the canvas in pixels
"""
def __init__(self, name, canvas):
Event.__init__(self, name, canvas)
self.width, self.height = canvas.get_width_height()
class CloseEvent(Event):
"""
An event triggered by a figure being closed
"""
def __init__(self, name, canvas, guiEvent=None):
Event.__init__(self, name, canvas, guiEvent)
class LocationEvent(Event):
"""
An event that has a screen location
The following additional attributes are defined and shown with
their default values.
In addition to the :class:`Event` attributes, the following
event attributes are defined:
Attributes
----------
x : scalar
x position - pixels from left of canvas
y : scalar
y position - pixels from bottom of canvas
    inaxes : `~matplotlib.axes.Axes` or None
        the :class:`~matplotlib.axes.Axes` instance the mouse is over, if any
xdata : scalar
x coord of mouse in data coords
ydata : scalar
y coord of mouse in data coords
"""
x = None # x position - pixels from left of canvas
    y = None  # y position - pixels from bottom of canvas
    inaxes = None  # the Axes instance if mouse is over axes
xdata = None # x coord of mouse in data coords
ydata = None # y coord of mouse in data coords
# the last event that was triggered before this one
lastevent = None
def __init__(self, name, canvas, x, y, guiEvent=None):
"""
*x*, *y* in figure coords, 0,0 = bottom, left
"""
Event.__init__(self, name, canvas, guiEvent=guiEvent)
self.x = x
self.y = y
if x is None or y is None:
# cannot check if event was in axes if no x,y info
self.inaxes = None
self._update_enter_leave()
return
# Find all axes containing the mouse
if self.canvas.mouse_grabber is None:
axes_list = [a for a in self.canvas.figure.get_axes()
if a.in_axes(self)]
else:
axes_list = [self.canvas.mouse_grabber]
if axes_list:
self.inaxes = cbook._topmost_artist(axes_list)
try:
trans = self.inaxes.transData.inverted()
xdata, ydata = trans.transform_point((x, y))
except ValueError:
self.xdata = None
self.ydata = None
else:
self.xdata = xdata
self.ydata = ydata
else:
self.inaxes = None
self._update_enter_leave()
def _update_enter_leave(self):
'process the figure/axes enter leave events'
if LocationEvent.lastevent is not None:
last = LocationEvent.lastevent
if last.inaxes != self.inaxes:
# process axes enter/leave events
try:
if last.inaxes is not None:
last.canvas.callbacks.process('axes_leave_event', last)
except:
pass
# See ticket 2901582.
# I think this is a valid exception to the rule
# against catching all exceptions; if anything goes
# wrong, we simply want to move on and process the
# current event.
if self.inaxes is not None:
self.canvas.callbacks.process('axes_enter_event', self)
else:
# process a figure enter event
if self.inaxes is not None:
self.canvas.callbacks.process('axes_enter_event', self)
LocationEvent.lastevent = self
class MouseEvent(LocationEvent):
"""
A mouse event ('button_press_event',
'button_release_event',
'scroll_event',
'motion_notify_event').
In addition to the :class:`Event` and :class:`LocationEvent`
attributes, the following attributes are defined:
Attributes
----------
button : None, scalar, or str
button pressed None, 1, 2, 3, 'up', 'down' (up and down are used
for scroll events). Note that in the nbagg backend, both the
middle and right clicks return 3 since right clicking will bring
up the context menu in some browsers.
key : None, or str
the key depressed when the mouse event triggered (see
:class:`KeyEvent`)
step : scalar
number of scroll steps (positive for 'up', negative for 'down')
Examples
--------
Usage::
def on_press(event):
print('you pressed', event.button, event.xdata, event.ydata)
cid = fig.canvas.mpl_connect('button_press_event', on_press)
"""
x = None # x position - pixels from left of canvas
    y = None  # y position - pixels from bottom of canvas
    button = None  # button pressed None, 1, 2, 3
    dblclick = None  # whether or not the event is the result of a double click
    inaxes = None  # the Axes instance if mouse is over axes
xdata = None # x coord of mouse in data coords
ydata = None # y coord of mouse in data coords
step = None # scroll steps for scroll events
def __init__(self, name, canvas, x, y, button=None, key=None,
step=0, dblclick=False, guiEvent=None):
"""
x, y in figure coords, 0,0 = bottom, left
button pressed None, 1, 2, 3, 'up', 'down'
"""
LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
self.button = button
self.key = key
self.step = step
self.dblclick = dblclick
def __str__(self):
return ("MPL MouseEvent: xy=(%d,%d) xydata=(%s,%s) button=%s " +
"dblclick=%s inaxes=%s") % (self.x, self.y, self.xdata,
self.ydata, self.button,
self.dblclick, self.inaxes)
class PickEvent(Event):
"""
    A pick event, fired when the user picks a location on the canvas
    sufficiently close to an artist.
    In addition to the :class:`Event` attributes, the following attributes
    are defined:
Attributes
----------
mouseevent : `MouseEvent`
the mouse event that generated the pick
artist : `matplotlib.artist.Artist`
the picked artist
other
extra class dependent attrs -- e.g., a
:class:`~matplotlib.lines.Line2D` pick may define different
extra attributes than a
:class:`~matplotlib.collections.PatchCollection` pick event
Examples
--------
Usage::
        ax.plot(np.random.rand(100), 'o', picker=5)  # 5 points tolerance
def on_pick(event):
line = event.artist
xdata, ydata = line.get_data()
ind = event.ind
print('on pick line:', np.array([xdata[ind], ydata[ind]]).T)
cid = fig.canvas.mpl_connect('pick_event', on_pick)
"""
def __init__(self, name, canvas, mouseevent, artist,
guiEvent=None, **kwargs):
Event.__init__(self, name, canvas, guiEvent)
self.mouseevent = mouseevent
self.artist = artist
self.__dict__.update(kwargs)
class KeyEvent(LocationEvent):
"""
A key event (key press, key release).
Attach additional attributes as defined in
:meth:`FigureCanvasBase.mpl_connect`.
In addition to the :class:`Event` and :class:`LocationEvent`
attributes, the following attributes are defined:
Attributes
----------
key : None or str
        the key(s) pressed.  Could be **None**, a single case-sensitive ASCII
character ("g", "G", "#", etc.), a special key
("control", "shift", "f1", "up", etc.) or a
combination of the above (e.g., "ctrl+alt+g", "ctrl+alt+G").
Notes
-----
Modifier keys will be prefixed to the pressed key and will be in the order
"ctrl", "alt", "super". The exception to this rule is when the pressed key
is itself a modifier key, therefore "ctrl+alt" and "alt+control" can both
be valid key values.
Examples
--------
Usage::
def on_key(event):
print('you pressed', event.key, event.xdata, event.ydata)
cid = fig.canvas.mpl_connect('key_press_event', on_key)
"""
def __init__(self, name, canvas, key, x=0, y=0, guiEvent=None):
LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
self.key = key
class FigureCanvasBase(object):
"""
The canvas the figure renders into.
Public attributes
Attributes
----------
figure : `matplotlib.figure.Figure`
A high-level figure instance
"""
events = [
'resize_event',
'draw_event',
'key_press_event',
'key_release_event',
'button_press_event',
'button_release_event',
'scroll_event',
'motion_notify_event',
'pick_event',
'idle_event',
'figure_enter_event',
'figure_leave_event',
'axes_enter_event',
'axes_leave_event',
'close_event'
]
supports_blit = True
fixed_dpi = None
filetypes = _default_filetypes
if _has_pil:
# JPEG support
register_backend('jpg', 'matplotlib.backends.backend_agg',
'Joint Photographic Experts Group')
register_backend('jpeg', 'matplotlib.backends.backend_agg',
'Joint Photographic Experts Group')
# TIFF support
register_backend('tif', 'matplotlib.backends.backend_agg',
'Tagged Image File Format')
register_backend('tiff', 'matplotlib.backends.backend_agg',
'Tagged Image File Format')
def __init__(self, figure):
self._is_idle_drawing = True
self._is_saving = False
figure.set_canvas(self)
self.figure = figure
# a dictionary from event name to a dictionary that maps cid->func
self.callbacks = cbook.CallbackRegistry()
self.widgetlock = widgets.LockDraw()
self._button = None # the button pressed
self._key = None # the key pressed
self._lastx, self._lasty = None, None
self.button_pick_id = self.mpl_connect('button_press_event', self.pick)
self.scroll_pick_id = self.mpl_connect('scroll_event', self.pick)
self.mouse_grabber = None # the axes currently grabbing mouse
self.toolbar = None # NavigationToolbar2 will set me
self._is_idle_drawing = False
@contextmanager
def _idle_draw_cntx(self):
self._is_idle_drawing = True
yield
self._is_idle_drawing = False
def is_saving(self):
"""
Returns whether the renderer is in the process of saving
to a file, rather than rendering for an on-screen buffer.
"""
return self._is_saving
@cbook.deprecated("2.2")
def onRemove(self, ev):
"""
Mouse event processor which removes the top artist
        under the cursor.  Connect this to the 'button_press_event'
        using::
            canvas.mpl_connect('button_press_event', canvas.onRemove)
"""
# Find the top artist under the cursor
under = cbook._topmost_artist(self.figure.hitlist(ev))
h = None
if under:
h = under[-1]
# Try deleting that artist, or its parent if you
# can't delete the artist
while h:
if h.remove():
self.draw_idle()
break
parent = None
for p in under:
if h in p.get_children():
parent = p
break
h = parent
def pick(self, mouseevent):
if not self.widgetlock.locked():
self.figure.pick(mouseevent)
def blit(self, bbox=None):
"""Blit the canvas in bbox (default entire canvas)."""
def resize(self, w, h):
"""Set the canvas size in pixels."""
def draw_event(self, renderer):
"""Pass a `DrawEvent` to all functions connected to ``draw_event``."""
s = 'draw_event'
event = DrawEvent(s, self, renderer)
self.callbacks.process(s, event)
def resize_event(self):
"""Pass a `ResizeEvent` to all functions connected to ``resize_event``.
"""
s = 'resize_event'
event = ResizeEvent(s, self)
self.callbacks.process(s, event)
self.draw_idle()
def close_event(self, guiEvent=None):
"""Pass a `CloseEvent` to all functions connected to ``close_event``.
"""
s = 'close_event'
try:
event = CloseEvent(s, self, guiEvent=guiEvent)
self.callbacks.process(s, event)
except (TypeError, AttributeError):
pass
# Suppress the TypeError when the python session is being killed.
# It may be that a better solution would be a mechanism to
# disconnect all callbacks upon shutdown.
# AttributeError occurs on OSX with qt4agg upon exiting
# with an open window; 'callbacks' attribute no longer exists.
def key_press_event(self, key, guiEvent=None):
"""Pass a `KeyEvent` to all functions connected to ``key_press_event``.
"""
self._key = key
s = 'key_press_event'
event = KeyEvent(
s, self, key, self._lastx, self._lasty, guiEvent=guiEvent)
self.callbacks.process(s, event)
def key_release_event(self, key, guiEvent=None):
"""
Pass a `KeyEvent` to all functions connected to ``key_release_event``.
"""
s = 'key_release_event'
event = KeyEvent(
s, self, key, self._lastx, self._lasty, guiEvent=guiEvent)
self.callbacks.process(s, event)
self._key = None
def pick_event(self, mouseevent, artist, **kwargs):
"""
This method will be called by artists who are picked and will
fire off :class:`PickEvent` callbacks registered listeners
"""
s = 'pick_event'
event = PickEvent(s, self, mouseevent, artist,
guiEvent=mouseevent.guiEvent,
**kwargs)
self.callbacks.process(s, event)
def scroll_event(self, x, y, step, guiEvent=None):
"""
Backend derived classes should call this function on any
scroll wheel event. x,y are the canvas coords: 0,0 is lower,
left. button and key are as defined in MouseEvent.
        This method will call all functions connected to the
'scroll_event' with a :class:`MouseEvent` instance.
"""
if step >= 0:
self._button = 'up'
else:
self._button = 'down'
s = 'scroll_event'
mouseevent = MouseEvent(s, self, x, y, self._button, self._key,
step=step, guiEvent=guiEvent)
self.callbacks.process(s, mouseevent)
def button_press_event(self, x, y, button, dblclick=False, guiEvent=None):
"""
Backend derived classes should call this function on any mouse
button press. x,y are the canvas coords: 0,0 is lower, left.
button and key are as defined in :class:`MouseEvent`.
        This method will call all functions connected to the
'button_press_event' with a :class:`MouseEvent` instance.
"""
self._button = button
s = 'button_press_event'
mouseevent = MouseEvent(s, self, x, y, button, self._key,
dblclick=dblclick, guiEvent=guiEvent)
self.callbacks.process(s, mouseevent)
def button_release_event(self, x, y, button, guiEvent=None):
"""
Backend derived classes should call this function on any mouse
button release.
This method will call all functions connected to the
'button_release_event' with a :class:`MouseEvent` instance.
Parameters
----------
x : scalar
the canvas coordinates where 0=left
y : scalar
the canvas coordinates where 0=bottom
guiEvent
the native UI event that generated the mpl event
"""
s = 'button_release_event'
event = MouseEvent(s, self, x, y, button, self._key, guiEvent=guiEvent)
self.callbacks.process(s, event)
self._button = None
def motion_notify_event(self, x, y, guiEvent=None):
"""
Backend derived classes should call this function on any
motion-notify-event.
This method will call all functions connected to the
'motion_notify_event' with a :class:`MouseEvent` instance.
Parameters
----------
x : scalar
the canvas coordinates where 0=left
y : scalar
the canvas coordinates where 0=bottom
guiEvent
the native UI event that generated the mpl event
"""
self._lastx, self._lasty = x, y
s = 'motion_notify_event'
event = MouseEvent(s, self, x, y, self._button, self._key,
guiEvent=guiEvent)
self.callbacks.process(s, event)
def leave_notify_event(self, guiEvent=None):
"""
Backend derived classes should call this function when leaving
canvas
Parameters
----------
guiEvent
the native UI event that generated the mpl event
"""
self.callbacks.process('figure_leave_event', LocationEvent.lastevent)
LocationEvent.lastevent = None
self._lastx, self._lasty = None, None
def enter_notify_event(self, guiEvent=None, xy=None):
"""
Backend derived classes should call this function when entering
canvas
Parameters
----------
guiEvent
the native UI event that generated the mpl event
xy : tuple of 2 scalars
the coordinate location of the pointer when the canvas is
entered
"""
if xy is not None:
x, y = xy
self._lastx, self._lasty = x, y
event = Event('figure_enter_event', self, guiEvent)
self.callbacks.process('figure_enter_event', event)
@cbook.deprecated("2.1")
def idle_event(self, guiEvent=None):
"""Called when GUI is idle."""
s = 'idle_event'
event = IdleEvent(s, self, guiEvent=guiEvent)
self.callbacks.process(s, event)
def grab_mouse(self, ax):
"""
Set the child axes which are currently grabbing the mouse events.
Usually called by the widgets themselves.
It is an error to call this if the mouse is already grabbed by
another axes.
"""
if self.mouse_grabber not in (None, ax):
raise RuntimeError("Another Axes already grabs mouse input")
self.mouse_grabber = ax
def release_mouse(self, ax):
"""
Release the mouse grab held by the axes, ax.
Usually called by the widgets.
        It is OK to call this even if *ax* doesn't currently hold the
        mouse grab.
"""
if self.mouse_grabber is ax:
self.mouse_grabber = None
def draw(self, *args, **kwargs):
"""Render the :class:`~matplotlib.figure.Figure`."""
def draw_idle(self, *args, **kwargs):
"""
:meth:`draw` only if idle; defaults to draw but backends can override
"""
if not self._is_idle_drawing:
with self._idle_draw_cntx():
self.draw(*args, **kwargs)
def draw_cursor(self, event):
"""
Draw a cursor in the event.axes if inaxes is not None. Use
native GUI drawing for efficiency if possible
"""
def get_width_height(self):
"""
Return the figure width and height in points or pixels
(depending on the backend), truncated to integers
"""
return int(self.figure.bbox.width), int(self.figure.bbox.height)
@classmethod
def get_supported_filetypes(cls):
"""Return dict of savefig file formats supported by this backend"""
return cls.filetypes
@classmethod
def get_supported_filetypes_grouped(cls):
"""Return a dict of savefig file formats supported by this backend,
where the keys are a file type name, such as 'Joint Photographic
Experts Group', and the values are a list of filename extensions used
for that filetype, such as ['jpg', 'jpeg']."""
groupings = {}
for ext, name in six.iteritems(cls.filetypes):
groupings.setdefault(name, []).append(ext)
groupings[name].sort()
return groupings
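    # Example (illustrative): with the default Agg file types registered above,
    # the grouped dict looks roughly like
    #
    #     {'Joint Photographic Experts Group': ['jpeg', 'jpg'],
    #      'Portable Network Graphics': ['png'],
    #      ...}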
def _get_output_canvas(self, fmt):
"""
Return a canvas suitable for saving figures to a specified file format.
If necessary, this function will switch to a registered backend that
supports the format.
"""
method_name = 'print_%s' % fmt
# Return the current canvas if it supports the requested format.
if hasattr(self, method_name):
return self
# Return a default canvas for the requested format, if it exists.
canvas_class = get_registered_canvas_class(fmt)
if canvas_class:
return self.switch_backends(canvas_class)
# Else report error for unsupported format.
raise ValueError(
"Format {!r} is not supported (supported formats: {})"
.format(fmt, ", ".join(sorted(self.get_supported_filetypes()))))
def print_figure(self, filename, dpi=None, facecolor=None, edgecolor=None,
orientation='portrait', format=None, **kwargs):
"""
Render the figure to hardcopy. Set the figure patch face and edge
colors. This is useful because some of the GUIs have a gray figure
face color background and you'll probably want to override this on
hardcopy.
Parameters
----------
filename
can also be a file object on image backends
orientation : {'landscape', 'portrait'}, optional
only currently applies to PostScript printing.
dpi : scalar, optional
the dots per inch to save the figure in; if None, use savefig.dpi
facecolor : color spec or None, optional
the facecolor of the figure; if None, defaults to savefig.facecolor
edgecolor : color spec or None, optional
the edgecolor of the figure; if None, defaults to savefig.edgecolor
format : str, optional
when set, forcibly set the file format to save to
bbox_inches : str or `~matplotlib.transforms.Bbox`, optional
Bbox in inches. Only the given portion of the figure is
saved. If 'tight', try to figure out the tight bbox of
the figure. If None, use savefig.bbox
pad_inches : scalar, optional
Amount of padding around the figure when bbox_inches is
'tight'. If None, use savefig.pad_inches
bbox_extra_artists : list of `~matplotlib.artist.Artist`, optional
A list of extra artists that will be considered when the
tight bbox is calculated.
"""
self._is_saving = True
# Remove the figure manager, if any, to avoid resizing the GUI widget.
# Having *no* manager and a *None* manager are currently different (see
# Figure.show); should probably be normalized to None at some point.
_no_manager = object()
if hasattr(self, 'manager'):
manager = self.manager
del self.manager
else:
manager = _no_manager
if format is None:
# get format from filename, or from backend's default filetype
if isinstance(filename, getattr(os, "PathLike", ())):
filename = os.fspath(filename)
if isinstance(filename, six.string_types):
format = os.path.splitext(filename)[1][1:]
if format is None or format == '':
format = self.get_default_filetype()
if isinstance(filename, six.string_types):
filename = filename.rstrip('.') + '.' + format
format = format.lower()
# get canvas object and print method for format
canvas = self._get_output_canvas(format)
print_method = getattr(canvas, 'print_%s' % format)
if dpi is None:
dpi = rcParams['savefig.dpi']
if dpi == 'figure':
dpi = getattr(self.figure, '_original_dpi', self.figure.dpi)
if facecolor is None:
facecolor = rcParams['savefig.facecolor']
if edgecolor is None:
edgecolor = rcParams['savefig.edgecolor']
origDPI = self.figure.dpi
origfacecolor = self.figure.get_facecolor()
origedgecolor = self.figure.get_edgecolor()
self.figure.dpi = dpi
self.figure.set_facecolor(facecolor)
self.figure.set_edgecolor(edgecolor)
bbox_inches = kwargs.pop("bbox_inches", None)
if bbox_inches is None:
bbox_inches = rcParams['savefig.bbox']
if bbox_inches:
# call adjust_bbox to save only the given area
if bbox_inches == "tight":
# When bbox_inches == "tight", it saves the figure twice. The
# first save command (to a BytesIO) is just to estimate the
# bounding box of the figure.
result = print_method(
io.BytesIO(),
dpi=dpi,
facecolor=facecolor,
edgecolor=edgecolor,
orientation=orientation,
dryrun=True,
**kwargs)
renderer = self.figure._cachedRenderer
bbox_inches = self.figure.get_tightbbox(renderer)
bbox_artists = kwargs.pop("bbox_extra_artists", None)
if bbox_artists is None:
bbox_artists = self.figure.get_default_bbox_extra_artists()
bbox_filtered = []
for a in bbox_artists:
bbox = a.get_window_extent(renderer)
if a.get_clip_on():
clip_box = a.get_clip_box()
if clip_box is not None:
bbox = Bbox.intersection(bbox, clip_box)
clip_path = a.get_clip_path()
if clip_path is not None and bbox is not None:
clip_path = clip_path.get_fully_transformed_path()
bbox = Bbox.intersection(bbox,
clip_path.get_extents())
if bbox is not None and (bbox.width != 0 or
bbox.height != 0):
bbox_filtered.append(bbox)
if bbox_filtered:
_bbox = Bbox.union(bbox_filtered)
trans = Affine2D().scale(1.0 / self.figure.dpi)
bbox_extra = TransformedBbox(_bbox, trans)
bbox_inches = Bbox.union([bbox_inches, bbox_extra])
pad = kwargs.pop("pad_inches", None)
if pad is None:
pad = rcParams['savefig.pad_inches']
bbox_inches = bbox_inches.padded(pad)
restore_bbox = tight_bbox.adjust_bbox(self.figure, bbox_inches,
canvas.fixed_dpi)
_bbox_inches_restore = (bbox_inches, restore_bbox)
else:
_bbox_inches_restore = None
try:
result = print_method(
filename,
dpi=dpi,
facecolor=facecolor,
edgecolor=edgecolor,
orientation=orientation,
bbox_inches_restore=_bbox_inches_restore,
**kwargs)
finally:
if bbox_inches and restore_bbox:
restore_bbox()
self.figure.dpi = origDPI
self.figure.set_facecolor(origfacecolor)
self.figure.set_edgecolor(origedgecolor)
self.figure.set_canvas(self)
if manager is not _no_manager:
self.manager = manager
self._is_saving = False
return result
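    # Example (illustrative): this is the method that backs Figure.savefig, so
    # a direct call such as
    #
    #     fig.canvas.print_figure('figure.png', dpi=200, bbox_inches='tight')
    #
    # renders the figure to 'figure.png' at 200 dpi, cropped to its tight
    # bounding box (assuming the backend can print PNG).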
@classmethod
def get_default_filetype(cls):
"""
Get the default savefig file format as specified in rcParam
``savefig.format``. Returned string excludes period. Overridden
in backends that only support a single file type.
"""
return rcParams['savefig.format']
def get_window_title(self):
"""
Get the title text of the window containing the figure.
Return None if there is no window (e.g., a PS backend).
"""
if hasattr(self, "manager"):
return self.manager.get_window_title()
def set_window_title(self, title):
"""
Set the title text of the window containing the figure. Note that
this has no effect if there is no window (e.g., a PS backend).
"""
if hasattr(self, "manager"):
self.manager.set_window_title(title)
def get_default_filename(self):
"""
Return a string, which includes extension, suitable for use as
a default filename.
"""
default_basename = self.get_window_title() or 'image'
default_basename = default_basename.replace(' ', '_')
default_filetype = self.get_default_filetype()
default_filename = default_basename + '.' + default_filetype
save_dir = os.path.expanduser(rcParams['savefig.directory'])
# ensure non-existing filename in save dir
i = 1
while os.path.isfile(os.path.join(save_dir, default_filename)):
# attach numerical count to basename
default_filename = (
'{}-{}.{}'.format(default_basename, i, default_filetype))
i += 1
return default_filename
def switch_backends(self, FigureCanvasClass):
"""
Instantiate an instance of FigureCanvasClass
This is used for backend switching, e.g., to instantiate a
FigureCanvasPS from a FigureCanvasGTK. Note, deep copying is
not done, so any changes to one of the instances (e.g., setting
figure size or line props), will be reflected in the other
"""
newCanvas = FigureCanvasClass(self.figure)
newCanvas._is_saving = self._is_saving
return newCanvas
def mpl_connect(self, s, func):
"""
Connect event with string *s* to *func*. The signature of *func* is::
def func(event)
where event is a :class:`matplotlib.backend_bases.Event`. The
following events are recognized
- 'button_press_event'
- 'button_release_event'
- 'draw_event'
- 'key_press_event'
- 'key_release_event'
- 'motion_notify_event'
- 'pick_event'
- 'resize_event'
- 'scroll_event'
- 'figure_enter_event',
- 'figure_leave_event',
- 'axes_enter_event',
- 'axes_leave_event'
- 'close_event'
For the location events (button and key press/release), if the
mouse is over the axes, the variable ``event.inaxes`` will be
        set to the :class:`~matplotlib.axes.Axes` the event occurred
        over, and additionally, the variables ``event.xdata`` and
``event.ydata`` will be defined. This is the mouse location
in data coords. See
:class:`~matplotlib.backend_bases.KeyEvent` and
:class:`~matplotlib.backend_bases.MouseEvent` for more info.
Return value is a connection id that can be used with
:meth:`~matplotlib.backend_bases.Event.mpl_disconnect`.
Examples
--------
Usage::
def on_press(event):
print('you pressed', event.button, event.xdata, event.ydata)
cid = canvas.mpl_connect('button_press_event', on_press)
"""
if s == 'idle_event':
cbook.warn_deprecated(1.5,
"idle_event is only implemented for the wx backend, and will "
"be removed in matplotlib 2.1. Use the animations module "
"instead.")
return self.callbacks.connect(s, func)
def mpl_disconnect(self, cid):
"""
Disconnect callback id cid
Examples
--------
Usage::
cid = canvas.mpl_connect('button_press_event', on_press)
#...later
canvas.mpl_disconnect(cid)
"""
return self.callbacks.disconnect(cid)
def new_timer(self, *args, **kwargs):
"""
        Creates a new instance of a backend-specific subclass of
        :class:`backend_bases.TimerBase`.  This is useful for getting periodic
events through the backend's native event loop. Implemented only for
backends with GUIs.
Other Parameters
----------------
interval : scalar
Timer interval in milliseconds
callbacks : List[Tuple[callable, Tuple, Dict]]
Sequence of (func, args, kwargs) where ``func(*args, **kwargs)``
will be executed by the timer every *interval*.
callbacks which return ``False`` or ``0`` will be removed from the
timer.
Examples
--------
>>> timer = fig.canvas.new_timer(callbacks=[(f1, (1, ), {'a': 3}),])
"""
return TimerBase(*args, **kwargs)
def flush_events(self):
"""Flush the GUI events for the figure.
Interactive backends need to reimplement this method.
"""
def start_event_loop(self, timeout=0):
"""Start a blocking event loop.
Such an event loop is used by interactive functions, such as `ginput`
and `waitforbuttonpress`, to wait for events.
The event loop blocks until a callback function triggers
`stop_event_loop`, or *timeout* is reached.
If *timeout* is negative, never timeout.
Only interactive backends need to reimplement this method and it relies
on `flush_events` being properly implemented.
Interactive backends should implement this in a more native way.
"""
if timeout <= 0:
timeout = np.inf
timestep = 0.01
counter = 0
self._looping = True
while self._looping and counter * timestep < timeout:
self.flush_events()
time.sleep(timestep)
counter += 1
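    # Example (illustrative): interactive helpers such as `ginput` and
    # `waitforbuttonpress` follow this pattern -- connect a callback that
    # calls ``stop_event_loop``, then block:
    #
    #     def _on_key(event):
    #         canvas.stop_event_loop()
    #     cid = canvas.mpl_connect('key_press_event', _on_key)
    #     canvas.start_event_loop(timeout=5)   # returns on key press or 5 s
    #     canvas.mpl_disconnect(cid)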
def stop_event_loop(self):
"""Stop the current blocking event loop.
Interactive backends need to reimplement this to match
`start_event_loop`
"""
self._looping = False
start_event_loop_default = cbook.deprecated(
"2.1", name="start_event_loop_default")(start_event_loop)
stop_event_loop_default = cbook.deprecated(
"2.1", name="stop_event_loop_default")(stop_event_loop)
def key_press_handler(event, canvas, toolbar=None):
"""
Implement the default mpl key bindings for the canvas and toolbar
described at :ref:`key-event-handling`
Parameters
----------
event : :class:`KeyEvent`
a key press/release event
canvas : :class:`FigureCanvasBase`
the backend-specific canvas instance
toolbar : :class:`NavigationToolbar2`
the navigation cursor toolbar
"""
# these bindings happen whether you are over an axes or not
if event.key is None:
return
# Load key-mappings from your matplotlibrc file.
fullscreen_keys = rcParams['keymap.fullscreen']
home_keys = rcParams['keymap.home']
back_keys = rcParams['keymap.back']
forward_keys = rcParams['keymap.forward']
pan_keys = rcParams['keymap.pan']
zoom_keys = rcParams['keymap.zoom']
save_keys = rcParams['keymap.save']
quit_keys = rcParams['keymap.quit']
grid_keys = rcParams['keymap.grid']
grid_minor_keys = rcParams['keymap.grid_minor']
toggle_yscale_keys = rcParams['keymap.yscale']
toggle_xscale_keys = rcParams['keymap.xscale']
all_keys = rcParams['keymap.all_axes']
# toggle fullscreen mode ('f', 'ctrl + f')
if event.key in fullscreen_keys:
try:
canvas.manager.full_screen_toggle()
except AttributeError:
pass
# quit the figure (default key 'ctrl+w')
if event.key in quit_keys:
Gcf.destroy_fig(canvas.figure)
if toolbar is not None:
# home or reset mnemonic (default key 'h', 'home' and 'r')
if event.key in home_keys:
toolbar.home()
# forward / backward keys to enable left handed quick navigation
# (default key for backward: 'left', 'backspace' and 'c')
elif event.key in back_keys:
toolbar.back()
# (default key for forward: 'right' and 'v')
elif event.key in forward_keys:
toolbar.forward()
# pan mnemonic (default key 'p')
elif event.key in pan_keys:
toolbar.pan()
toolbar._set_cursor(event)
# zoom mnemonic (default key 'o')
elif event.key in zoom_keys:
toolbar.zoom()
toolbar._set_cursor(event)
# saving current figure (default key 's')
elif event.key in save_keys:
toolbar.save_figure()
if event.inaxes is None:
return
# these bindings require the mouse to be over an axes to trigger
def _get_uniform_gridstate(ticks):
# Return True/False if all grid lines are on or off, None if they are
# not all in the same state.
if all(tick.gridOn for tick in ticks):
return True
elif not any(tick.gridOn for tick in ticks):
return False
else:
return None
ax = event.inaxes
# toggle major grids in current axes (default key 'g')
# Both here and below (for 'G'), we do nothing if *any* grid (major or
# minor, x or y) is not in a uniform state, to avoid messing up user
# customization.
if (event.key in grid_keys
# Exclude minor grids not in a uniform state.
and None not in [_get_uniform_gridstate(ax.xaxis.minorTicks),
_get_uniform_gridstate(ax.yaxis.minorTicks)]):
x_state = _get_uniform_gridstate(ax.xaxis.majorTicks)
y_state = _get_uniform_gridstate(ax.yaxis.majorTicks)
cycle = [(False, False), (True, False), (True, True), (False, True)]
try:
x_state, y_state = (
cycle[(cycle.index((x_state, y_state)) + 1) % len(cycle)])
except ValueError:
# Exclude major grids not in a uniform state.
pass
else:
# If turning major grids off, also turn minor grids off.
ax.grid(x_state, which="major" if x_state else "both", axis="x")
ax.grid(y_state, which="major" if y_state else "both", axis="y")
canvas.draw_idle()
# toggle major and minor grids in current axes (default key 'G')
if (event.key in grid_minor_keys
# Exclude major grids not in a uniform state.
and None not in [_get_uniform_gridstate(ax.xaxis.majorTicks),
_get_uniform_gridstate(ax.yaxis.majorTicks)]):
x_state = _get_uniform_gridstate(ax.xaxis.minorTicks)
y_state = _get_uniform_gridstate(ax.yaxis.minorTicks)
cycle = [(False, False), (True, False), (True, True), (False, True)]
try:
x_state, y_state = (
cycle[(cycle.index((x_state, y_state)) + 1) % len(cycle)])
except ValueError:
# Exclude minor grids not in a uniform state.
pass
else:
ax.grid(x_state, which="both", axis="x")
ax.grid(y_state, which="both", axis="y")
canvas.draw_idle()
    # toggle scaling of y-axes between 'log' and 'linear' (default key 'l')
elif event.key in toggle_yscale_keys:
scale = ax.get_yscale()
if scale == 'log':
ax.set_yscale('linear')
ax.figure.canvas.draw_idle()
elif scale == 'linear':
try:
ax.set_yscale('log')
except ValueError as exc:
warnings.warn(str(exc))
ax.set_yscale('linear')
ax.figure.canvas.draw_idle()
    # toggle scaling of x-axes between 'log' and 'linear' (default key 'k')
elif event.key in toggle_xscale_keys:
scalex = ax.get_xscale()
if scalex == 'log':
ax.set_xscale('linear')
ax.figure.canvas.draw_idle()
elif scalex == 'linear':
try:
ax.set_xscale('log')
except ValueError as exc:
warnings.warn(str(exc))
ax.set_xscale('linear')
ax.figure.canvas.draw_idle()
elif (event.key.isdigit() and event.key != '0') or event.key in all_keys:
        # Keys in the 'all' list enable all axes (default key 'a');
        # otherwise, if the key is a number, enable only that particular
        # axes, provided it is the axes over which the event was raised.
if not (event.key in all_keys):
n = int(event.key) - 1
for i, a in enumerate(canvas.figure.get_axes()):
# consider axes, in which the event was raised
# FIXME: Why only this axes?
if event.x is not None and event.y is not None \
and a.in_axes(event):
if event.key in all_keys:
a.set_navigate(True)
else:
a.set_navigate(i == n)
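# --- Illustrative sketch (editor's addition, not part of the original module).
# ``key_press_handler`` above is normally connected for you by
# FigureManagerBase (see below); a script can also attach its own handler in
# parallel through the standard event API.  The callback and the 'x' binding
# are assumptions made only for this sketch.
def _example_custom_key_handler():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1])
    def _on_key(event):
        # ``event.key`` is None for bare modifier presses, which is exactly
        # what the guard at the top of ``key_press_handler`` checks for.
        if event.key == 'x':
            ax.set_title('you pressed x')
            fig.canvas.draw_idle()
    fig.canvas.mpl_connect('key_press_event', _on_key)
    plt.show()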
class NonGuiException(Exception):
pass
class FigureManagerBase(object):
"""
Helper class for pyplot mode, wraps everything up into a neat bundle
Attributes
----------
canvas : :class:`FigureCanvasBase`
The backend-specific canvas instance
num : int or str
The figure number
key_press_handler_id : int
The default key handler cid, when using the toolmanager. Can be used
to disable default key press handling ::
figure.canvas.mpl_disconnect(
figure.canvas.manager.key_press_handler_id)
"""
def __init__(self, canvas, num):
self.canvas = canvas
canvas.manager = self # store a pointer to parent
self.num = num
self.key_press_handler_id = None
if rcParams['toolbar'] != 'toolmanager':
self.key_press_handler_id = self.canvas.mpl_connect(
'key_press_event',
self.key_press)
def show(self):
"""
For GUI backends, show the figure window and redraw.
For non-GUI backends, raise an exception to be caught
by :meth:`~matplotlib.figure.Figure.show`, for an
optional warning.
"""
raise NonGuiException()
def destroy(self):
pass
def full_screen_toggle(self):
pass
def resize(self, w, h):
""""For GUI backends, resize the window (in pixels)."""
def key_press(self, event):
"""
Implement the default mpl key bindings defined at
:ref:`key-event-handling`
"""
if rcParams['toolbar'] != 'toolmanager':
key_press_handler(event, self.canvas, self.canvas.toolbar)
@cbook.deprecated("2.2")
def show_popup(self, msg):
"""Display message in a popup -- GUI only."""
def get_window_title(self):
"""Get the title text of the window containing the figure.
Return None for non-GUI (e.g., PS) backends.
"""
return 'image'
def set_window_title(self, title):
"""Set the title text of the window containing the figure.
This has no effect for non-GUI (e.g., PS) backends.
"""
cursors = tools.cursors
class NavigationToolbar2(object):
"""
Base class for the navigation cursor, version 2
    Backends must implement a canvas that handles connections for
    'button_press_event' and 'button_release_event'.  See
    :meth:`FigureCanvasBase.mpl_connect` for more information.
They must also define
:meth:`save_figure`
save the current figure
:meth:`set_cursor`
if you want the pointer icon to change
:meth:`_init_toolbar`
create your toolbar widget
:meth:`draw_rubberband` (optional)
draw the zoom to rect "rubberband" rectangle
:meth:`press` (optional)
whenever a mouse button is pressed, you'll be notified with
the event
:meth:`release` (optional)
whenever a mouse button is released, you'll be notified with
the event
:meth:`set_message` (optional)
display message
:meth:`set_history_buttons` (optional)
you can change the history back / forward buttons to
indicate disabled / enabled state.
That's it, we'll do the rest!
"""
# list of toolitems to add to the toolbar, format is:
# (
# text, # the text of the button (often not visible to users)
# tooltip_text, # the tooltip shown on hover (where possible)
# image_file, # name of the image for the button (without the extension)
# name_of_method, # name of the method in NavigationToolbar2 to call
# )
toolitems = (
('Home', 'Reset original view', 'home', 'home'),
('Back', 'Back to previous view', 'back', 'back'),
('Forward', 'Forward to next view', 'forward', 'forward'),
(None, None, None, None),
('Pan', 'Pan axes with left mouse, zoom with right', 'move', 'pan'),
('Zoom', 'Zoom to rectangle', 'zoom_to_rect', 'zoom'),
('Subplots', 'Configure subplots', 'subplots', 'configure_subplots'),
(None, None, None, None),
('Save', 'Save the figure', 'filesave', 'save_figure'),
)
def __init__(self, canvas):
self.canvas = canvas
canvas.toolbar = self
self._nav_stack = cbook.Stack()
self._xypress = None # the location and axis info at the time
# of the press
self._idPress = None
self._idRelease = None
self._active = None
# This cursor will be set after the initial draw.
self._lastCursor = cursors.POINTER
self._init_toolbar()
self._idDrag = self.canvas.mpl_connect(
'motion_notify_event', self.mouse_move)
self._ids_zoom = []
self._zoom_mode = None
self._button_pressed = None # determined by the button pressed
# at start
self.mode = '' # a mode string for the status bar
self.set_history_buttons()
def set_message(self, s):
"""Display a message on toolbar or in status bar."""
def back(self, *args):
"""move back up the view lim stack"""
self._nav_stack.back()
self.set_history_buttons()
self._update_view()
@cbook.deprecated("2.1", alternative="canvas.draw_idle")
def dynamic_update(self):
self.canvas.draw_idle()
def draw_rubberband(self, event, x0, y0, x1, y1):
"""Draw a rectangle rubberband to indicate zoom limits.
Note that it is not guaranteed that ``x0 <= x1`` and ``y0 <= y1``.
"""
def remove_rubberband(self):
"""Remove the rubberband."""
def forward(self, *args):
"""Move forward in the view lim stack."""
self._nav_stack.forward()
self.set_history_buttons()
self._update_view()
def home(self, *args):
"""Restore the original view."""
self._nav_stack.home()
self.set_history_buttons()
self._update_view()
def _init_toolbar(self):
"""
This is where you actually build the GUI widgets (called by
__init__). The icons ``home.xpm``, ``back.xpm``, ``forward.xpm``,
``hand.xpm``, ``zoom_to_rect.xpm`` and ``filesave.xpm`` are standard
across backends (there are ppm versions in CVS also).
You just need to set the callbacks
home : self.home
back : self.back
forward : self.forward
hand : self.pan
zoom_to_rect : self.zoom
filesave : self.save_figure
You only need to define the last one - the others are in the base
class implementation.
"""
raise NotImplementedError
def _set_cursor(self, event):
if not event.inaxes or not self._active:
if self._lastCursor != cursors.POINTER:
self.set_cursor(cursors.POINTER)
self._lastCursor = cursors.POINTER
else:
if (self._active == 'ZOOM'
and self._lastCursor != cursors.SELECT_REGION):
self.set_cursor(cursors.SELECT_REGION)
self._lastCursor = cursors.SELECT_REGION
elif (self._active == 'PAN' and
self._lastCursor != cursors.MOVE):
self.set_cursor(cursors.MOVE)
self._lastCursor = cursors.MOVE
def mouse_move(self, event):
self._set_cursor(event)
if event.inaxes and event.inaxes.get_navigate():
try:
s = event.inaxes.format_coord(event.xdata, event.ydata)
except (ValueError, OverflowError):
pass
else:
artists = [a for a in event.inaxes.mouseover_set
if a.contains(event) and a.get_visible()]
if artists:
a = cbook._topmost_artist(artists)
if a is not event.inaxes.patch:
data = a.get_cursor_data(event)
if data is not None:
s += ' [%s]' % a.format_cursor_data(data)
if len(self.mode):
self.set_message('%s, %s' % (self.mode, s))
else:
self.set_message(s)
else:
self.set_message(self.mode)
def pan(self, *args):
"""Activate the pan/zoom tool. pan with left button, zoom with right"""
# set the pointer icon and button press funcs to the
# appropriate callbacks
if self._active == 'PAN':
self._active = None
else:
self._active = 'PAN'
if self._idPress is not None:
self._idPress = self.canvas.mpl_disconnect(self._idPress)
self.mode = ''
if self._idRelease is not None:
self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
self.mode = ''
if self._active:
self._idPress = self.canvas.mpl_connect(
'button_press_event', self.press_pan)
self._idRelease = self.canvas.mpl_connect(
'button_release_event', self.release_pan)
self.mode = 'pan/zoom'
self.canvas.widgetlock(self)
else:
self.canvas.widgetlock.release(self)
for a in self.canvas.figure.get_axes():
a.set_navigate_mode(self._active)
self.set_message(self.mode)
def press(self, event):
"""Called whenever a mouse button is pressed."""
def press_pan(self, event):
"""Callback for mouse button press in pan/zoom mode."""
if event.button == 1:
self._button_pressed = 1
elif event.button == 3:
self._button_pressed = 3
else:
self._button_pressed = None
return
if self._nav_stack() is None:
# set the home button to this view
self.push_current()
x, y = event.x, event.y
self._xypress = []
for i, a in enumerate(self.canvas.figure.get_axes()):
if (x is not None and y is not None and a.in_axes(event) and
a.get_navigate() and a.can_pan()):
a.start_pan(x, y, event.button)
self._xypress.append((a, i))
self.canvas.mpl_disconnect(self._idDrag)
self._idDrag = self.canvas.mpl_connect('motion_notify_event',
self.drag_pan)
self.press(event)
def press_zoom(self, event):
"""Callback for mouse button press in zoom to rect mode."""
# If we're already in the middle of a zoom, pressing another
# button works to "cancel"
if self._ids_zoom != []:
for zoom_id in self._ids_zoom:
self.canvas.mpl_disconnect(zoom_id)
self.release(event)
self.draw()
self._xypress = None
self._button_pressed = None
self._ids_zoom = []
return
if event.button == 1:
self._button_pressed = 1
elif event.button == 3:
self._button_pressed = 3
else:
self._button_pressed = None
return
if self._nav_stack() is None:
# set the home button to this view
self.push_current()
x, y = event.x, event.y
self._xypress = []
for i, a in enumerate(self.canvas.figure.get_axes()):
if (x is not None and y is not None and a.in_axes(event) and
a.get_navigate() and a.can_zoom()):
self._xypress.append((x, y, a, i, a._get_view()))
id1 = self.canvas.mpl_connect('motion_notify_event', self.drag_zoom)
id2 = self.canvas.mpl_connect('key_press_event',
self._switch_on_zoom_mode)
id3 = self.canvas.mpl_connect('key_release_event',
self._switch_off_zoom_mode)
self._ids_zoom = id1, id2, id3
self._zoom_mode = event.key
self.press(event)
def _switch_on_zoom_mode(self, event):
self._zoom_mode = event.key
self.mouse_move(event)
def _switch_off_zoom_mode(self, event):
self._zoom_mode = None
self.mouse_move(event)
def push_current(self):
"""Push the current view limits and position onto the stack."""
self._nav_stack.push(
WeakKeyDictionary(
{ax: (ax._get_view(),
# Store both the original and modified positions.
(ax.get_position(True).frozen(),
ax.get_position().frozen()))
for ax in self.canvas.figure.axes}))
self.set_history_buttons()
def release(self, event):
"""Callback for mouse button release."""
def release_pan(self, event):
"""Callback for mouse button release in pan/zoom mode."""
if self._button_pressed is None:
return
self.canvas.mpl_disconnect(self._idDrag)
self._idDrag = self.canvas.mpl_connect(
'motion_notify_event', self.mouse_move)
for a, ind in self._xypress:
a.end_pan()
if not self._xypress:
return
self._xypress = []
self._button_pressed = None
self.push_current()
self.release(event)
self.draw()
def drag_pan(self, event):
"""Callback for dragging in pan/zoom mode."""
for a, ind in self._xypress:
            # Safer to use the button recorded at the press than the current
            # button: multiple buttons can get pressed during motion.
a.drag_pan(self._button_pressed, event.key, event.x, event.y)
self.canvas.draw_idle()
def drag_zoom(self, event):
"""Callback for dragging in zoom mode."""
if self._xypress:
x, y = event.x, event.y
lastx, lasty, a, ind, view = self._xypress[0]
(x1, y1), (x2, y2) = np.clip(
[[lastx, lasty], [x, y]], a.bbox.min, a.bbox.max)
if self._zoom_mode == "x":
y1, y2 = a.bbox.intervaly
elif self._zoom_mode == "y":
x1, x2 = a.bbox.intervalx
self.draw_rubberband(event, x1, y1, x2, y2)
def release_zoom(self, event):
"""Callback for mouse button release in zoom to rect mode."""
for zoom_id in self._ids_zoom:
self.canvas.mpl_disconnect(zoom_id)
self._ids_zoom = []
self.remove_rubberband()
if not self._xypress:
return
last_a = []
for cur_xypress in self._xypress:
x, y = event.x, event.y
lastx, lasty, a, ind, view = cur_xypress
            # Ignore singular clicks: a 5-pixel threshold lets the user
            # "cancel" a zoom action by dragging less than 5 pixels.
            if ((abs(x - lastx) < 5 and self._zoom_mode != "y") or
                    (abs(y - lasty) < 5 and self._zoom_mode != "x")):
self._xypress = None
self.release(event)
self.draw()
return
# detect twinx,y axes and avoid double zooming
twinx, twiny = False, False
if last_a:
for la in last_a:
if a.get_shared_x_axes().joined(a, la):
twinx = True
if a.get_shared_y_axes().joined(a, la):
twiny = True
last_a.append(a)
if self._button_pressed == 1:
direction = 'in'
elif self._button_pressed == 3:
direction = 'out'
else:
continue
a._set_view_from_bbox((lastx, lasty, x, y), direction,
self._zoom_mode, twinx, twiny)
self.draw()
self._xypress = None
self._button_pressed = None
self._zoom_mode = None
self.push_current()
self.release(event)
def draw(self):
"""Redraw the canvases, update the locators."""
for a in self.canvas.figure.get_axes():
xaxis = getattr(a, 'xaxis', None)
yaxis = getattr(a, 'yaxis', None)
locators = []
if xaxis is not None:
locators.append(xaxis.get_major_locator())
locators.append(xaxis.get_minor_locator())
if yaxis is not None:
locators.append(yaxis.get_major_locator())
locators.append(yaxis.get_minor_locator())
for loc in locators:
loc.refresh()
self.canvas.draw_idle()
def _update_view(self):
"""Update the viewlim and position from the view and
position stack for each axes.
"""
nav_info = self._nav_stack()
if nav_info is None:
return
# Retrieve all items at once to avoid any risk of GC deleting an Axes
# while in the middle of the loop below.
items = list(nav_info.items())
for ax, (view, (pos_orig, pos_active)) in items:
ax._set_view(view)
# Restore both the original and modified positions
ax._set_position(pos_orig, 'original')
ax._set_position(pos_active, 'active')
self.canvas.draw_idle()
def save_figure(self, *args):
"""Save the current figure."""
raise NotImplementedError
def set_cursor(self, cursor):
"""Set the current cursor to one of the :class:`Cursors` enums values.
If required by the backend, this method should trigger an update in
the backend event loop after the cursor is set, as this method may be
called e.g. before a long-running task during which the GUI is not
updated.
"""
def update(self):
"""Reset the axes stack."""
self._nav_stack.clear()
self.set_history_buttons()
def zoom(self, *args):
"""Activate zoom to rect mode."""
if self._active == 'ZOOM':
self._active = None
else:
self._active = 'ZOOM'
if self._idPress is not None:
self._idPress = self.canvas.mpl_disconnect(self._idPress)
self.mode = ''
if self._idRelease is not None:
self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
self.mode = ''
if self._active:
self._idPress = self.canvas.mpl_connect('button_press_event',
self.press_zoom)
self._idRelease = self.canvas.mpl_connect('button_release_event',
self.release_zoom)
self.mode = 'zoom rect'
self.canvas.widgetlock(self)
else:
self.canvas.widgetlock.release(self)
for a in self.canvas.figure.get_axes():
a.set_navigate_mode(self._active)
self.set_message(self.mode)
def set_history_buttons(self):
"""Enable or disable the back/forward button."""
class ToolContainerBase(object):
"""
Base class for all tool containers, e.g. toolbars.
Attributes
----------
toolmanager : `ToolManager`
The tools with which this `ToolContainer` wants to communicate.
"""
_icon_extension = '.png'
"""
    Toolcontainer button icon image file extension.
    **String**: image extension
"""
def __init__(self, toolmanager):
self.toolmanager = toolmanager
self.toolmanager.toolmanager_connect('tool_removed_event',
self._remove_tool_cbk)
def _tool_toggled_cbk(self, event):
"""
        Capture the 'tool_trigger_[name]' event.
        This is only used for toggled tools.
"""
self.toggle_toolitem(event.tool.name, event.tool.toggled)
def add_tool(self, tool, group, position=-1):
"""
Adds a tool to this container
Parameters
----------
tool : tool_like
The tool to add, see `ToolManager.get_tool`.
group : str
The name of the group to add this tool to.
position : int (optional)
The position within the group to place this tool. Defaults to end.
"""
tool = self.toolmanager.get_tool(tool)
image = self._get_image_filename(tool.image)
toggle = getattr(tool, 'toggled', None) is not None
self.add_toolitem(tool.name, group, position,
image, tool.description, toggle)
if toggle:
self.toolmanager.toolmanager_connect('tool_trigger_%s' % tool.name,
self._tool_toggled_cbk)
# If initially toggled
if tool.toggled:
self.toggle_toolitem(tool.name, True)
def _remove_tool_cbk(self, event):
"""Captures the 'tool_removed_event' signal and removes the tool."""
self.remove_toolitem(event.tool.name)
def _get_image_filename(self, image):
"""Find the image based on its name."""
if not image:
return None
basedir = os.path.join(rcParams['datapath'], 'images')
possible_images = (
image,
image + self._icon_extension,
os.path.join(basedir, image),
os.path.join(basedir, image) + self._icon_extension)
for fname in possible_images:
if os.path.isfile(fname):
return fname
def trigger_tool(self, name):
"""
Trigger the tool
Parameters
----------
name : String
Name (id) of the tool triggered from within the container
"""
self.toolmanager.trigger_tool(name, sender=self)
def add_toolitem(self, name, group, position, image, description, toggle):
"""
Add a toolitem to the container
        This method must be implemented per backend.
        The callback associated with the button click event
        must be **EXACTLY** `self.trigger_tool(name)`.
Parameters
----------
name : string
            Name of the tool to add; this gets used as the tool's ID and as
            the default label of the button.
group : String
Name of the group that this tool belongs to
position : Int
            Position of the tool within its group; if -1, it goes at the end.
        image : String
Filename of the image for the button or `None`
description : String
Description of the tool, used for the tooltips
toggle : Bool
* `True` : The button is a toggle (change the pressed/unpressed
state between consecutive clicks)
* `False` : The button is a normal button (returns to unpressed
state after release)
"""
raise NotImplementedError
def toggle_toolitem(self, name, toggled):
"""
Toggle the toolitem without firing event
Parameters
----------
name : String
Id of the tool to toggle
toggled : bool
Whether to set this tool as toggled or not.
"""
raise NotImplementedError
def remove_toolitem(self, name):
"""
Remove a toolitem from the `ToolContainer`
        This method must be implemented per backend.
        Called when `ToolManager` emits a `tool_removed_event`.
Parameters
----------
name : string
Name of the tool to remove
"""
raise NotImplementedError
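# --- Illustrative sketch (editor's addition, not part of the original module).
# How a ToolContainer gets populated in practice: with the (experimental)
# ``toolmanager`` toolbar, tools are registered on the ToolManager and then
# added to the container by group, which ends up calling ``add_toolitem``
# above.  The tool name, group and backend support for 'toolmanager' are
# assumptions made only for this sketch.
def _example_add_custom_tool():
    import matplotlib
    matplotlib.rcParams['toolbar'] = 'toolmanager'
    import matplotlib.pyplot as plt
    from matplotlib.backend_tools import ToolBase
    class _ListTools(ToolBase):
        description = 'List registered tools'
        def trigger(self, sender, event, data=None):
            print(sorted(self.toolmanager.tools))
    fig = plt.figure()
    fig.canvas.manager.toolmanager.add_tool('List', _ListTools)
    fig.canvas.manager.toolbar.add_tool('List', 'io')
    plt.show()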
class StatusbarBase(object):
"""Base class for the statusbar"""
def __init__(self, toolmanager):
self.toolmanager = toolmanager
self.toolmanager.toolmanager_connect('tool_message_event',
self._message_cbk)
def _message_cbk(self, event):
"""Captures the 'tool_message_event' and set the message"""
self.set_message(event.message)
def set_message(self, s):
"""
Display a message on toolbar or in status bar
Parameters
----------
s : str
Message text
"""
pass
| 114,229 | 32.75591 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/pylab.py
|
"""
This is a procedural interface to the matplotlib object-oriented
plotting library.
The following plotting commands are provided; the majority have
MATLAB |reg| [*]_ analogs and similar arguments.
.. |reg| unicode:: 0xAE
_Plotting commands
acorr - plot the autocorrelation function
annotate - annotate something in the figure
arrow - add an arrow to the axes
axes - Create a new axes
axhline - draw a horizontal line across axes
axvline - draw a vertical line across axes
axhspan - draw a horizontal bar across axes
axvspan - draw a vertical bar across axes
axis - Set or return the current axis limits
autoscale - turn axis autoscaling on or off, and apply it
bar - make a bar chart
barh - a horizontal bar chart
broken_barh - a set of horizontal bars with gaps
box - set the axes frame on/off state
boxplot - make a box and whisker plot
violinplot - make a violin plot
cla - clear current axes
clabel - label a contour plot
clf - clear a figure window
clim - adjust the color limits of the current image
close - close a figure window
colorbar - add a colorbar to the current figure
cohere - make a plot of coherence
contour - make a contour plot
contourf - make a filled contour plot
csd - make a plot of cross spectral density
delaxes - delete an axes from the current figure
draw - Force a redraw of the current figure
errorbar - make an errorbar graph
figlegend - make legend on the figure rather than the axes
figimage - make a figure image
figtext - add text in figure coords
figure - create or change active figure
fill - make filled polygons
findobj - recursively find all objects matching some criteria
gca - return the current axes
gcf - return the current figure
gci - get the current image, or None
getp - get a graphics property
grid - set whether gridding is on
hist - make a histogram
ioff - turn interaction mode off
ion - turn interaction mode on
isinteractive - return True if interaction mode is on
imread - load image file into array
imsave - save array as an image file
imshow - plot image data
legend - make an axes legend
locator_params - adjust parameters used in locating axis ticks
loglog - a log log plot
matshow - display a matrix in a new figure preserving aspect
margins - set margins used in autoscaling
pause - pause for a specified interval
pcolor - make a pseudocolor plot
pcolormesh - make a pseudocolor plot using a quadrilateral mesh
pie - make a pie chart
plot - make a line plot
plot_date - plot dates
plotfile - plot column data from an ASCII tab/space/comma delimited file
pie - pie charts
polar - make a polar plot on a PolarAxes
psd - make a plot of power spectral density
quiver - make a direction field (arrows) plot
rc - control the default params
rgrids - customize the radial grids and labels for polar
savefig - save the current figure
scatter - make a scatter plot
setp - set a graphics property
semilogx - log x axis
semilogy - log y axis
show - show the figures
specgram - a spectrogram plot
spy - plot sparsity pattern using markers or image
stem - make a stem plot
subplot - make one subplot (numrows, numcols, axesnum)
subplots - make a figure with a set of (numrows, numcols) subplots
subplots_adjust - change the params controlling the subplot positions of current figure
subplot_tool - launch the subplot configuration tool
suptitle - add a figure title
table - add a table to the plot
text - add some text at location x,y to the current axes
thetagrids - customize the radial theta grids and labels for polar
tick_params - control the appearance of ticks and tick labels
ticklabel_format - control the format of tick labels
title - add a title to the current axes
tricontour - make a contour plot on a triangular grid
tricontourf - make a filled contour plot on a triangular grid
tripcolor - make a pseudocolor plot on a triangular grid
triplot - plot a triangular grid
xcorr - plot the autocorrelation function of x and y
xlim - set/get the xlimits
ylim - set/get the ylimits
xticks - set/get the xticks
yticks - set/get the yticks
xlabel - add an xlabel to the current axes
ylabel - add a ylabel to the current axes
autumn - set the default colormap to autumn
bone - set the default colormap to bone
cool - set the default colormap to cool
copper - set the default colormap to copper
flag - set the default colormap to flag
gray - set the default colormap to gray
hot - set the default colormap to hot
hsv - set the default colormap to hsv
jet - set the default colormap to jet
pink - set the default colormap to pink
prism - set the default colormap to prism
spring - set the default colormap to spring
summer - set the default colormap to summer
winter - set the default colormap to winter
_Event handling
connect - register an event handler
disconnect - remove a connected event handler
_Matrix commands
cumprod - the cumulative product along a dimension
cumsum - the cumulative sum along a dimension
  detrend - remove the mean or best fit line from an array
diag - the k-th diagonal of matrix
  diff - the n-th difference of an array
  eig - the eigenvalues and eigenvectors of v
eye - a matrix where the k-th diagonal is ones, else zero
find - return the indices where a condition is nonzero
  fliplr - flip a matrix left/right (reverses the column order)
  flipud - flip a matrix up/down (reverses the row order)
  linspace - a linearly spaced vector of N values from min to max inclusive
  logspace - a log-spaced vector of N values from min to max inclusive
meshgrid - repeat x and y to make regular matrices
ones - an array of ones
rand - an array from the uniform distribution [0,1]
randn - an array from the normal distribution
  rot90 - rotate matrix k*90 degrees counterclockwise
squeeze - squeeze an array removing any dimensions of length 1
tri - a triangular matrix
tril - a lower triangular matrix
triu - an upper triangular matrix
vander - the Vandermonde matrix of vector x
svd - singular value decomposition
zeros - a matrix of zeros
_Probability
normpdf - The Gaussian probability density function
rand - random numbers from the uniform distribution
randn - random numbers from the normal distribution
_Statistics
amax - the maximum along dimension m
amin - the minimum along dimension m
corrcoef - correlation coefficient
cov - covariance matrix
mean - the mean along dimension m
median - the median along dimension m
norm - the norm of vector x
prod - the product along dimension m
ptp - the max-min along dimension m
std - the standard deviation along dimension m
asum - the sum along dimension m
ksdensity - the kernel density estimate
_Time series analysis
bartlett - M-point Bartlett window
blackman - M-point Blackman window
  cohere - the coherence using average periodogram
  csd - the cross spectral density using average periodogram
fft - the fast Fourier transform of vector x
hamming - M-point Hamming window
hanning - M-point Hanning window
hist - compute the histogram of x
kaiser - M length Kaiser window
  psd - the power spectral density using average periodogram
sinc - the sinc function of array x
_Dates
date2num - convert python datetimes to numeric representation
drange - create an array of numbers for date plots
num2date - convert numeric type (float days since 0001) to datetime
_Other
angle - the angle of a complex array
griddata - interpolate irregularly distributed data to a regular grid
load - Deprecated--please use loadtxt.
loadtxt - load ASCII data into array.
polyfit - fit x, y to an n-th order polynomial
polyval - evaluate an n-th order polynomial
roots - the roots of the polynomial coefficients in p
save - Deprecated--please use savetxt.
savetxt - save an array to an ASCII file.
trapz - trapezoidal integration
__end
.. [*] MATLAB is a registered trademark of The MathWorks, Inc.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
from matplotlib.cbook import (
flatten, exception_to_str, silent_list, iterable, dedent)
import matplotlib as mpl
from matplotlib.dates import (
date2num, num2date, datestr2num, strpdate2num, drange, epoch2num,
num2epoch, mx2num, DateFormatter, IndexDateFormatter, DateLocator,
RRuleLocator, YearLocator, MonthLocator, WeekdayLocator, DayLocator,
HourLocator, MinuteLocator, SecondLocator, rrule, MO, TU, WE, TH, FR,
SA, SU, YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, SECONDLY,
relativedelta)
# bring all the symbols in so folks can import them from
# pylab in one fell swoop
## We are still importing too many things from mlab; more cleanup is needed.
from matplotlib.mlab import (
amap, base_repr, binary_repr, bivariate_normal, center_matrix, csv2rec,
demean, detrend, detrend_linear, detrend_mean, detrend_none, dist,
dist_point_to_segment, distances_along_curve, entropy, exp_safe,
fftsurr, find, frange, get_sparse_matrix, get_xyz_where, griddata,
identity, inside_poly, is_closed_polygon, ispower2, isvector, l1norm,
l2norm, log2, longest_contiguous_ones, longest_ones, movavg, norm_flat,
normpdf, path_length, poly_below, poly_between, prctile, prctile_rank,
rec2csv, rec_append_fields, rec_drop_fields, rec_join, rk4, rms_flat,
segments_intersect, slopes, stineman_interp, vector_lengths,
window_hanning, window_none)
from matplotlib import cbook, mlab, pyplot as plt
from matplotlib.pyplot import *
from numpy import *
from numpy.fft import *
from numpy.random import *
from numpy.linalg import *
import numpy as np
import numpy.ma as ma
# don't let numpy's datetime hide stdlib
import datetime
# This is needed, or bytes will be numpy.random.bytes from
# "from numpy.random import *" above
bytes = six.moves.builtins.bytes
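# --- Illustrative sketch (editor's addition, not part of the original module).
# The point of pylab is that plotting commands and the numpy namespace live
# together, MATLAB-style; ``linspace``/``sin``/``pi`` below come from numpy and
# ``plot``/``legend``/``show`` from pyplot, all re-exported above.  For new
# code, importing ``matplotlib.pyplot`` and ``numpy`` explicitly is preferred.
def _example_pylab_session():
    x = linspace(0, 2 * pi, 200)
    plot(x, sin(x), label='sin(x)')
    legend()
    show()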
| 10,489 | 37.996283 | 89 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_animation_data.py
|
# Javascript template for HTMLWriter
JS_INCLUDE = """
<link rel="stylesheet"
href="https://maxcdn.bootstrapcdn.com/font-awesome/4.4.0/
css/font-awesome.min.css">
<script language="javascript">
/* Define the Animation class */
function Animation(frames, img_id, slider_id, interval, loop_select_id){
this.img_id = img_id;
this.slider_id = slider_id;
this.loop_select_id = loop_select_id;
this.interval = interval;
this.current_frame = 0;
this.direction = 0;
this.timer = null;
this.frames = new Array(frames.length);
for (var i=0; i<frames.length; i++)
{
this.frames[i] = new Image();
this.frames[i].src = frames[i];
}
document.getElementById(this.slider_id).max = this.frames.length - 1;
this.set_frame(this.current_frame);
}
Animation.prototype.get_loop_state = function(){
var button_group = document[this.loop_select_id].state;
for (var i = 0; i < button_group.length; i++) {
var button = button_group[i];
if (button.checked) {
return button.value;
}
}
return undefined;
}
Animation.prototype.set_frame = function(frame){
this.current_frame = frame;
document.getElementById(this.img_id).src =
this.frames[this.current_frame].src;
document.getElementById(this.slider_id).value = this.current_frame;
}
Animation.prototype.next_frame = function()
{
this.set_frame(Math.min(this.frames.length - 1, this.current_frame + 1));
}
Animation.prototype.previous_frame = function()
{
this.set_frame(Math.max(0, this.current_frame - 1));
}
Animation.prototype.first_frame = function()
{
this.set_frame(0);
}
Animation.prototype.last_frame = function()
{
this.set_frame(this.frames.length - 1);
}
Animation.prototype.slower = function()
{
this.interval /= 0.7;
if(this.direction > 0){this.play_animation();}
else if(this.direction < 0){this.reverse_animation();}
}
Animation.prototype.faster = function()
{
this.interval *= 0.7;
if(this.direction > 0){this.play_animation();}
else if(this.direction < 0){this.reverse_animation();}
}
Animation.prototype.anim_step_forward = function()
{
this.current_frame += 1;
if(this.current_frame < this.frames.length){
this.set_frame(this.current_frame);
}else{
var loop_state = this.get_loop_state();
if(loop_state == "loop"){
this.first_frame();
}else if(loop_state == "reflect"){
this.last_frame();
this.reverse_animation();
}else{
this.pause_animation();
this.last_frame();
}
}
}
Animation.prototype.anim_step_reverse = function()
{
this.current_frame -= 1;
if(this.current_frame >= 0){
this.set_frame(this.current_frame);
}else{
var loop_state = this.get_loop_state();
if(loop_state == "loop"){
this.last_frame();
}else if(loop_state == "reflect"){
this.first_frame();
this.play_animation();
}else{
this.pause_animation();
this.first_frame();
}
}
}
Animation.prototype.pause_animation = function()
{
this.direction = 0;
if (this.timer){
clearInterval(this.timer);
this.timer = null;
}
}
Animation.prototype.play_animation = function()
{
this.pause_animation();
this.direction = 1;
var t = this;
if (!this.timer) this.timer = setInterval(function() {
t.anim_step_forward();
}, this.interval);
}
Animation.prototype.reverse_animation = function()
{
this.pause_animation();
this.direction = -1;
var t = this;
if (!this.timer) this.timer = setInterval(function() {
t.anim_step_reverse();
}, this.interval);
}
</script>
"""
# HTML template for HTMLWriter
DISPLAY_TEMPLATE = """
<div class="animation" align="center">
<img id="_anim_img{id}">
<br>
<input id="_anim_slider{id}" type="range" style="width:350px"
name="points" min="0" max="1" step="1" value="0"
onchange="anim{id}.set_frame(parseInt(this.value));"></input>
<br>
<button onclick="anim{id}.slower()"><i class="fa fa-minus"></i></button>
<button onclick="anim{id}.first_frame()"><i class="fa fa-fast-backward">
</i></button>
<button onclick="anim{id}.previous_frame()">
<i class="fa fa-step-backward"></i></button>
<button onclick="anim{id}.reverse_animation()">
<i class="fa fa-play fa-flip-horizontal"></i></button>
<button onclick="anim{id}.pause_animation()"><i class="fa fa-pause">
</i></button>
<button onclick="anim{id}.play_animation()"><i class="fa fa-play"></i>
</button>
<button onclick="anim{id}.next_frame()"><i class="fa fa-step-forward">
</i></button>
<button onclick="anim{id}.last_frame()"><i class="fa fa-fast-forward">
</i></button>
<button onclick="anim{id}.faster()"><i class="fa fa-plus"></i></button>
<form action="#n" name="_anim_loop_select{id}" class="anim_control">
<input type="radio" name="state"
value="once" {once_checked}> Once </input>
<input type="radio" name="state"
value="loop" {loop_checked}> Loop </input>
<input type="radio" name="state"
value="reflect" {reflect_checked}> Reflect </input>
</form>
</div>
<script language="javascript">
/* Instantiate the Animation class. */
/* The IDs given should match those used in the template above. */
(function() {{
var img_id = "_anim_img{id}";
var slider_id = "_anim_slider{id}";
var loop_select_id = "_anim_loop_select{id}";
var frames = new Array({Nframes});
{fill_frames}
/* set a timeout to make sure all the above elements are created before
the object is initialized. */
setTimeout(function() {{
anim{id} = new Animation(frames, img_id, slider_id, {interval},
loop_select_id);
}}, 0);
}})()
</script>
"""
INCLUDED_FRAMES = """
for (var i=0; i<{Nframes}; i++){{
frames[i] = "{frame_dir}/frame" + ("0000000" + i).slice(-7) +
".{frame_format}";
}}
"""
| 6,157 | 28.184834 | 77 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/quiver.py
|
"""
Support for plotting vector fields.
Presently this contains Quiver and Barb. Quiver plots an arrow in the
direction of the vector, with the size of the arrow related to the
magnitude of the vector.
Barbs are like quiver in that they point along a vector, but
the magnitude of the vector is given schematically by the presence of barbs
or flags on the barb.
This will also become a home for things such as standard
deviation ellipses, which can and will be derived very easily from
the Quiver code.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import math
import weakref
import numpy as np
from numpy import ma
import matplotlib.collections as mcollections
import matplotlib.transforms as transforms
import matplotlib.text as mtext
import matplotlib.artist as martist
from matplotlib.artist import allow_rasterization
from matplotlib import docstring
import matplotlib.font_manager as font_manager
from matplotlib.cbook import delete_masked_points
from matplotlib.patches import CirclePolygon
_quiver_doc = """
Plot a 2-D field of arrows.
Call signatures::
quiver(U, V, **kw)
quiver(U, V, C, **kw)
quiver(X, Y, U, V, **kw)
quiver(X, Y, U, V, C, **kw)
*U* and *V* are the arrow data, *X* and *Y* set the location of the
arrows, and *C* sets the color of the arrows. These arguments may be 1-D or
2-D arrays or sequences.
If *X* and *Y* are absent, they will be generated as a uniform grid.
If *U* and *V* are 2-D arrays and *X* and *Y* are 1-D, and if ``len(X)`` and
``len(Y)`` match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
The default settings auto-scale the length of the arrows to a reasonable size.
To change this behavior see the *scale* and *scale_units* kwargs.
The defaults give a slightly swept-back arrow; to make the head a
triangle, make *headaxislength* the same as *headlength*. To make the
arrow more pointed, reduce *headwidth* or increase *headlength* and
*headaxislength*. To make the head smaller relative to the shaft,
scale down all the head parameters. You will probably do best to leave
minshaft alone.
*linewidths* and *edgecolors* can be used to customize the arrow
outlines.
Parameters
----------
X : 1D or 2D array, sequence, optional
The x coordinates of the arrow locations
Y : 1D or 2D array, sequence, optional
The y coordinates of the arrow locations
U : 1D or 2D array or masked array, sequence
The x components of the arrow vectors
V : 1D or 2D array or masked array, sequence
The y components of the arrow vectors
C : 1D or 2D array, sequence, optional
The arrow colors
units : [ 'width' | 'height' | 'dots' | 'inches' | 'x' | 'y' | 'xy' ]
The arrow dimensions (except for *length*) are measured in multiples of
this unit.
'width' or 'height': the width or height of the axis
'dots' or 'inches': pixels or inches, based on the figure dpi
'x', 'y', or 'xy': respectively *X*, *Y*, or :math:`\\sqrt{X^2 + Y^2}`
in data units
The arrows scale differently depending on the units. For
'x' or 'y', the arrows get larger as one zooms in; for other
units, the arrow size is independent of the zoom state. For
    'width' or 'height', the arrow size increases with the width and
height of the axes, respectively, when the window is resized;
for 'dots' or 'inches', resizing does not change the arrows.
angles : [ 'uv' | 'xy' ], array, optional
Method for determining the angle of the arrows. Default is 'uv'.
'uv': the arrow axis aspect ratio is 1 so that
if *U*==*V* the orientation of the arrow on the plot is 45 degrees
counter-clockwise from the horizontal axis (positive to the right).
'xy': arrows point from (x,y) to (x+u, y+v).
Use this for plotting a gradient field, for example.
Alternatively, arbitrary angles may be specified as an array
of values in degrees, counter-clockwise from the horizontal axis.
Note: inverting a data axis will correspondingly invert the
arrows only with ``angles='xy'``.
scale : None, float, optional
Number of data units per arrow length unit, e.g., m/s per plot width; a
smaller scale parameter makes the arrow longer. Default is *None*.
If *None*, a simple autoscaling algorithm is used, based on the average
vector length and the number of vectors. The arrow length unit is given by
the *scale_units* parameter
scale_units : [ 'width' | 'height' | 'dots' | 'inches' | 'x' | 'y' | 'xy' ], \
None, optional
If the *scale* kwarg is *None*, the arrow length unit. Default is *None*.
    For example, if *scale_units* is 'inches', *scale* is 2.0, and
    ``(u, v) = (1, 0)``, then the vector will be 0.5 inches long.
If *scale_units* is 'width'/'height', then the vector will be half the
width/height of the axes.
If *scale_units* is 'x' then the vector will be 0.5 x-axis
units. To plot vectors in the x-y plane, with u and v having
the same units as x and y, use
``angles='xy', scale_units='xy', scale=1``.
width : scalar, optional
Shaft width in arrow units; default depends on choice of units,
above, and number of vectors; a typical starting value is about
0.005 times the width of the plot.
headwidth : scalar, optional
Head width as multiple of shaft width, default is 3
headlength : scalar, optional
Head length as multiple of shaft width, default is 5
headaxislength : scalar, optional
Head length at shaft intersection, default is 4.5
minshaft : scalar, optional
Length below which arrow scales, in units of head length. Do not
set this to less than 1, or small arrows will look terrible!
Default is 1
minlength : scalar, optional
Minimum length as a multiple of shaft width; if an arrow length
is less than this, plot a dot (hexagon) of this diameter instead.
Default is 1.
pivot : [ 'tail' | 'mid' | 'middle' | 'tip' ], optional
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*.
color : [ color | color sequence ], optional
This is a synonym for the
:class:`~matplotlib.collections.PolyCollection` facecolor kwarg.
If *C* has been set, *color* has no effect.
Notes
-----
Additional :class:`~matplotlib.collections.PolyCollection`
keyword arguments:
%(PolyCollection)s
See Also
--------
quiverkey : Add a key to a quiver plot
""" % docstring.interpd.params
_quiverkey_doc = """
Add a key to a quiver plot.
Call signature::
quiverkey(Q, X, Y, U, label, **kw)
Arguments:
*Q*:
The Quiver instance returned by a call to quiver.
*X*, *Y*:
The location of the key; additional explanation follows.
*U*:
The length of the key
*label*:
A string with the length and units of the key
Keyword arguments:
*angle* = 0
The angle of the key arrow. Measured in degrees anti-clockwise from the
x-axis.
*coordinates* = [ 'axes' | 'figure' | 'data' | 'inches' ]
Coordinate system and units for *X*, *Y*: 'axes' and 'figure' are
normalized coordinate systems with 0,0 in the lower left and 1,1
in the upper right; 'data' are the axes data coordinates (used for
the locations of the vectors in the quiver plot itself); 'inches'
is position in the figure in inches, with 0,0 at the lower left
corner.
*color*:
overrides face and edge colors from *Q*.
*labelpos* = [ 'N' | 'S' | 'E' | 'W' ]
Position the label above, below, to the right, to the left of the
arrow, respectively.
*labelsep*:
Distance in inches between the arrow and the label. Default is
0.1
*labelcolor*:
defaults to default :class:`~matplotlib.text.Text` color.
*fontproperties*:
A dictionary with keyword arguments accepted by the
:class:`~matplotlib.font_manager.FontProperties` initializer:
*family*, *style*, *variant*, *size*, *weight*
Any additional keyword arguments are used to override vector
properties taken from *Q*.
The positioning of the key depends on *X*, *Y*, *coordinates*, and
*labelpos*. If *labelpos* is 'N' or 'S', *X*, *Y* give the position
of the middle of the key arrow. If *labelpos* is 'E', *X*, *Y*
positions the head, and if *labelpos* is 'W', *X*, *Y* positions the
tail; in either of these two cases, *X*, *Y* is somewhere in the
middle of the arrow+label key object.
"""
class QuiverKey(martist.Artist):
""" Labelled arrow for use as a quiver plot scale key."""
halign = {'N': 'center', 'S': 'center', 'E': 'left', 'W': 'right'}
valign = {'N': 'bottom', 'S': 'top', 'E': 'center', 'W': 'center'}
pivot = {'N': 'middle', 'S': 'middle', 'E': 'tip', 'W': 'tail'}
def __init__(self, Q, X, Y, U, label, **kw):
martist.Artist.__init__(self)
self.Q = Q
self.X = X
self.Y = Y
self.U = U
self.angle = kw.pop('angle', 0)
self.coord = kw.pop('coordinates', 'axes')
self.color = kw.pop('color', None)
self.label = label
self._labelsep_inches = kw.pop('labelsep', 0.1)
self.labelsep = (self._labelsep_inches * Q.ax.figure.dpi)
# try to prevent closure over the real self
weak_self = weakref.ref(self)
def on_dpi_change(fig):
self_weakref = weak_self()
if self_weakref is not None:
self_weakref.labelsep = (self_weakref._labelsep_inches*fig.dpi)
self_weakref._initialized = False # simple brute force update
# works because _init is
# called at the start of
# draw.
self._cid = Q.ax.figure.callbacks.connect('dpi_changed',
on_dpi_change)
self.labelpos = kw.pop('labelpos', 'N')
self.labelcolor = kw.pop('labelcolor', None)
self.fontproperties = kw.pop('fontproperties', dict())
self.kw = kw
_fp = self.fontproperties
# boxprops = dict(facecolor='red')
self.text = mtext.Text(
text=label, # bbox=boxprops,
horizontalalignment=self.halign[self.labelpos],
verticalalignment=self.valign[self.labelpos],
fontproperties=font_manager.FontProperties(**_fp))
if self.labelcolor is not None:
self.text.set_color(self.labelcolor)
self._initialized = False
self.zorder = Q.zorder + 0.1
def remove(self):
"""
Overload the remove method
"""
self.Q.ax.figure.callbacks.disconnect(self._cid)
self._cid = None
# pass the remove call up the stack
martist.Artist.remove(self)
__init__.__doc__ = _quiverkey_doc
def _init(self):
if True: # not self._initialized:
if not self.Q._initialized:
self.Q._init()
self._set_transform()
_pivot = self.Q.pivot
self.Q.pivot = self.pivot[self.labelpos]
# Hack: save and restore the Umask
_mask = self.Q.Umask
self.Q.Umask = ma.nomask
self.verts = self.Q._make_verts(np.array([self.U]),
np.zeros((1,)),
self.angle)
self.Q.Umask = _mask
self.Q.pivot = _pivot
kw = self.Q.polykw
kw.update(self.kw)
self.vector = mcollections.PolyCollection(
self.verts,
offsets=[(self.X, self.Y)],
transOffset=self.get_transform(),
**kw)
if self.color is not None:
self.vector.set_color(self.color)
self.vector.set_transform(self.Q.get_transform())
self.vector.set_figure(self.get_figure())
self._initialized = True
def _text_x(self, x):
if self.labelpos == 'E':
return x + self.labelsep
elif self.labelpos == 'W':
return x - self.labelsep
else:
return x
def _text_y(self, y):
if self.labelpos == 'N':
return y + self.labelsep
elif self.labelpos == 'S':
return y - self.labelsep
else:
return y
@allow_rasterization
def draw(self, renderer):
self._init()
self.vector.draw(renderer)
x, y = self.get_transform().transform_point((self.X, self.Y))
self.text.set_x(self._text_x(x))
self.text.set_y(self._text_y(y))
self.text.draw(renderer)
self.stale = False
def _set_transform(self):
if self.coord == 'data':
self.set_transform(self.Q.ax.transData)
elif self.coord == 'axes':
self.set_transform(self.Q.ax.transAxes)
elif self.coord == 'figure':
self.set_transform(self.Q.ax.figure.transFigure)
elif self.coord == 'inches':
self.set_transform(self.Q.ax.figure.dpi_scale_trans)
else:
raise ValueError('unrecognized coordinates')
def set_figure(self, fig):
martist.Artist.set_figure(self, fig)
self.text.set_figure(fig)
def contains(self, mouseevent):
# Maybe the dictionary should allow one to
# distinguish between a text hit and a vector hit.
if (self.text.contains(mouseevent)[0] or
self.vector.contains(mouseevent)[0]):
return True, {}
return False, {}
quiverkey_doc = _quiverkey_doc
# This is a helper function that parses out the various combination of
# arguments for doing colored vector plots. Pulling it out here
# allows both Quiver and Barbs to use it
def _parse_args(*args):
X, Y, U, V, C = [None] * 5
args = list(args)
# The use of atleast_1d allows for handling scalar arguments while also
# keeping masked arrays
if len(args) == 3 or len(args) == 5:
C = np.atleast_1d(args.pop(-1))
V = np.atleast_1d(args.pop(-1))
U = np.atleast_1d(args.pop(-1))
if U.ndim == 1:
nr, nc = 1, U.shape[0]
else:
nr, nc = U.shape
if len(args) == 2: # remaining after removing U,V,C
X, Y = [np.array(a).ravel() for a in args]
if len(X) == nc and len(Y) == nr:
X, Y = [a.ravel() for a in np.meshgrid(X, Y)]
else:
indexgrid = np.meshgrid(np.arange(nc), np.arange(nr))
X, Y = [np.ravel(a) for a in indexgrid]
return X, Y, U, V, C
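# --- Illustrative sketch (editor's addition, not part of the original module).
# What the parsing above produces when X and Y are omitted: a uniform index
# grid matching the shape of U/V, flattened to 1-D, with C left as None when
# no color array is passed.  The 2x3 field is an assumption made only for this
# sketch.
def _example_parse_args_shapes():
    U = np.ones((2, 3))   # 2 rows, 3 columns
    V = np.zeros((2, 3))
    X, Y, U, V, C = _parse_args(U, V)
    # X and Y are flattened meshgrid indices of length 6 (columns 0..2 by
    # rows 0..1); U and V keep their original shape.
    return X, Y, C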
def _check_consistent_shapes(*arrays):
all_shapes = set(a.shape for a in arrays)
if len(all_shapes) != 1:
raise ValueError('The shapes of the passed in arrays do not match.')
class Quiver(mcollections.PolyCollection):
"""
Specialized PolyCollection for arrows.
The only API method is set_UVC(), which can be used
to change the size, orientation, and color of the
arrows; their locations are fixed when the class is
instantiated. Possibly this method will be useful
in animations.
Much of the work in this class is done in the draw()
method so that as much information as possible is available
about the plot. In subsequent draw() calls, recalculation
is limited to things that might have changed, so there
should be no performance penalty from putting the calculations
in the draw() method.
"""
_PIVOT_VALS = ('tail', 'mid', 'middle', 'tip')
@docstring.Substitution(_quiver_doc)
def __init__(self, ax, *args, **kw):
"""
The constructor takes one required argument, an Axes
instance, followed by the args and kwargs described
by the following pylab interface documentation:
%s
"""
self.ax = ax
X, Y, U, V, C = _parse_args(*args)
self.X = X
self.Y = Y
self.XY = np.hstack((X[:, np.newaxis], Y[:, np.newaxis]))
self.N = len(X)
self.scale = kw.pop('scale', None)
self.headwidth = kw.pop('headwidth', 3)
self.headlength = float(kw.pop('headlength', 5))
self.headaxislength = kw.pop('headaxislength', 4.5)
self.minshaft = kw.pop('minshaft', 1)
self.minlength = kw.pop('minlength', 1)
self.units = kw.pop('units', 'width')
self.scale_units = kw.pop('scale_units', None)
self.angles = kw.pop('angles', 'uv')
self.width = kw.pop('width', None)
self.color = kw.pop('color', 'k')
pivot = kw.pop('pivot', 'tail').lower()
# validate pivot
if pivot not in self._PIVOT_VALS:
raise ValueError(
'pivot must be one of {keys}, you passed {inp}'.format(
keys=self._PIVOT_VALS, inp=pivot))
# normalize to 'middle'
if pivot == 'mid':
pivot = 'middle'
self.pivot = pivot
self.transform = kw.pop('transform', ax.transData)
kw.setdefault('facecolors', self.color)
kw.setdefault('linewidths', (0,))
mcollections.PolyCollection.__init__(self, [], offsets=self.XY,
transOffset=self.transform,
closed=False,
**kw)
self.polykw = kw
self.set_UVC(U, V, C)
self._initialized = False
self.keyvec = None
self.keytext = None
# try to prevent closure over the real self
weak_self = weakref.ref(self)
def on_dpi_change(fig):
self_weakref = weak_self()
if self_weakref is not None:
self_weakref._new_UV = True # vertices depend on width, span
# which in turn depend on dpi
self_weakref._initialized = False # simple brute force update
# works because _init is
# called at the start of
# draw.
self._cid = self.ax.figure.callbacks.connect('dpi_changed',
on_dpi_change)
def remove(self):
"""
Overload the remove method
"""
# disconnect the call back
self.ax.figure.callbacks.disconnect(self._cid)
self._cid = None
# pass the remove call up the stack
mcollections.PolyCollection.remove(self)
def _init(self):
"""
Initialization delayed until first draw;
allow time for axes setup.
"""
# It seems that there are not enough event notifications
# available to have this work on an as-needed basis at present.
if True: # not self._initialized:
trans = self._set_transform()
ax = self.ax
sx, sy = trans.inverted().transform_point(
(ax.bbox.width, ax.bbox.height))
self.span = sx
if self.width is None:
sn = np.clip(math.sqrt(self.N), 8, 25)
self.width = 0.06 * self.span / sn
# _make_verts sets self.scale if not already specified
if not self._initialized and self.scale is None:
self._make_verts(self.U, self.V, self.angles)
self._initialized = True
def get_datalim(self, transData):
trans = self.get_transform()
transOffset = self.get_offset_transform()
full_transform = (trans - transData) + (transOffset - transData)
XY = full_transform.transform(self.XY)
bbox = transforms.Bbox.null()
bbox.update_from_data_xy(XY, ignore=True)
return bbox
@allow_rasterization
def draw(self, renderer):
self._init()
verts = self._make_verts(self.U, self.V, self.angles)
self.set_verts(verts, closed=False)
self._new_UV = False
mcollections.PolyCollection.draw(self, renderer)
self.stale = False
def set_UVC(self, U, V, C=None):
# We need to ensure we have a copy, not a reference
# to an array that might change before draw().
U = ma.masked_invalid(U, copy=True).ravel()
V = ma.masked_invalid(V, copy=True).ravel()
mask = ma.mask_or(U.mask, V.mask, copy=False, shrink=True)
if C is not None:
C = ma.masked_invalid(C, copy=True).ravel()
mask = ma.mask_or(mask, C.mask, copy=False, shrink=True)
if mask is ma.nomask:
C = C.filled()
else:
C = ma.array(C, mask=mask, copy=False)
self.U = U.filled(1)
self.V = V.filled(1)
self.Umask = mask
if C is not None:
self.set_array(C)
self._new_UV = True
self.stale = True
def _dots_per_unit(self, units):
"""
Return a scale factor for converting from units to pixels
"""
ax = self.ax
if units in ('x', 'y', 'xy'):
if units == 'x':
dx0 = ax.viewLim.width
dx1 = ax.bbox.width
elif units == 'y':
dx0 = ax.viewLim.height
dx1 = ax.bbox.height
else: # 'xy' is assumed
dxx0 = ax.viewLim.width
dxx1 = ax.bbox.width
dyy0 = ax.viewLim.height
dyy1 = ax.bbox.height
dx1 = np.hypot(dxx1, dyy1)
dx0 = np.hypot(dxx0, dyy0)
dx = dx1 / dx0
else:
if units == 'width':
dx = ax.bbox.width
elif units == 'height':
dx = ax.bbox.height
elif units == 'dots':
dx = 1.0
elif units == 'inches':
dx = ax.figure.dpi
else:
raise ValueError('unrecognized units')
return dx
def _set_transform(self):
"""
Sets the PolygonCollection transform to go
from arrow width units to pixels.
"""
dx = self._dots_per_unit(self.units)
self._trans_scale = dx # pixels per arrow width unit
trans = transforms.Affine2D().scale(dx)
self.set_transform(trans)
return trans
def _angles_lengths(self, U, V, eps=1):
xy = self.ax.transData.transform(self.XY)
uv = np.hstack((U[:, np.newaxis], V[:, np.newaxis]))
xyp = self.ax.transData.transform(self.XY + eps * uv)
dxy = xyp - xy
angles = np.arctan2(dxy[:, 1], dxy[:, 0])
lengths = np.hypot(*dxy.T) / eps
return angles, lengths
def _make_verts(self, U, V, angles):
uv = (U + V * 1j)
str_angles = angles if isinstance(angles, six.string_types) else ''
if str_angles == 'xy' and self.scale_units == 'xy':
# Here eps is 1 so that if we get U, V by diffing
# the X, Y arrays, the vectors will connect the
# points, regardless of the axis scaling (including log).
angles, lengths = self._angles_lengths(U, V, eps=1)
elif str_angles == 'xy' or self.scale_units == 'xy':
# Calculate eps based on the extents of the plot
# so that we don't end up with roundoff error from
# adding a small number to a large.
eps = np.abs(self.ax.dataLim.extents).max() * 0.001
angles, lengths = self._angles_lengths(U, V, eps=eps)
if str_angles and self.scale_units == 'xy':
a = lengths
else:
a = np.abs(uv)
if self.scale is None:
sn = max(10, math.sqrt(self.N))
if self.Umask is not ma.nomask:
amean = a[~self.Umask].mean()
else:
amean = a.mean()
# crude auto-scaling
# scale is typical arrow length as a multiple of the arrow width
scale = 1.8 * amean * sn / self.span
if self.scale_units is None:
if self.scale is None:
self.scale = scale
widthu_per_lenu = 1.0
else:
if self.scale_units == 'xy':
dx = 1
else:
dx = self._dots_per_unit(self.scale_units)
widthu_per_lenu = dx / self._trans_scale
if self.scale is None:
self.scale = scale * widthu_per_lenu
length = a * (widthu_per_lenu / (self.scale * self.width))
X, Y = self._h_arrows(length)
if str_angles == 'xy':
theta = angles
elif str_angles == 'uv':
theta = np.angle(uv)
else:
theta = ma.masked_invalid(np.deg2rad(angles)).filled(0)
theta = theta.reshape((-1, 1)) # for broadcasting
xy = (X + Y * 1j) * np.exp(1j * theta) * self.width
xy = xy[:, :, np.newaxis]
XY = np.concatenate((xy.real, xy.imag), axis=2)
if self.Umask is not ma.nomask:
XY = ma.array(XY)
XY[self.Umask] = ma.masked
# This might be handled more efficiently with nans, given
# that nans will end up in the paths anyway.
return XY
def _h_arrows(self, length):
""" length is in arrow width units """
# It might be possible to streamline the code
# and speed it up a bit by using complex (x,y)
# instead of separate arrays; but any gain would be slight.
minsh = self.minshaft * self.headlength
N = len(length)
length = length.reshape(N, 1)
# This number is chosen based on when pixel values overflow in Agg
# causing rendering errors
# length = np.minimum(length, 2 ** 16)
np.clip(length, 0, 2 ** 16, out=length)
# x, y: normal horizontal arrow
x = np.array([0, -self.headaxislength,
-self.headlength, 0],
np.float64)
x = x + np.array([0, 1, 1, 1]) * length
y = 0.5 * np.array([1, 1, self.headwidth, 0], np.float64)
y = np.repeat(y[np.newaxis, :], N, axis=0)
# x0, y0: arrow without shaft, for short vectors
x0 = np.array([0, minsh - self.headaxislength,
minsh - self.headlength, minsh], np.float64)
y0 = 0.5 * np.array([1, 1, self.headwidth, 0], np.float64)
ii = [0, 1, 2, 3, 2, 1, 0, 0]
X = x.take(ii, 1)
Y = y.take(ii, 1)
Y[:, 3:-1] *= -1
X0 = x0.take(ii)
Y0 = y0.take(ii)
Y0[3:-1] *= -1
shrink = length / minsh if minsh != 0. else 0.
X0 = shrink * X0[np.newaxis, :]
Y0 = shrink * Y0[np.newaxis, :]
short = np.repeat(length < minsh, 8, axis=1)
# Now select X0, Y0 if short, otherwise X, Y
np.copyto(X, X0, where=short)
np.copyto(Y, Y0, where=short)
if self.pivot == 'middle':
X -= 0.5 * X[:, 3, np.newaxis]
elif self.pivot == 'tip':
X = X - X[:, 3, np.newaxis] # numpy bug? using -= does not
# work here unless we multiply
# by a float first, as with 'mid'.
elif self.pivot != 'tail':
raise ValueError(("Quiver.pivot must have value in {{'middle', "
"'tip', 'tail'}} not {0}").format(self.pivot))
tooshort = length < self.minlength
if tooshort.any():
# Use a heptagonal dot:
th = np.arange(0, 8, 1, np.float64) * (np.pi / 3.0)
x1 = np.cos(th) * self.minlength * 0.5
y1 = np.sin(th) * self.minlength * 0.5
X1 = np.repeat(x1[np.newaxis, :], N, axis=0)
Y1 = np.repeat(y1[np.newaxis, :], N, axis=0)
tooshort = np.repeat(tooshort, 8, 1)
np.copyto(X, X1, where=tooshort)
np.copyto(Y, Y1, where=tooshort)
# Mask handling is deferred to the caller, _make_verts.
return X, Y
quiver_doc = _quiver_doc
_barbs_doc = r"""
Plot a 2-D field of barbs.
Call signatures::
barb(U, V, **kw)
barb(U, V, C, **kw)
barb(X, Y, U, V, **kw)
barb(X, Y, U, V, C, **kw)
Arguments:
*X*, *Y*:
The x and y coordinates of the barb locations
(default is head of barb; see *pivot* kwarg)
*U*, *V*:
Give the x and y components of the barb shaft
*C*:
An optional array used to map colors to the barbs
All arguments may be 1-D or 2-D arrays or sequences. If *X* and *Y*
are absent, they will be generated as a uniform grid. If *U* and *V*
are 2-D arrays but *X* and *Y* are 1-D, and if ``len(X)`` and ``len(Y)``
match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
*U*, *V*, *C* may be masked arrays, but masked *X*, *Y* are not
supported at present.
Keyword arguments:
*length*:
Length of the barb in points; the other parts of the barb
are scaled against this.
Default is 7.
*pivot*: [ 'tip' | 'middle' | float ]
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*. Default is 'tip'. Can
also be a number, which shifts the start of the barb that many
points from the origin.
*barbcolor*: [ color | color sequence ]
        Specifies the color of all parts of the barb except any flags. This
parameter is analogous to the *edgecolor* parameter for polygons,
which can be used instead. However this parameter will override
facecolor.
*flagcolor*: [ color | color sequence ]
Specifies the color of any flags on the barb. This parameter is
analogous to the *facecolor* parameter for polygons, which can be
used instead. However this parameter will override facecolor. If
this is not set (and *C* has not either) then *flagcolor* will be
set to match *barbcolor* so that the barb has a uniform color. If
*C* has been set, *flagcolor* has no effect.
*sizes*:
A dictionary of coefficients specifying the ratio of a given
feature to the length of the barb. Only those values one wishes to
override need to be included. These features include:
- 'spacing' - space between features (flags, full/half barbs)
- 'height' - height (distance from shaft to top) of a flag or
full barb
- 'width' - width of a flag, twice the width of a full barb
- 'emptybarb' - radius of the circle used for low magnitudes
*fill_empty*:
A flag on whether the empty barbs (circles) that are drawn should
be filled with the flag color. If they are not filled, they will
be drawn such that no color is applied to the center. Default is
False
*rounding*:
A flag to indicate whether the vector magnitude should be rounded
when allocating barb components. If True, the magnitude is
rounded to the nearest multiple of the half-barb increment. If
False, the magnitude is simply truncated to the next lowest
multiple. Default is True
*barb_increments*:
A dictionary of increments specifying values to associate with
different parts of the barb. Only those values one wishes to
override need to be included.
- 'half' - half barbs (Default is 5)
- 'full' - full barbs (Default is 10)
- 'flag' - flags (default is 50)
*flip_barb*:
Either a single boolean flag or an array of booleans. Single
boolean indicates whether the lines and flags should point
opposite to normal for all barbs. An array (which should be the
same size as the other data arrays) indicates whether to flip for
each individual barb. Normal behavior is for the barbs and lines
to point right (comes from wind barbs having these features point
towards low pressure in the Northern Hemisphere.) Default is
False
Barbs are traditionally used in meteorology as a way to plot the speed
and direction of wind observations, but can technically be used to
plot any two dimensional vector quantity. As opposed to arrows, which
give vector magnitude by the length of the arrow, the barbs give more
quantitative information about the vector magnitude by putting slanted
    lines or a triangle for various increments in magnitude, as shown
schematically below::
: /\ \\
: / \ \\
: / \ \ \\
: / \ \ \\
: ------------------------------
.. note the double \\ at the end of each line to make the figure
.. render correctly
The largest increment is given by a triangle (or "flag"). After those
come full lines (barbs). The smallest increment is a half line. There
is only, of course, ever at most 1 half line. If the magnitude is
small and only needs a single half-line and no full lines or
triangles, the half-line is offset from the end of the barb so that it
can be easily distinguished from barbs with a single full line. The
magnitude for the barb shown above would nominally be 65, using the
standard increments of 50, 10, and 5.
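
    For example, a minimal sketch (the grid and wind components below are
    illustrative, not taken from the original documentation)::

        X, Y = np.meshgrid(np.arange(0, 5), np.arange(0, 5))
        U = np.zeros_like(X)
        V = 65 * np.ones_like(Y)   # one flag, one full barb, one half barb
        ax.barbs(X, Y, U, V, length=7)
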
linewidths and edgecolors can be used to customize the barb.
Additional :class:`~matplotlib.collections.PolyCollection` keyword
arguments:
%(PolyCollection)s
""" % docstring.interpd.params
docstring.interpd.update(barbs_doc=_barbs_doc)
class Barbs(mcollections.PolyCollection):
'''
Specialized PolyCollection for barbs.
The only API method is :meth:`set_UVC`, which can be used to
change the size, orientation, and color of the arrows. Locations
are changed using the :meth:`set_offsets` collection method.
Possibly this method will be useful in animations.
There is one internal function :meth:`_find_tails` which finds
exactly what should be put on the barb given the vector magnitude.
From there :meth:`_make_barbs` is used to find the vertices of the
polygon to represent the barb based on this information.
'''
# This may be an abuse of polygons here to render what is essentially maybe
# 1 triangle and a series of lines. It works fine as far as I can tell
# however.
@docstring.interpd
def __init__(self, ax, *args, **kw):
"""
The constructor takes one required argument, an Axes
instance, followed by the args and kwargs described
by the following pylab interface documentation:
%(barbs_doc)s
"""
self._pivot = kw.pop('pivot', 'tip')
self._length = kw.pop('length', 7)
barbcolor = kw.pop('barbcolor', None)
flagcolor = kw.pop('flagcolor', None)
self.sizes = kw.pop('sizes', dict())
self.fill_empty = kw.pop('fill_empty', False)
self.barb_increments = kw.pop('barb_increments', dict())
self.rounding = kw.pop('rounding', True)
self.flip = kw.pop('flip_barb', False)
transform = kw.pop('transform', ax.transData)
# Flagcolor and barbcolor provide convenience parameters for
# setting the facecolor and edgecolor, respectively, of the barb
# polygon. We also work here to make the flag the same color as the
# rest of the barb by default
if None in (barbcolor, flagcolor):
kw['edgecolors'] = 'face'
if flagcolor:
kw['facecolors'] = flagcolor
elif barbcolor:
kw['facecolors'] = barbcolor
else:
# Set to facecolor passed in or default to black
kw.setdefault('facecolors', 'k')
else:
kw['edgecolors'] = barbcolor
kw['facecolors'] = flagcolor
# Explicitly set a line width if we're not given one, otherwise
# polygons are not outlined and we get no barbs
if 'linewidth' not in kw and 'lw' not in kw:
kw['linewidth'] = 1
# Parse out the data arrays from the various configurations supported
x, y, u, v, c = _parse_args(*args)
self.x = x
self.y = y
xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
# Make a collection
barb_size = self._length ** 2 / 4 # Empirically determined
mcollections.PolyCollection.__init__(self, [], (barb_size,),
offsets=xy,
transOffset=transform, **kw)
self.set_transform(transforms.IdentityTransform())
self.set_UVC(u, v, c)
def _find_tails(self, mag, rounding=True, half=5, full=10, flag=50):
'''
        Find how many of each of the tail pieces is necessary.  *flag*
        specifies the increment for a flag, *full* for a full barb, and
        *half* for half a barb.  *mag* should be the magnitude of a vector
        (i.e., >= 0).
This returns a tuple of:
(*number of flags*, *number of barbs*, *half_flag*, *empty_flag*)
        *half_flag* is a boolean for whether half of a barb is needed,
        since there should only ever be one half on a given
        barb.  *empty_flag* is an array of flags to easily tell if
        a barb is empty (too low to plot any barbs/flags).
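
        As a rough worked example (using the default increments half=5,
        full=10, flag=50): a magnitude of 65 yields one flag, one full
        barb, and a half barb, i.e. ``(1, 1, True, False)``.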
'''
# If rounding, round to the nearest multiple of half, the smallest
# increment
if rounding:
mag = half * (mag / half + 0.5).astype(int)
num_flags = np.floor(mag / flag).astype(int)
mag = np.mod(mag, flag)
num_barb = np.floor(mag / full).astype(int)
mag = np.mod(mag, full)
half_flag = mag >= half
empty_flag = ~(half_flag | (num_flags > 0) | (num_barb > 0))
return num_flags, num_barb, half_flag, empty_flag
def _make_barbs(self, u, v, nflags, nbarbs, half_barb, empty_flag, length,
pivot, sizes, fill_empty, flip):
'''
This function actually creates the wind barbs. *u* and *v*
are components of the vector in the *x* and *y* directions,
respectively.
        *nflags*, *nbarbs*, *half_barb*, and *empty_flag* are,
        respectively, the number of flags, number of barbs, flag for
        half a barb, and flag for empty barb, ostensibly obtained
        from :meth:`_find_tails`.
*length* is the length of the barb staff in points.
*pivot* specifies the point on the barb around which the
entire barb should be rotated. Right now, valid options are
'tip' and 'middle'. Can also be a number, which shifts the start
of the barb that many points from the origin.
*sizes* is a dictionary of coefficients specifying the ratio
of a given feature to the length of the barb. These features
include:
- *spacing*: space between features (flags, full/half
barbs)
- *height*: distance from shaft of top of a flag or full
barb
- *width* - width of a flag, twice the width of a full barb
- *emptybarb* - radius of the circle used for low
magnitudes
*fill_empty* specifies whether the circle representing an
empty barb should be filled or not (this changes the drawing
of the polygon).
*flip* is a flag indicating whether the features should be flipped to
the other side of the barb (useful for winds in the southern
hemisphere).
This function returns list of arrays of vertices, defining a polygon
for each of the wind barbs. These polygons have been rotated to
properly align with the vector direction.
'''
# These control the spacing and size of barb elements relative to the
# length of the shaft
spacing = length * sizes.get('spacing', 0.125)
full_height = length * sizes.get('height', 0.4)
full_width = length * sizes.get('width', 0.25)
empty_rad = length * sizes.get('emptybarb', 0.15)
# Controls y point where to pivot the barb.
pivot_points = dict(tip=0.0, middle=-length / 2.)
# Check for flip
if flip:
full_height = -full_height
endx = 0.0
try:
endy = float(pivot)
except ValueError:
endy = pivot_points[pivot.lower()]
# Get the appropriate angle for the vector components. The offset is
# due to the way the barb is initially drawn, going down the y-axis.
# This makes sense in a meteorological mode of thinking since there 0
# degrees corresponds to north (the y-axis traditionally)
angles = -(ma.arctan2(v, u) + np.pi / 2)
# Used for low magnitude. We just get the vertices, so if we make it
# out here, it can be reused. The center set here should put the
# center of the circle at the location(offset), rather than at the
# same point as the barb pivot; this seems more sensible.
circ = CirclePolygon((0, 0), radius=empty_rad).get_verts()
if fill_empty:
empty_barb = circ
else:
# If we don't want the empty one filled, we make a degenerate
# polygon that wraps back over itself
empty_barb = np.concatenate((circ, circ[::-1]))
barb_list = []
for index, angle in np.ndenumerate(angles):
# If the vector magnitude is too weak to draw anything, plot an
# empty circle instead
if empty_flag[index]:
# We can skip the transform since the circle has no preferred
# orientation
barb_list.append(empty_barb)
continue
poly_verts = [(endx, endy)]
offset = length
# Add vertices for each flag
for i in range(nflags[index]):
                # The spacing that works for the barbs is a little too much for
# the flags, but this only occurs when we have more than 1
# flag.
if offset != length:
offset += spacing / 2.
poly_verts.extend(
[[endx, endy + offset],
[endx + full_height, endy - full_width / 2 + offset],
[endx, endy - full_width + offset]])
offset -= full_width + spacing
# Add vertices for each barb. These really are lines, but works
# great adding 3 vertices that basically pull the polygon out and
# back down the line
for i in range(nbarbs[index]):
poly_verts.extend(
[(endx, endy + offset),
(endx + full_height, endy + offset + full_width / 2),
(endx, endy + offset)])
offset -= spacing
# Add the vertices for half a barb, if needed
if half_barb[index]:
# If the half barb is the first on the staff, traditionally it
# is offset from the end to make it easy to distinguish from a
# barb with a full one
if offset == length:
poly_verts.append((endx, endy + offset))
offset -= 1.5 * spacing
poly_verts.extend(
[(endx, endy + offset),
(endx + full_height / 2, endy + offset + full_width / 4),
(endx, endy + offset)])
# Rotate the barb according the angle. Making the barb first and
# then rotating it made the math for drawing the barb really easy.
# Also, the transform framework makes doing the rotation simple.
poly_verts = transforms.Affine2D().rotate(-angle).transform(
poly_verts)
barb_list.append(poly_verts)
return barb_list
def set_UVC(self, U, V, C=None):
self.u = ma.masked_invalid(U, copy=False).ravel()
self.v = ma.masked_invalid(V, copy=False).ravel()
if C is not None:
c = ma.masked_invalid(C, copy=False).ravel()
x, y, u, v, c = delete_masked_points(self.x.ravel(),
self.y.ravel(),
self.u, self.v, c)
_check_consistent_shapes(x, y, u, v, c)
else:
x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
self.u, self.v)
_check_consistent_shapes(x, y, u, v)
magnitude = np.hypot(u, v)
flags, barbs, halves, empty = self._find_tails(magnitude,
self.rounding,
**self.barb_increments)
# Get the vertices for each of the barbs
plot_barbs = self._make_barbs(u, v, flags, barbs, halves, empty,
self._length, self._pivot, self.sizes,
self.fill_empty, self.flip)
self.set_verts(plot_barbs)
# Set the color array
if C is not None:
self.set_array(c)
# Update the offsets in case the masked data changed
xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
self._offsets = xy
self.stale = True
def set_offsets(self, xy):
"""
        Set the offsets for the barb polygons. This saves the offsets passed
        in and masks them as appropriate for the existing U/V data.
        *offsets* should be a sequence.
ACCEPTS: sequence of pairs of floats
"""
self.x = xy[:, 0]
self.y = xy[:, 1]
x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
self.u, self.v)
_check_consistent_shapes(x, y, u, v)
xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
mcollections.PolyCollection.set_offsets(self, xy)
self.stale = True
set_offsets.__doc__ = mcollections.PolyCollection.set_offsets.__doc__
barbs_doc = _barbs_doc
cba-pipeline-public
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/figure.py
"""
The figure module provides the top-level
:class:`~matplotlib.artist.Artist`, the :class:`Figure`, which
contains all the plot elements. The following classes are defined
:class:`SubplotParams`
control the default spacing of the subplots
:class:`Figure`
top level container for all plot elements
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import logging
import warnings
import numpy as np
from matplotlib import rcParams
from matplotlib import docstring
from matplotlib import __version__ as _mpl_version
import matplotlib.artist as martist
from matplotlib.artist import Artist, allow_rasterization
import matplotlib.cbook as cbook
from matplotlib.cbook import Stack, iterable
from matplotlib import image as mimage
from matplotlib.image import FigureImage
import matplotlib.colorbar as cbar
from matplotlib.axes import Axes, SubplotBase, subplot_class_factory
from matplotlib.blocking_input import BlockingMouseInput, BlockingKeyMouseInput
from matplotlib.gridspec import GridSpec
import matplotlib.legend as mlegend
from matplotlib.patches import Rectangle
from matplotlib.projections import (get_projection_names,
process_projection_requirements)
from matplotlib.text import Text, _process_text_args
from matplotlib.transforms import (Affine2D, Bbox, BboxTransformTo,
TransformedBbox)
import matplotlib._layoutbox as layoutbox
from matplotlib.backend_bases import NonGuiException
_log = logging.getLogger(__name__)
docstring.interpd.update(projection_names=get_projection_names())
def _stale_figure_callback(self, val):
if self.figure:
self.figure.stale = val
class AxesStack(Stack):
"""
Specialization of the Stack to handle all tracking of Axes in a Figure.
This stack stores ``key, (ind, axes)`` pairs, where:
* **key** should be a hash of the args and kwargs
used in generating the Axes.
* **ind** is a serial number for tracking the order
in which axes were added.
The AxesStack is a callable, where ``ax_stack()`` returns
the current axes. Alternatively the :meth:`current_key_axes` will
return the current key and associated axes.
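
    An illustrative sketch of the internal use (the names below are made
    up for the example)::

        stack = AxesStack()
        stack.add(key, ax)      # *key* hashes the axes-creation args
        current_ax = stack()    # the most recently added/bubbled axes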
"""
def __init__(self):
Stack.__init__(self)
self._ind = 0
def as_list(self):
"""
Return a list of the Axes instances that have been added to the figure
"""
ia_list = [a for k, a in self._elements]
ia_list.sort()
return [a for i, a in ia_list]
def get(self, key):
"""
Return the Axes instance that was added with *key*.
If it is not present, return None.
"""
item = dict(self._elements).get(key)
if item is None:
return None
cbook.warn_deprecated(
"2.1",
"Adding an axes using the same arguments as a previous axes "
"currently reuses the earlier instance. In a future version, "
"a new instance will always be created and returned. Meanwhile, "
"this warning can be suppressed, and the future behavior ensured, "
"by passing a unique label to each axes instance.")
return item[1]
def _entry_from_axes(self, e):
ind, k = {a: (ind, k) for k, (ind, a) in self._elements}[e]
return (k, (ind, e))
def remove(self, a):
"""Remove the axes from the stack."""
Stack.remove(self, self._entry_from_axes(a))
def bubble(self, a):
"""
Move the given axes, which must already exist in the
stack, to the top.
"""
return Stack.bubble(self, self._entry_from_axes(a))
def add(self, key, a):
"""
Add Axes *a*, with key *key*, to the stack, and return the stack.
If *key* is unhashable, replace it by a unique, arbitrary object.
If *a* is already on the stack, don't add it again, but
return *None*.
"""
# All the error checking may be unnecessary; but this method
# is called so seldom that the overhead is negligible.
if not isinstance(a, Axes):
raise ValueError("second argument, {!r}, is not an Axes".format(a))
try:
hash(key)
except TypeError:
key = object()
a_existing = self.get(key)
if a_existing is not None:
Stack.remove(self, (key, a_existing))
warnings.warn(
"key {!r} already existed; Axes is being replaced".format(key))
# I don't think the above should ever happen.
if a in self:
return None
self._ind += 1
return Stack.push(self, (key, (self._ind, a)))
def current_key_axes(self):
"""
Return a tuple of ``(key, axes)`` for the active axes.
If no axes exists on the stack, then returns ``(None, None)``.
"""
if not len(self._elements):
return self._default, self._default
else:
key, (index, axes) = self._elements[self._pos]
return key, axes
def __call__(self):
return self.current_key_axes()[1]
def __contains__(self, a):
return a in self.as_list()
class SubplotParams(object):
"""
A class to hold the parameters for a subplot
"""
def __init__(self, left=None, bottom=None, right=None, top=None,
wspace=None, hspace=None):
"""
All dimensions are fraction of the figure width or height.
All values default to their rc params
The following attributes are available
left : 0.125
The left side of the subplots of the figure
right : 0.9
The right side of the subplots of the figure
bottom : 0.1
The bottom of the subplots of the figure
top : 0.9
The top of the subplots of the figure
wspace : 0.2
The amount of width reserved for space between subplots,
expressed as a fraction of the average axis width
hspace : 0.2
The amount of height reserved for space between subplots,
expressed as a fraction of the average axis height
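
        For example, a minimal illustrative sketch::

            sp = SubplotParams(left=0.2, right=0.95, wspace=0.3)
            fig = Figure(subplotpars=sp)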
"""
self.validate = True
self.update(left, bottom, right, top, wspace, hspace)
def update(self, left=None, bottom=None, right=None, top=None,
wspace=None, hspace=None):
"""
Update the current values. If any kwarg is None, default to
the current value, if set, otherwise to rc
"""
thisleft = getattr(self, 'left', None)
thisright = getattr(self, 'right', None)
thistop = getattr(self, 'top', None)
thisbottom = getattr(self, 'bottom', None)
thiswspace = getattr(self, 'wspace', None)
thishspace = getattr(self, 'hspace', None)
self._update_this('left', left)
self._update_this('right', right)
self._update_this('bottom', bottom)
self._update_this('top', top)
self._update_this('wspace', wspace)
self._update_this('hspace', hspace)
def reset():
self.left = thisleft
self.right = thisright
self.top = thistop
self.bottom = thisbottom
self.wspace = thiswspace
self.hspace = thishspace
if self.validate:
if self.left >= self.right:
reset()
raise ValueError('left cannot be >= right')
if self.bottom >= self.top:
reset()
raise ValueError('bottom cannot be >= top')
def _update_this(self, s, val):
if val is None:
val = getattr(self, s, None)
if val is None:
key = 'figure.subplot.' + s
val = rcParams[key]
setattr(self, s, val)
class Figure(Artist):
"""
The Figure instance supports callbacks through a *callbacks* attribute
which is a `.CallbackRegistry` instance. The events you can connect to
are 'dpi_changed', and the callback will be called with ``func(fig)`` where
fig is the `Figure` instance.
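
    For example, a hedged sketch of connecting to that registry (the
    handler function is made up for the example)::

        def on_dpi_change(fig):
            print('dpi is now', fig.dpi)

        fig.callbacks.connect('dpi_changed', on_dpi_change)
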
Attributes
----------
patch
The `.Rectangle` instance representing the figure patch.
suppressComposite
For multiple figure images, the figure will make composite images
depending on the renderer option_image_nocomposite function. If
*suppressComposite* is a boolean, this will override the renderer.
"""
def __str__(self):
return "Figure(%gx%g)" % tuple(self.bbox.size)
def __repr__(self):
return "<{clsname} size {h:g}x{w:g} with {naxes} Axes>".format(
clsname=self.__class__.__name__,
h=self.bbox.size[0], w=self.bbox.size[1],
naxes=len(self.axes),
)
def __init__(self,
figsize=None, # defaults to rc figure.figsize
dpi=None, # defaults to rc figure.dpi
facecolor=None, # defaults to rc figure.facecolor
edgecolor=None, # defaults to rc figure.edgecolor
linewidth=0.0, # the default linewidth of the frame
frameon=None, # whether or not to draw the figure frame
subplotpars=None, # default to rc
tight_layout=None, # default to rc figure.autolayout
constrained_layout=None, # default to rc
#figure.constrained_layout.use
):
"""
Parameters
----------
figsize : 2-tuple of floats
``(width, height)`` tuple in inches
dpi : float
Dots per inch
facecolor
The figure patch facecolor; defaults to rc ``figure.facecolor``
edgecolor
The figure patch edge color; defaults to rc ``figure.edgecolor``
linewidth : float
The figure patch edge linewidth; the default linewidth of the frame
frameon : bool
If ``False``, suppress drawing the figure frame
subplotpars : :class:`SubplotParams`
Subplot parameters, defaults to rc
tight_layout : bool
If ``False`` use *subplotpars*; if ``True`` adjust subplot
parameters using `.tight_layout` with default padding.
When providing a dict containing the keys
``pad``, ``w_pad``, ``h_pad``, and ``rect``, the default
`.tight_layout` paddings will be overridden.
Defaults to rc ``figure.autolayout``.
constrained_layout : bool
If ``True`` use constrained layout to adjust positioning of plot
elements. Like ``tight_layout``, but designed to be more
flexible. See
:doc:`/tutorials/intermediate/constrainedlayout_guide`
for examples. (Note: does not work with :meth:`.subplot` or
:meth:`.subplot2grid`.)
Defaults to rc ``figure.constrained_layout.use``.
"""
Artist.__init__(self)
# remove the non-figure artist _axes property
# as it makes no sense for a figure to be _in_ an axes
# this is used by the property methods in the artist base class
# which are over-ridden in this class
del self._axes
self.callbacks = cbook.CallbackRegistry()
if figsize is None:
figsize = rcParams['figure.figsize']
if dpi is None:
dpi = rcParams['figure.dpi']
if facecolor is None:
facecolor = rcParams['figure.facecolor']
if edgecolor is None:
edgecolor = rcParams['figure.edgecolor']
if frameon is None:
frameon = rcParams['figure.frameon']
if not np.isfinite(figsize).all():
raise ValueError('figure size must be finite not '
'{}'.format(figsize))
self.bbox_inches = Bbox.from_bounds(0, 0, *figsize)
self.dpi_scale_trans = Affine2D().scale(dpi, dpi)
# do not use property as it will trigger
self._dpi = dpi
self.bbox = TransformedBbox(self.bbox_inches, self.dpi_scale_trans)
self.frameon = frameon
self.transFigure = BboxTransformTo(self.bbox)
self.patch = Rectangle(
xy=(0, 0), width=1, height=1,
facecolor=facecolor, edgecolor=edgecolor, linewidth=linewidth)
self._set_artist_props(self.patch)
self.patch.set_aa(False)
self._hold = rcParams['axes.hold']
if self._hold is None:
self._hold = True
self.canvas = None
self._suptitle = None
if subplotpars is None:
subplotpars = SubplotParams()
self.subplotpars = subplotpars
# constrained_layout:
self._layoutbox = None
# set in set_constrained_layout_pads()
self.set_constrained_layout(constrained_layout)
self.set_tight_layout(tight_layout)
self._axstack = AxesStack() # track all figure axes and current axes
self.clf()
self._cachedRenderer = None
# groupers to keep track of x and y labels we want to align.
# see self.align_xlabels and self.align_ylabels and
# axis._get_tick_boxes_siblings
self._align_xlabel_grp = cbook.Grouper()
self._align_ylabel_grp = cbook.Grouper()
@property
@cbook.deprecated("2.1", alternative="Figure.patch")
def figurePatch(self):
return self.patch
# TODO: I'd like to dynamically add the _repr_html_ method
# to the figure in the right context, but then IPython doesn't
# use it, for some reason.
def _repr_html_(self):
# We can't use "isinstance" here, because then we'd end up importing
        # webagg unconditionally.
if (self.canvas is not None and
'WebAgg' in self.canvas.__class__.__name__):
from matplotlib.backends import backend_webagg
return backend_webagg.ipython_inline_display(self)
def show(self, warn=True):
"""
If using a GUI backend with pyplot, display the figure window.
If the figure was not created using
:func:`~matplotlib.pyplot.figure`, it will lack a
:class:`~matplotlib.backend_bases.FigureManagerBase`, and
will raise an AttributeError.
Parameters
----------
        warn : bool
If ``True``, issue warning when called on a non-GUI backend
Notes
-----
For non-GUI backends, this does nothing, in which case a warning will
be issued if *warn* is ``True`` (default).
"""
try:
manager = getattr(self.canvas, 'manager')
except AttributeError as err:
raise AttributeError("%s\n"
"Figure.show works only "
"for figures managed by pyplot, normally "
"created by pyplot.figure()." % err)
if manager is not None:
try:
manager.show()
return
except NonGuiException:
pass
if warn:
import warnings
warnings.warn(
"matplotlib is currently using a non-GUI backend, "
"so cannot show the figure")
def _get_axes(self):
return self._axstack.as_list()
axes = property(fget=_get_axes, doc="Read-only: list of axes in Figure")
def _get_dpi(self):
return self._dpi
def _set_dpi(self, dpi, forward=True):
"""
Parameters
----------
dpi : float
forward : bool
Passed on to `~.Figure.set_size_inches`
"""
self._dpi = dpi
self.dpi_scale_trans.clear().scale(dpi, dpi)
w, h = self.get_size_inches()
self.set_size_inches(w, h, forward=forward)
self.callbacks.process('dpi_changed', self)
dpi = property(_get_dpi, _set_dpi)
def get_tight_layout(self):
"""
Return whether and how `.tight_layout` is called when drawing.
"""
return self._tight
def set_tight_layout(self, tight):
"""
Set whether and how `.tight_layout` is called when drawing.
Parameters
----------
tight : bool or dict with keys "pad", "w_pad", "h_pad", "rect" or None
If a bool, sets whether to call `.tight_layout` upon drawing.
If ``None``, use the ``figure.autolayout`` rcparam instead.
If a dict, pass it as kwargs to `.tight_layout`, overriding the
default paddings.
..
ACCEPTS: [ bool
| dict with keys "pad", "w_pad", "h_pad", "rect"
| None ]
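
        For example, an illustrative sketch (the pad values are arbitrary)::

            fig.set_tight_layout({'pad': 0.4, 'w_pad': 0.5, 'h_pad': 1.0})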
"""
if tight is None:
tight = rcParams['figure.autolayout']
self._tight = bool(tight)
self._tight_parameters = tight if isinstance(tight, dict) else {}
self.stale = True
def get_constrained_layout(self):
"""
Return a boolean: True means constrained layout is being used.
See :doc:`/tutorials/intermediate/constrainedlayout_guide`
"""
return self._constrained
def set_constrained_layout(self, constrained):
"""
Set whether ``constrained_layout`` is used upon drawing. If None,
the rcParams['figure.constrained_layout.use'] value will be used.
When providing a dict containing the keys `w_pad`, `h_pad`
the default ``constrained_layout`` paddings will be
overridden. These pads are in inches and default to 3.0/72.0.
``w_pad`` is the width padding and ``h_pad`` is the height padding.
ACCEPTS: [True | False | dict | None ]
See :doc:`/tutorials/intermediate/constrainedlayout_guide`
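
        A hedged example (the pad values are chosen arbitrarily)::

            fig.set_constrained_layout({'w_pad': 0.05, 'h_pad': 0.05})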
"""
self._constrained_layout_pads = dict()
self._constrained_layout_pads['w_pad'] = None
self._constrained_layout_pads['h_pad'] = None
self._constrained_layout_pads['wspace'] = None
self._constrained_layout_pads['hspace'] = None
if constrained is None:
constrained = rcParams['figure.constrained_layout.use']
self._constrained = bool(constrained)
if isinstance(constrained, dict):
self.set_constrained_layout_pads(**constrained)
else:
self.set_constrained_layout_pads()
self.stale = True
def set_constrained_layout_pads(self, **kwargs):
"""
Set padding for ``constrained_layout``. Note the kwargs can be passed
as a dictionary ``fig.set_constrained_layout(**paddict)``.
See :doc:`/tutorials/intermediate/constrainedlayout_guide`
Parameters
----------
w_pad : scalar
Width padding in inches. This is the pad around axes
and is meant to make sure there is enough room for fonts to
look good. Defaults to 3 pts = 0.04167 inches
h_pad : scalar
Height padding in inches. Defaults to 3 pts.
wspace: scalar
Width padding between subplots, expressed as a fraction of the
subplot width. The total padding ends up being w_pad + wspace.
hspace: scalar
Height padding between subplots, expressed as a fraction of the
subplot width. The total padding ends up being h_pad + hspace.
"""
todo = ['w_pad', 'h_pad', 'wspace', 'hspace']
for td in todo:
if td in kwargs and kwargs[td] is not None:
self._constrained_layout_pads[td] = kwargs[td]
else:
self._constrained_layout_pads[td] = (
rcParams['figure.constrained_layout.' + td])
def get_constrained_layout_pads(self, relative=False):
"""
Get padding for ``constrained_layout``.
Returns a list of `w_pad, h_pad` in inches and
`wspace` and `hspace` as fractions of the subplot.
See :doc:`/tutorials/intermediate/constrainedlayout_guide`
Parameters
----------
relative : boolean
If `True`, then convert from inches to figure relative.
"""
w_pad = self._constrained_layout_pads['w_pad']
h_pad = self._constrained_layout_pads['h_pad']
wspace = self._constrained_layout_pads['wspace']
hspace = self._constrained_layout_pads['hspace']
if relative and ((w_pad is not None) or (h_pad is not None)):
renderer0 = layoutbox.get_renderer(self)
dpi = renderer0.dpi
w_pad = w_pad * dpi / renderer0.width
h_pad = h_pad * dpi / renderer0.height
return w_pad, h_pad, wspace, hspace
def autofmt_xdate(self, bottom=0.2, rotation=30, ha='right', which=None):
"""
Date ticklabels often overlap, so it is useful to rotate them
and right align them. Also, a common use case is a number of
subplots with shared xaxes where the x-axis is date data. The
ticklabels are often long, and it helps to rotate them on the
bottom subplot and turn them off on other subplots, as well as
turn off xlabels.
Parameters
----------
bottom : scalar
The bottom of the subplots for :meth:`subplots_adjust`
rotation : angle in degrees
The rotation of the xtick labels
ha : string
The horizontal alignment of the xticklabels
which : {None, 'major', 'minor', 'both'}
Selects which ticklabels to rotate (default is None which works
same as major)
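
        A minimal sketch, assuming the usual ``import matplotlib.pyplot as
        plt`` and with ``dates`` and ``values`` standing in for real data::

            fig, ax = plt.subplots()
            ax.plot(dates, values)
            fig.autofmt_xdate(rotation=45)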
"""
allsubplots = all(hasattr(ax, 'is_last_row') for ax in self.axes)
if len(self.axes) == 1:
for label in self.axes[0].get_xticklabels(which=which):
label.set_ha(ha)
label.set_rotation(rotation)
else:
if allsubplots:
for ax in self.get_axes():
if ax.is_last_row():
for label in ax.get_xticklabels(which=which):
label.set_ha(ha)
label.set_rotation(rotation)
else:
for label in ax.get_xticklabels(which=which):
label.set_visible(False)
ax.set_xlabel('')
if allsubplots:
self.subplots_adjust(bottom=bottom)
self.stale = True
def get_children(self):
"""Get a list of artists contained in the figure."""
children = [self.patch]
children.extend(self.artists)
children.extend(self.axes)
children.extend(self.lines)
children.extend(self.patches)
children.extend(self.texts)
children.extend(self.images)
children.extend(self.legends)
return children
def contains(self, mouseevent):
"""
Test whether the mouse event occurred on the figure.
Returns True, {}.
"""
if callable(self._contains):
return self._contains(self, mouseevent)
inside = self.bbox.contains(mouseevent.x, mouseevent.y)
return inside, {}
def get_window_extent(self, *args, **kwargs):
"""
Return figure bounding box in display space; arguments are ignored.
"""
return self.bbox
def suptitle(self, t, **kwargs):
"""
Add a centered title to the figure.
kwargs are :class:`matplotlib.text.Text` properties. Using figure
coordinates, the defaults are:
x : 0.5
The x location of the text in figure coords
y : 0.98
The y location of the text in figure coords
horizontalalignment : 'center'
The horizontal alignment of the text
verticalalignment : 'top'
The vertical alignment of the text
If the `fontproperties` keyword argument is given then the
rcParams defaults for `fontsize` (`figure.titlesize`) and
`fontweight` (`figure.titleweight`) will be ignored in favour
of the `FontProperties` defaults.
A :class:`matplotlib.text.Text` instance is returned.
Example::
fig.suptitle('this is the figure title', fontsize=12)
"""
x = kwargs.pop('x', 0.5)
y = kwargs.pop('y', 0.98)
if ('horizontalalignment' not in kwargs) and ('ha' not in kwargs):
kwargs['horizontalalignment'] = 'center'
if ('verticalalignment' not in kwargs) and ('va' not in kwargs):
kwargs['verticalalignment'] = 'top'
if 'fontproperties' not in kwargs:
if 'fontsize' not in kwargs and 'size' not in kwargs:
kwargs['size'] = rcParams['figure.titlesize']
if 'fontweight' not in kwargs and 'weight' not in kwargs:
kwargs['weight'] = rcParams['figure.titleweight']
sup = self.text(x, y, t, **kwargs)
if self._suptitle is not None:
self._suptitle.set_text(t)
self._suptitle.set_position((x, y))
self._suptitle.update_from(sup)
sup.remove()
else:
self._suptitle = sup
if self._layoutbox is not None:
# assign a layout box to the suptitle...
figlb = self._layoutbox
self._suptitle._layoutbox = layoutbox.LayoutBox(
parent=figlb,
name=figlb.name+'.suptitle')
for child in figlb.children:
if not (child == self._suptitle._layoutbox):
w_pad, h_pad, wspace, hspace = \
self.get_constrained_layout_pads(
relative=True)
layoutbox.vstack([self._suptitle._layoutbox, child],
padding=h_pad*2., strength='required')
self.stale = True
return self._suptitle
def set_canvas(self, canvas):
"""
Set the canvas that contains the figure
ACCEPTS: a FigureCanvas instance
"""
self.canvas = canvas
@cbook.deprecated("2.0")
def hold(self, b=None):
"""
Set the hold state. If hold is None (default), toggle the
hold state. Else set the hold state to boolean value b.
e.g.::
hold() # toggle hold
hold(True) # hold is on
hold(False) # hold is off
All "hold" machinery is deprecated.
"""
if b is None:
self._hold = not self._hold
else:
self._hold = b
def figimage(self, X,
xo=0,
yo=0,
alpha=None,
norm=None,
cmap=None,
vmin=None,
vmax=None,
origin=None,
resize=False,
**kwargs):
"""
Adds a non-resampled image to the figure.
call signatures::
figimage(X, **kwargs)
adds a non-resampled array *X* to the figure.
::
figimage(X, xo, yo)
with pixel offsets *xo*, *yo*,
*X* must be a float array:
* If *X* is MxN, assume luminance (grayscale)
* If *X* is MxNx3, assume RGB
* If *X* is MxNx4, assume RGBA
Optional keyword arguments:
========= =========================================================
Keyword Description
========= =========================================================
resize a boolean, True or False. If "True", then re-size the
Figure to match the given image size.
xo or yo An integer, the *x* and *y* image offset in pixels
cmap a :class:`matplotlib.colors.Colormap` instance, e.g.,
cm.jet. If *None*, default to the rc ``image.cmap``
value
norm a :class:`matplotlib.colors.Normalize` instance. The
default is normalization(). This scales luminance -> 0-1
vmin|vmax are used to scale a luminance image to 0-1. If either
is *None*, the min and max of the luminance values will
be used. Note if you pass a norm instance, the settings
for *vmin* and *vmax* will be ignored.
alpha the alpha blending value, default is *None*
origin [ 'upper' | 'lower' ] Indicates where the [0,0] index of
the array is in the upper left or lower left corner of
the axes. Defaults to the rc image.origin value
========= =========================================================
figimage complements the axes image
(:meth:`~matplotlib.axes.Axes.imshow`) which will be resampled
to fit the current axes. If you want a resampled image to
fill the entire figure, you can define an
:class:`~matplotlib.axes.Axes` with extent [0,0,1,1].
An :class:`matplotlib.image.FigureImage` instance is returned.
Additional kwargs are Artist kwargs passed on to
:class:`~matplotlib.image.FigureImage`
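
        A minimal, illustrative sketch (the array is made up; ``plt`` is
        assumed to be ``matplotlib.pyplot``)::

            fig = plt.figure()
            Z = np.random.random((50, 50))
            fig.figimage(Z, xo=10, yo=10)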
"""
if not self._hold:
self.clf()
if resize:
dpi = self.get_dpi()
figsize = [x / dpi for x in (X.shape[1], X.shape[0])]
self.set_size_inches(figsize, forward=True)
im = FigureImage(self, cmap, norm, xo, yo, origin, **kwargs)
im.stale_callback = _stale_figure_callback
im.set_array(X)
im.set_alpha(alpha)
if norm is None:
im.set_clim(vmin, vmax)
self.images.append(im)
im._remove_method = lambda h: self.images.remove(h)
self.stale = True
return im
def set_size_inches(self, w, h=None, forward=True):
"""Set the figure size in inches (1in == 2.54cm)
Usage ::
fig.set_size_inches(w, h) # OR
fig.set_size_inches((w, h))
optional kwarg *forward=True* will cause the canvas size to be
automatically updated; e.g., you can resize the figure window
from the shell
ACCEPTS: a w, h tuple with w, h in inches
See Also
--------
matplotlib.Figure.get_size_inches
"""
# the width and height have been passed in as a tuple to the first
# argument, so unpack them
if h is None:
w, h = w
if not all(np.isfinite(_) for _ in (w, h)):
raise ValueError('figure size must be finite not '
'({}, {})'.format(w, h))
self.bbox_inches.p1 = w, h
if forward:
canvas = getattr(self, 'canvas')
if canvas is not None:
ratio = getattr(self.canvas, '_dpi_ratio', 1)
dpival = self.dpi / ratio
canvasw = w * dpival
canvash = h * dpival
manager = getattr(self.canvas, 'manager', None)
if manager is not None:
manager.resize(int(canvasw), int(canvash))
self.stale = True
def get_size_inches(self):
"""
Returns the current size of the figure in inches (1in == 2.54cm)
as an numpy array.
Returns
-------
size : ndarray
The size of the figure in inches
See Also
--------
matplotlib.Figure.set_size_inches
"""
return np.array(self.bbox_inches.p1)
def get_edgecolor(self):
"""Get the edge color of the Figure rectangle."""
return self.patch.get_edgecolor()
def get_facecolor(self):
"""Get the face color of the Figure rectangle."""
return self.patch.get_facecolor()
def get_figwidth(self):
"""Return the figwidth as a float."""
return self.bbox_inches.width
def get_figheight(self):
"""Return the figheight as a float."""
return self.bbox_inches.height
def get_dpi(self):
"""Return the dpi as a float."""
return self.dpi
def get_frameon(self):
"""Get the boolean indicating frameon."""
return self.frameon
def set_edgecolor(self, color):
"""
Set the edge color of the Figure rectangle
ACCEPTS: any matplotlib color - see help(colors)
"""
self.patch.set_edgecolor(color)
def set_facecolor(self, color):
"""
Set the face color of the Figure rectangle
ACCEPTS: any matplotlib color - see help(colors)
"""
self.patch.set_facecolor(color)
def set_dpi(self, val):
"""
Set the dots-per-inch of the figure
ACCEPTS: float
"""
self.dpi = val
self.stale = True
def set_figwidth(self, val, forward=True):
"""
Set the width of the figure in inches
ACCEPTS: float
"""
self.set_size_inches(val, self.get_figheight(), forward=forward)
def set_figheight(self, val, forward=True):
"""
Set the height of the figure in inches
ACCEPTS: float
"""
self.set_size_inches(self.get_figwidth(), val, forward=forward)
def set_frameon(self, b):
"""
Set whether the figure frame (background) is displayed or invisible
ACCEPTS: boolean
"""
self.frameon = b
self.stale = True
def delaxes(self, ax):
"""
Remove the `.Axes` *ax* from the figure and update the current axes.
"""
self._axstack.remove(ax)
for func in self._axobservers:
func(self)
self.stale = True
def _make_key(self, *args, **kwargs):
"""Make a hashable key out of args and kwargs."""
def fixitems(items):
# items may have arrays and lists in them, so convert them
# to tuples for the key
ret = []
for k, v in items:
# some objects can define __getitem__ without being
# iterable and in those cases the conversion to tuples
# will fail. So instead of using the iterable(v) function
# we simply try and convert to a tuple, and proceed if not.
try:
v = tuple(v)
except Exception:
pass
ret.append((k, v))
return tuple(ret)
def fixlist(args):
ret = []
for a in args:
if iterable(a):
a = tuple(a)
ret.append(a)
return tuple(ret)
key = fixlist(args), fixitems(six.iteritems(kwargs))
return key
def add_axes(self, *args, **kwargs):
"""
Add an axes at position *rect* [*left*, *bottom*, *width*,
*height*] where all quantities are in fractions of figure
width and height.
Parameters
----------
rect : sequence of float
A 4-length sequence of [left, bottom, width, height] quantities.
projection :
['aitoff' | 'hammer' | 'lambert' | 'mollweide' | \
'polar' | 'rectilinear'], optional
The projection type of the axes.
polar : boolean, optional
If True, equivalent to projection='polar'.
**kwargs
This method also takes the keyword arguments for
:class:`~matplotlib.axes.Axes`.
Returns
-------
axes : Axes
The added axes.
Examples
--------
A simple example::
rect = l,b,w,h
fig.add_axes(rect)
fig.add_axes(rect, frameon=False, facecolor='g')
fig.add_axes(rect, polar=True)
fig.add_axes(rect, projection='polar')
fig.add_axes(ax)
If the figure already has an axes with the same parameters, then it
will simply make that axes current and return it. This behavior
has been deprecated as of Matplotlib 2.1. Meanwhile, if you do
not want this behavior (i.e., you want to force the creation of a
new Axes), you must use a unique set of args and kwargs. The axes
:attr:`~matplotlib.axes.Axes.label` attribute has been exposed for this
purpose: if you want two axes that are otherwise identical to be added
to the figure, make sure you give them unique labels::
fig.add_axes(rect, label='axes1')
fig.add_axes(rect, label='axes2')
In rare circumstances, add_axes may be called with a single
argument, an Axes instance already created in the present
figure but not in the figure's list of axes. For example,
if an axes has been removed with :meth:`delaxes`, it can
be restored with::
fig.add_axes(ax)
In all cases, the :class:`~matplotlib.axes.Axes` instance
will be returned.
"""
if not len(args):
return
# shortcut the projection "key" modifications later on, if an axes
# with the exact args/kwargs exists, return it immediately.
key = self._make_key(*args, **kwargs)
ax = self._axstack.get(key)
if ax is not None:
self.sca(ax)
return ax
if isinstance(args[0], Axes):
a = args[0]
if a.get_figure() is not self:
raise ValueError(
"The Axes must have been created in the present figure")
else:
rect = args[0]
if not np.isfinite(rect).all():
raise ValueError('all entries in rect must be finite '
'not {}'.format(rect))
projection_class, kwargs, key = process_projection_requirements(
self, *args, **kwargs)
# check that an axes of this type doesn't already exist, if it
# does, set it as active and return it
ax = self._axstack.get(key)
if isinstance(ax, projection_class):
self.sca(ax)
return ax
# create the new axes using the axes class given
a = projection_class(self, rect, **kwargs)
self._axstack.add(key, a)
self.sca(a)
a._remove_method = self.__remove_ax
self.stale = True
a.stale_callback = _stale_figure_callback
return a
def add_subplot(self, *args, **kwargs):
"""
Add a subplot.
Parameters
----------
*args
Either a 3-digit integer or three separate integers
describing the position of the subplot. If the three
integers are R, C, and P in order, the subplot will take
the Pth position on a grid with R rows and C columns.
projection : ['aitoff' | 'hammer' | 'lambert' | \
'mollweide' | 'polar' | 'rectilinear'], optional
The projection type of the axes.
polar : boolean, optional
If True, equivalent to projection='polar'.
**kwargs
This method also takes the keyword arguments for
:class:`~matplotlib.axes.Axes`.
Returns
-------
axes : Axes
The axes of the subplot.
Notes
-----
If the figure already has a subplot with key (*args*,
*kwargs*) then it will simply make that subplot current and
return it. This behavior is deprecated.
Examples
--------
::
fig.add_subplot(111)
# equivalent but more general
fig.add_subplot(1, 1, 1)
# add subplot with red background
fig.add_subplot(212, facecolor='r')
# add a polar subplot
fig.add_subplot(111, projection='polar')
# add Subplot instance sub
fig.add_subplot(sub)
See Also
--------
matplotlib.pyplot.subplot : for an explanation of the args.
"""
if not len(args):
return
if len(args) == 1 and isinstance(args[0], int):
if not 100 <= args[0] <= 999:
raise ValueError("Integer subplot specification must be a "
"three-digit number, not {}".format(args[0]))
args = tuple(map(int, str(args[0])))
if isinstance(args[0], SubplotBase):
a = args[0]
if a.get_figure() is not self:
raise ValueError(
"The Subplot must have been created in the present figure")
# make a key for the subplot (which includes the axes object id
# in the hash)
key = self._make_key(*args, **kwargs)
else:
projection_class, kwargs, key = process_projection_requirements(
self, *args, **kwargs)
# try to find the axes with this key in the stack
ax = self._axstack.get(key)
if ax is not None:
if isinstance(ax, projection_class):
# the axes already existed, so set it as active & return
self.sca(ax)
return ax
else:
# Undocumented convenience behavior:
# subplot(111); subplot(111, projection='polar')
# will replace the first with the second.
# Without this, add_subplot would be simpler and
# more similar to add_axes.
self._axstack.remove(ax)
a = subplot_class_factory(projection_class)(self, *args, **kwargs)
self._axstack.add(key, a)
self.sca(a)
a._remove_method = self.__remove_ax
self.stale = True
a.stale_callback = _stale_figure_callback
return a
def subplots(self, nrows=1, ncols=1, sharex=False, sharey=False,
squeeze=True, subplot_kw=None, gridspec_kw=None):
"""
Add a set of subplots to this figure.
Parameters
----------
nrows, ncols : int, default: 1
Number of rows/cols of the subplot grid.
sharex, sharey : bool or {'none', 'all', 'row', 'col'}, default: False
Controls sharing of properties among x (`sharex`) or y (`sharey`)
axes:
- True or 'all': x- or y-axis will be shared among all
subplots.
- False or 'none': each subplot x- or y-axis will be
independent.
- 'row': each subplot row will share an x- or y-axis.
- 'col': each subplot column will share an x- or y-axis.
When subplots have a shared x-axis along a column, only the x tick
labels of the bottom subplot are visible. Similarly, when
subplots have a shared y-axis along a row, only the y tick labels
of the first column subplot are visible.
squeeze : bool, default: True
- If True, extra dimensions are squeezed out from the returned
axis object:
- if only one subplot is constructed (nrows=ncols=1), the
resulting single Axes object is returned as a scalar.
                - for Nx1 or 1xN subplots, the returned object is a 1D numpy
                  object array of Axes objects.
                - for NxM subplots with N>1 and M>1, the returned object is
                  a 2D numpy object array of Axes objects.
- If False, no squeezing at all is done: the returned Axes object
is always a 2D array containing Axes instances, even if it ends
up being 1x1.
subplot_kw : dict, default: {}
Dict with keywords passed to the
:meth:`~matplotlib.figure.Figure.add_subplot` call used to create
each subplots.
gridspec_kw : dict, default: {}
Dict with keywords passed to the
:class:`~matplotlib.gridspec.GridSpec` constructor used to create
the grid the subplots are placed on.
Returns
-------
ax : single Axes object or array of Axes objects
The added axes. The dimensions of the resulting array can be
controlled with the squeeze keyword, see above.
See Also
--------
pyplot.subplots : pyplot API; docstring includes examples.
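
        Examples
        --------
        A minimal sketch (the plotted data is illustrative; ``plt`` is
        assumed to be ``matplotlib.pyplot``)::

            fig = plt.figure()
            axs = fig.subplots(2, 2, sharex=True, sharey=True)
            for ax in axs.flat:
                ax.plot([1, 2, 3], [0, 1, 0])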
"""
# for backwards compatibility
if isinstance(sharex, bool):
sharex = "all" if sharex else "none"
if isinstance(sharey, bool):
sharey = "all" if sharey else "none"
share_values = ["all", "row", "col", "none"]
if sharex not in share_values:
# This check was added because it is very easy to type
# `subplots(1, 2, 1)` when `subplot(1, 2, 1)` was intended.
# In most cases, no error will ever occur, but mysterious behavior
# will result because what was intended to be the subplot index is
# instead treated as a bool for sharex.
if isinstance(sharex, int):
warnings.warn(
"sharex argument to subplots() was an integer. "
"Did you intend to use subplot() (without 's')?")
raise ValueError("sharex [%s] must be one of %s" %
(sharex, share_values))
if sharey not in share_values:
raise ValueError("sharey [%s] must be one of %s" %
(sharey, share_values))
if subplot_kw is None:
subplot_kw = {}
if gridspec_kw is None:
gridspec_kw = {}
if self.get_constrained_layout():
gs = GridSpec(nrows, ncols, figure=self, **gridspec_kw)
else:
# this should turn constrained_layout off if we don't want it
gs = GridSpec(nrows, ncols, figure=None, **gridspec_kw)
# Create array to hold all axes.
axarr = np.empty((nrows, ncols), dtype=object)
for row in range(nrows):
for col in range(ncols):
shared_with = {"none": None, "all": axarr[0, 0],
"row": axarr[row, 0], "col": axarr[0, col]}
subplot_kw["sharex"] = shared_with[sharex]
subplot_kw["sharey"] = shared_with[sharey]
axarr[row, col] = self.add_subplot(gs[row, col], **subplot_kw)
# turn off redundant tick labeling
if sharex in ["col", "all"]:
# turn off all but the bottom row
for ax in axarr[:-1, :].flat:
ax.xaxis.set_tick_params(which='both',
labelbottom=False, labeltop=False)
ax.xaxis.offsetText.set_visible(False)
if sharey in ["row", "all"]:
# turn off all but the first column
for ax in axarr[:, 1:].flat:
ax.yaxis.set_tick_params(which='both',
labelleft=False, labelright=False)
ax.yaxis.offsetText.set_visible(False)
if squeeze:
# Discarding unneeded dimensions that equal 1. If we only have one
# subplot, just return it instead of a 1-element array.
return axarr.item() if axarr.size == 1 else axarr.squeeze()
else:
# Returned axis array will be always 2-d, even if nrows=ncols=1.
return axarr
def __remove_ax(self, ax):
def _reset_loc_form(axis):
axis.set_major_formatter(axis.get_major_formatter())
axis.set_major_locator(axis.get_major_locator())
axis.set_minor_formatter(axis.get_minor_formatter())
axis.set_minor_locator(axis.get_minor_locator())
def _break_share_link(ax, grouper):
siblings = grouper.get_siblings(ax)
if len(siblings) > 1:
grouper.remove(ax)
for last_ax in siblings:
if ax is last_ax:
continue
return last_ax
return None
self.delaxes(ax)
last_ax = _break_share_link(ax, ax._shared_y_axes)
if last_ax is not None:
_reset_loc_form(last_ax.yaxis)
last_ax = _break_share_link(ax, ax._shared_x_axes)
if last_ax is not None:
_reset_loc_form(last_ax.xaxis)
def clf(self, keep_observers=False):
"""
Clear the figure.
Set *keep_observers* to True if, for example,
a gui widget is tracking the axes in the figure.
"""
self.suppressComposite = None
self.callbacks = cbook.CallbackRegistry()
for ax in tuple(self.axes): # Iterate over the copy.
ax.cla()
self.delaxes(ax) # removes ax from self._axstack
toolbar = getattr(self.canvas, 'toolbar', None)
if toolbar is not None:
toolbar.update()
self._axstack.clear()
self.artists = []
self.lines = []
self.patches = []
self.texts = []
self.images = []
self.legends = []
if not keep_observers:
self._axobservers = []
self._suptitle = None
if self.get_constrained_layout():
layoutbox.nonetree(self._layoutbox)
self.stale = True
def clear(self, keep_observers=False):
"""
Clear the figure -- synonym for :meth:`clf`.
"""
self.clf(keep_observers=keep_observers)
@allow_rasterization
def draw(self, renderer):
"""
Render the figure using :class:`matplotlib.backend_bases.RendererBase`
instance *renderer*.
"""
# draw the figure bounding box, perhaps none for white figure
if not self.get_visible():
return
artists = sorted(
(artist for artist in (self.patches + self.lines + self.artists
+ self.images + self.axes + self.texts
+ self.legends)
if not artist.get_animated()),
key=lambda artist: artist.get_zorder())
try:
renderer.open_group('figure')
            if self.get_constrained_layout() and self.axes:
                self.execute_constrained_layout(renderer)
if self.get_tight_layout() and self.axes:
try:
self.tight_layout(renderer,
**self._tight_parameters)
except ValueError:
pass
# ValueError can occur when resizing a window.
if self.frameon:
self.patch.draw(renderer)
mimage._draw_list_compositing_images(
renderer, self, artists, self.suppressComposite)
renderer.close_group('figure')
finally:
self.stale = False
self._cachedRenderer = renderer
self.canvas.draw_event(renderer)
def draw_artist(self, a):
"""
Draw :class:`matplotlib.artist.Artist` instance *a* only.
This is available only after the figure is drawn.
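
        A rough blitting-style sketch (``line`` and ``new_y`` are
        illustrative; ``blit`` is only available on canvases that
        support it)::

            fig.canvas.draw()           # caches the renderer
            line.set_ydata(new_y)
            fig.draw_artist(line)
            fig.canvas.blit(fig.bbox)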
"""
if self._cachedRenderer is None:
raise AttributeError("draw_artist can only be used after an "
"initial draw which caches the renderer")
a.draw(self._cachedRenderer)
def get_axes(self):
return self.axes
@docstring.dedent_interpd
def legend(self, *args, **kwargs):
"""
Place a legend on the figure.
To make a legend from existing artists on every axes::
legend()
To make a legend for a list of lines and labels::
legend( (line1, line2, line3),
('label1', 'label2', 'label3'),
loc='upper right')
These can also be specified by keyword::
legend(handles=(line1, line2, line3),
labels=('label1', 'label2', 'label3'),
loc='upper right')
Parameters
----------
handles : sequence of `.Artist`, optional
A list of Artists (lines, patches) to be added to the legend.
Use this together with *labels*, if you need full control on what
is shown in the legend and the automatic mechanism described above
is not sufficient.
The length of handles and labels should be the same in this
case. If they are not, they are truncated to the smaller length.
labels : sequence of strings, optional
A list of labels to show next to the artists.
Use this together with *handles*, if you need full control on what
is shown in the legend and the automatic mechanism described above
is not sufficient.
Other Parameters
----------------
loc : int or string or pair of floats, default: 'upper right'
The location of the legend. Possible codes are:
=============== =============
Location String Location Code
=============== =============
'best' 0
'upper right' 1
'upper left' 2
'lower left' 3
'lower right' 4
'right' 5
'center left' 6
'center right' 7
'lower center' 8
'upper center' 9
'center' 10
=============== =============
Alternatively can be a 2-tuple giving ``x, y`` of the lower-left
corner of the legend in axes coordinates (in which case
``bbox_to_anchor`` will be ignored).
bbox_to_anchor : `.BboxBase` or pair of floats
Specify any arbitrary location for the legend in `bbox_transform`
coordinates (default Axes coordinates).
For example, to put the legend's upper right hand corner in the
center of the axes the following keywords can be used::
loc='upper right', bbox_to_anchor=(0.5, 0.5)
ncol : integer
The number of columns that the legend has. Default is 1.
prop : None or :class:`matplotlib.font_manager.FontProperties` or dict
The font properties of the legend. If None (default), the current
:data:`matplotlib.rcParams` will be used.
fontsize : int or float or {'xx-small', 'x-small', 'small', 'medium', \
'large', 'x-large', 'xx-large'}
Controls the font size of the legend. If the value is numeric the
size will be the absolute font size in points. String values are
relative to the current default font size. This argument is only
used if `prop` is not specified.
numpoints : None or int
The number of marker points in the legend when creating a legend
entry for a `.Line2D` (line).
Default is ``None``, which will take the value from
:rc:`legend.numpoints`.
scatterpoints : None or int
The number of marker points in the legend when creating
a legend entry for a `.PathCollection` (scatter plot).
Default is ``None``, which will take the value from
:rc:`legend.scatterpoints`.
scatteryoffsets : iterable of floats
The vertical offset (relative to the font size) for the markers
created for a scatter plot legend entry. 0.0 is at the base of the
legend text, and 1.0 is at the top. To draw all markers at the
same height, set to ``[0.5]``. Default is ``[0.375, 0.5, 0.3125]``.
markerscale : None or int or float
The relative size of legend markers compared with the originally
drawn ones.
Default is ``None``, which will take the value from
:rc:`legend.markerscale`.
markerfirst : bool
If *True*, legend marker is placed to the left of the legend label.
If *False*, legend marker is placed to the right of the legend
label.
Default is *True*.
frameon : None or bool
Control whether the legend should be drawn on a patch
(frame).
Default is ``None``, which will take the value from
:rc:`legend.frameon`.
fancybox : None or bool
Control whether round edges should be enabled around the
:class:`~matplotlib.patches.FancyBboxPatch` which makes up the
legend's background.
Default is ``None``, which will take the value from
:rc:`legend.fancybox`.
shadow : None or bool
Control whether to draw a shadow behind the legend.
Default is ``None``, which will take the value from
:rc:`legend.shadow`.
framealpha : None or float
Control the alpha transparency of the legend's background.
Default is ``None``, which will take the value from
:rc:`legend.framealpha`. If shadow is activated and
*framealpha* is ``None``, the default value is ignored.
facecolor : None or "inherit" or a color spec
Control the legend's background color.
Default is ``None``, which will take the value from
:rc:`legend.facecolor`. If ``"inherit"``, it will take
:rc:`axes.facecolor`.
edgecolor : None or "inherit" or a color spec
Control the legend's background patch edge color.
Default is ``None``, which will take the value from
:rc:`legend.edgecolor`. If ``"inherit"``, it will take
:rc:`axes.edgecolor`.
mode : {"expand", None}
If `mode` is set to ``"expand"`` the legend will be horizontally
expanded to fill the axes area (or `bbox_to_anchor` if it defines
the legend's size).
bbox_transform : None or :class:`matplotlib.transforms.Transform`
The transform for the bounding box (`bbox_to_anchor`). For a value
of ``None`` (default) the Axes'
:data:`~matplotlib.axes.Axes.transAxes` transform will be used.
title : str or None
The legend's title. Default is no title (``None``).
borderpad : float or None
The fractional whitespace inside the legend border.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.borderpad`.
labelspacing : float or None
The vertical space between the legend entries.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.labelspacing`.
handlelength : float or None
The length of the legend handles.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.handlelength`.
handletextpad : float or None
The pad between the legend handle and text.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.handletextpad`.
borderaxespad : float or None
The pad between the axes and legend border.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.borderaxespad`.
columnspacing : float or None
The spacing between columns.
Measured in font-size units.
Default is ``None``, which will take the value from
:rc:`legend.columnspacing`.
handler_map : dict or None
The custom dictionary mapping instances or types to a legend
handler. This `handler_map` updates the default handler map
found at :func:`matplotlib.legend.Legend.get_legend_handler_map`.
Returns
-------
:class:`matplotlib.legend.Legend` instance
Notes
-----
Not all kinds of artist are supported by the legend command. See
:ref:`sphx_glr_tutorials_intermediate_legend_guide.py` for details.
"""
handles, labels, extra_args, kwargs = mlegend._parse_legend_args(
self.axes,
*args,
**kwargs)
# check for third arg
if len(extra_args):
# cbook.warn_deprecated(
# "2.1",
# "Figure.legend will accept no more than two "
# "positional arguments in the future. Use "
# "'fig.legend(handles, labels, loc=location)' "
# "instead.")
# kwargs['loc'] = extra_args[0]
# extra_args = extra_args[1:]
pass
l = mlegend.Legend(self, handles, labels, *extra_args, **kwargs)
self.legends.append(l)
l._remove_method = lambda h: self.legends.remove(h)
self.stale = True
return l
@docstring.dedent_interpd
def text(self, x, y, s, *args, **kwargs):
"""
Add text to figure.
Call signature::
text(x, y, s, fontdict=None, **kwargs)
Add text to figure at location *x*, *y* (relative 0-1
coords). See :func:`~matplotlib.pyplot.text` for the meaning
of the other arguments.
kwargs control the :class:`~matplotlib.text.Text` properties:
%(Text)s
"""
override = _process_text_args({}, *args, **kwargs)
t = Text(x=x, y=y, text=s)
t.update(override)
self._set_artist_props(t)
self.texts.append(t)
t._remove_method = lambda h: self.texts.remove(h)
self.stale = True
return t
def _set_artist_props(self, a):
if a != self:
a.set_figure(self)
a.stale_callback = _stale_figure_callback
a.set_transform(self.transFigure)
@docstring.dedent_interpd
def gca(self, **kwargs):
"""
Get the current axes, creating one if necessary
The following kwargs are supported for ensuring the returned axes
adheres to the given projection etc., and for axes creation if
the active axes does not exist:
%(Axes)s
"""
ckey, cax = self._axstack.current_key_axes()
# if there exists an axes on the stack see if it matches
# the desired axes configuration
if cax is not None:
# if no kwargs are given just return the current axes
# this is a convenience for gca() on axes such as polar etc.
if not kwargs:
return cax
# if the user has specified particular projection detail
# then build up a key which can represent this
else:
# we don't want to modify the original kwargs
# so take a copy so that we can do what we like to it
kwargs_copy = kwargs.copy()
projection_class, _, key = process_projection_requirements(
self, **kwargs_copy)
# let the returned axes have any gridspec by removing it from
# the key
ckey = ckey[1:]
key = key[1:]
# if the cax matches this key then return the axes, otherwise
# continue and a new axes will be created
if key == ckey and isinstance(cax, projection_class):
return cax
else:
warnings.warn('Requested projection is different from '
'current axis projection, creating new axis '
'with requested projection.', stacklevel=2)
# no axes found, so create one which spans the figure
return self.add_subplot(1, 1, 1, **kwargs)
def sca(self, a):
"""Set the current axes to be a and return a."""
self._axstack.bubble(a)
for func in self._axobservers:
func(self)
return a
def _gci(self):
"""
Helper for :func:`~matplotlib.pyplot.gci`. Do not use elsewhere.
"""
# Look first for an image in the current Axes:
cax = self._axstack.current_key_axes()[1]
if cax is None:
return None
im = cax._gci()
if im is not None:
return im
# If there is no image in the current Axes, search for
# one in a previously created Axes. Whether this makes
# sense is debatable, but it is the documented behavior.
for ax in reversed(self.axes):
im = ax._gci()
if im is not None:
return im
return None
def __getstate__(self):
state = super(Figure, self).__getstate__()
# print('\n\n\nStarting pickle')
# the axobservers cannot currently be pickled.
# Additionally, the canvas cannot currently be pickled, but this has
# the benefit of meaning that a figure can be detached from one canvas,
# and re-attached to another.
for attr_to_pop in ('_axobservers', 'show',
'canvas', '_cachedRenderer'):
state.pop(attr_to_pop, None)
# add version information to the state
state['__mpl_version__'] = _mpl_version
# check to see if the figure has a manager and whether it is registered
# with pyplot
if getattr(self.canvas, 'manager', None) is not None:
manager = self.canvas.manager
import matplotlib._pylab_helpers
if manager in list(six.itervalues(
matplotlib._pylab_helpers.Gcf.figs)):
state['_restore_to_pylab'] = True
# set all the layoutbox information to None. kiwisolver
# objects can't be pickled, so we lose the layout options
# at this point.
state.pop('_layoutbox', None)
# suptitle:
if self._suptitle is not None:
self._suptitle._layoutbox = None
return state
def __setstate__(self, state):
version = state.pop('__mpl_version__')
restore_to_pylab = state.pop('_restore_to_pylab', False)
if version != _mpl_version:
import warnings
warnings.warn("This figure was saved with matplotlib version %s "
"and is unlikely to function correctly." %
(version, ))
self.__dict__ = state
# re-initialise some of the unstored state information
self._axobservers = []
self.canvas = None
self._layoutbox = None
if restore_to_pylab:
# lazy import to avoid circularity
import matplotlib.pyplot as plt
import matplotlib._pylab_helpers as pylab_helpers
allnums = plt.get_fignums()
num = max(allnums) + 1 if allnums else 1
mgr = plt._backend_mod.new_figure_manager_given_figure(num, self)
# XXX The following is a copy and paste from pyplot. Consider
# factoring to pylab_helpers
if self.get_label():
mgr.set_window_title(self.get_label())
# make this figure current on button press event
def make_active(event):
pylab_helpers.Gcf.set_active(mgr)
mgr._cidgcf = mgr.canvas.mpl_connect('button_press_event',
make_active)
pylab_helpers.Gcf.set_active(mgr)
self.number = num
plt.draw_if_interactive()
self.stale = True
def add_axobserver(self, func):
"""Whenever the axes state change, ``func(self)`` will be called."""
self._axobservers.append(func)
def savefig(self, fname, **kwargs):
"""
Save the current figure.
Call signature::
savefig(fname, dpi=None, facecolor='w', edgecolor='w',
orientation='portrait', papertype=None, format=None,
transparent=False, bbox_inches=None, pad_inches=0.1,
frameon=None)
The output formats available depend on the backend being used.
Parameters
----------
fname : str or file-like object
A string containing a path to a filename, or a Python
file-like object, or possibly some backend-dependent object
such as :class:`~matplotlib.backends.backend_pdf.PdfPages`.
If *format* is *None* and *fname* is a string, the output
format is deduced from the extension of the filename. If
the filename has no extension, the value of the rc parameter
``savefig.format`` is used.
If *fname* is not a string, remember to specify *format* to
ensure that the correct backend is used.
Other Parameters
----------------
dpi : [ *None* | scalar > 0 | 'figure']
The resolution in dots per inch. If *None* it will default to
the value ``savefig.dpi`` in the matplotlibrc file. If 'figure'
it will set the dpi to be the value of the figure.
facecolor : color spec or None, optional
the facecolor of the figure; if None, defaults to savefig.facecolor
edgecolor : color spec or None, optional
the edgecolor of the figure; if None, defaults to savefig.edgecolor
orientation : {'landscape', 'portrait'}
not supported on all backends; currently only on postscript output
papertype : str
One of 'letter', 'legal', 'executive', 'ledger', 'a0' through
'a10', 'b0' through 'b10'. Only supported for postscript
output.
format : str
One of the file extensions supported by the active
backend. Most backends support png, pdf, ps, eps and svg.
transparent : bool
If *True*, the axes patches will all be transparent; the
figure patch will also be transparent unless facecolor
and/or edgecolor are specified via kwargs.
This is useful, for example, for displaying
a plot on top of a colored background on a web page. The
transparency of these patches will be restored to their
original values upon exit of this function.
frameon : bool
If *True*, the figure patch will be colored, if *False*, the
figure background will be transparent. If not provided, the
rcParam 'savefig.frameon' will be used.
bbox_inches : str or `~matplotlib.transforms.Bbox`, optional
Bbox in inches. Only the given portion of the figure is
saved. If 'tight', try to figure out the tight bbox of
the figure. If None, use savefig.bbox
pad_inches : scalar, optional
Amount of padding around the figure when bbox_inches is
'tight'. If None, use savefig.pad_inches
bbox_extra_artists : list of `~matplotlib.artist.Artist`, optional
A list of extra artists that will be considered when the
tight bbox is calculated.
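Examples
--------
A minimal usage sketch (assuming this figure has already been
populated and that the active backend can write PNG files)::
fig.savefig('figure.png', dpi=200, bbox_inches='tight')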
"""
kwargs.setdefault('dpi', rcParams['savefig.dpi'])
frameon = kwargs.pop('frameon', rcParams['savefig.frameon'])
transparent = kwargs.pop('transparent',
rcParams['savefig.transparent'])
if transparent:
kwargs.setdefault('facecolor', 'none')
kwargs.setdefault('edgecolor', 'none')
original_axes_colors = []
for ax in self.axes:
patch = ax.patch
original_axes_colors.append((patch.get_facecolor(),
patch.get_edgecolor()))
patch.set_facecolor('none')
patch.set_edgecolor('none')
else:
kwargs.setdefault('facecolor', rcParams['savefig.facecolor'])
kwargs.setdefault('edgecolor', rcParams['savefig.edgecolor'])
if frameon:
original_frameon = self.get_frameon()
self.set_frameon(frameon)
self.canvas.print_figure(fname, **kwargs)
if frameon:
self.set_frameon(original_frameon)
if transparent:
for ax, cc in zip(self.axes, original_axes_colors):
ax.patch.set_facecolor(cc[0])
ax.patch.set_edgecolor(cc[1])
@docstring.dedent_interpd
def colorbar(self, mappable, cax=None, ax=None, use_gridspec=True, **kw):
"""
Create a colorbar for a ScalarMappable instance, *mappable*.
Documentation for the pylab thin wrapper:
%(colorbar_doc)s
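For example, attaching a colorbar to an image on an existing Axes
``ax`` of this figure (a usage sketch; ``data`` is any 2-D array)::
im = ax.imshow(data)
cbar = fig.colorbar(im, ax=ax)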
"""
if ax is None:
ax = self.gca()
# Store the value of gca so that we can set it back later on.
current_ax = self.gca()
if cax is None:
if use_gridspec and isinstance(ax, SubplotBase) \
and (not self.get_constrained_layout()):
cax, kw = cbar.make_axes_gridspec(ax, **kw)
else:
cax, kw = cbar.make_axes(ax, **kw)
cax._hold = True
# need to remove kws that cannot be passed to Colorbar
NON_COLORBAR_KEYS = ['fraction', 'pad', 'shrink', 'aspect', 'anchor',
'panchor']
cb_kw = {k: v for k, v in kw.items() if k not in NON_COLORBAR_KEYS}
cb = cbar.colorbar_factory(cax, mappable, **cb_kw)
self.sca(current_ax)
self.stale = True
return cb
def subplots_adjust(self, *args, **kwargs):
"""
Call signature::
subplots_adjust(left=None, bottom=None, right=None, top=None,
wspace=None, hspace=None)
Update the :class:`SubplotParams` with *kwargs* (defaulting to rc when
*None*) and update the subplot locations.
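For example, to add vertical space between rows of subplots and
leave room at the top (a usage sketch)::
fig.subplots_adjust(hspace=0.4, top=0.9)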
"""
self.subplotpars.update(*args, **kwargs)
for ax in self.axes:
if not isinstance(ax, SubplotBase):
# Check if sharing a subplots axis
if isinstance(ax._sharex, SubplotBase):
ax._sharex.update_params()
ax.set_position(ax._sharex.figbox)
elif isinstance(ax._sharey, SubplotBase):
ax._sharey.update_params()
ax.set_position(ax._sharey.figbox)
else:
ax.update_params()
ax.set_position(ax.figbox)
self.stale = True
def ginput(self, n=1, timeout=30, show_clicks=True, mouse_add=1,
mouse_pop=3, mouse_stop=2):
"""
Blocking call to interact with a figure.
Wait until the user clicks *n* times on the figure, and return the
coordinates of each click in a list.
The buttons used for the various actions (adding points, removing
points, terminating the inputs) can be overridden via the
arguments *mouse_add*, *mouse_pop* and *mouse_stop*, that give
the associated mouse button: 1 for left, 2 for middle, 3 for
right.
Parameters
----------
n : int, optional, default: 1
Number of mouse clicks to accumulate. If negative, accumulate
clicks until the input is terminated manually.
timeout : scalar, optional, default: 30
Number of seconds to wait before timing out. If zero or negative
will never timeout.
show_clicks : bool, optional, default: True
If True, show a red cross at the location of each click.
mouse_add : int, one of (1, 2, 3), optional, default: 1 (left click)
Mouse button used to add points.
mouse_pop : int, one of (1, 2, 3), optional, default: 3 (right click)
Mouse button used to remove the most recently added point.
mouse_stop : int, one of (1, 2, 3), optional, default: 2 (middle click)
Mouse button used to stop input.
Returns
-------
points : list of tuples
A list of the clicked (x, y) coordinates.
Notes
-----
The keyboard can also be used to select points in case your mouse
does not have one or more of the buttons. The delete and backspace
keys act like right clicking (i.e., remove last point), the enter key
terminates input and any other key (not already used by the window
manager) selects a point.
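Examples
--------
Collect three clicks on an interactive backend (a usage sketch;
assumes ``matplotlib.pyplot`` is imported as ``plt``)::
fig = plt.figure()
plt.plot([0, 1], [0, 1])
points = fig.ginput(3)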
"""
blocking_mouse_input = BlockingMouseInput(self,
mouse_add=mouse_add,
mouse_pop=mouse_pop,
mouse_stop=mouse_stop)
return blocking_mouse_input(n=n, timeout=timeout,
show_clicks=show_clicks)
def waitforbuttonpress(self, timeout=-1):
"""
Blocking call to interact with the figure.
This will return True if a key was pressed, False if a mouse
button was pressed and None if *timeout* was reached without
either being pressed.
If *timeout* is negative, does not timeout.
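For example, to block for at most ten seconds and check whether a
key (rather than a mouse button) was pressed (a usage sketch)::
key_was_pressed = fig.waitforbuttonpress(timeout=10)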
"""
blocking_input = BlockingKeyMouseInput(self)
return blocking_input(timeout=timeout)
def get_default_bbox_extra_artists(self):
bbox_artists = [artist for artist in self.get_children()
if artist.get_visible()]
for ax in self.axes:
if ax.get_visible():
bbox_artists.extend(ax.get_default_bbox_extra_artists())
# we don't want the figure's patch to influence the bbox calculation
bbox_artists.remove(self.patch)
return bbox_artists
def get_tightbbox(self, renderer):
"""
Return a (tight) bounding box of the figure in inches.
It only accounts for the axes title, axis labels, and axis
ticklabels. Needs improvement.
"""
bb = []
for ax in self.axes:
if ax.get_visible():
bb.append(ax.get_tightbbox(renderer))
if len(bb) == 0:
return self.bbox_inches
_bbox = Bbox.union([b for b in bb if b.width != 0 or b.height != 0])
bbox_inches = TransformedBbox(_bbox,
Affine2D().scale(1. / self.dpi))
return bbox_inches
def init_layoutbox(self):
"""
Initialize the layoutbox for use in constrained_layout.
"""
if self._layoutbox is None:
self._layoutbox = layoutbox.LayoutBox(parent=None,
name='figlb',
artist=self)
self._layoutbox.constrain_geometry(0., 0., 1., 1.)
def execute_constrained_layout(self, renderer=None):
"""
Use ``layoutbox`` to determine pos positions within axes.
See also set_constrained_layout_pads
"""
from matplotlib._constrained_layout import (do_constrained_layout)
_log.debug('Executing constrainedlayout')
if self._layoutbox is None:
warnings.warn("Calling figure.constrained_layout, but figure "
"not setup to do constrained layout. "
" You either called GridSpec without the "
"fig keyword, you are using plt.subplot, "
"or you need to call figure or subplots"
"with the constrained_layout=True kwarg.")
return
w_pad, h_pad, wspace, hspace = self.get_constrained_layout_pads()
# convert to unit-relative lengths
fig = self
width, height = fig.get_size_inches()
w_pad = w_pad / width
h_pad = h_pad / height
if renderer is None:
renderer = layoutbox.get_renderer(fig)
do_constrained_layout(fig, renderer, h_pad, w_pad, hspace, wspace)
def tight_layout(self, renderer=None, pad=1.08, h_pad=None, w_pad=None,
rect=None):
"""
Adjust subplot parameters to give specified padding.
Parameters
----------
pad : float
padding between the figure edge and the edges of subplots,
as a fraction of the font-size.
h_pad, w_pad : float, optional
padding (height/width) between edges of adjacent subplots.
Defaults to `pad_inches`.
rect : tuple (left, bottom, right, top), optional
a rectangle (left, bottom, right, top) in the normalized
figure coordinate that the whole subplots area (including
labels) will fit into. Default is (0, 0, 1, 1).
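For example, to keep the top five percent of the figure free for a
suptitle (a usage sketch)::
fig.tight_layout(rect=(0, 0, 1, 0.95))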
"""
from .tight_layout import (
get_renderer, get_subplotspec_list, get_tight_layout_figure)
subplotspec_list = get_subplotspec_list(self.axes)
if None in subplotspec_list:
warnings.warn("This figure includes Axes that are not compatible "
"with tight_layout, so results might be incorrect.")
if renderer is None:
renderer = get_renderer(self)
kwargs = get_tight_layout_figure(
self, self.axes, subplotspec_list, renderer,
pad=pad, h_pad=h_pad, w_pad=w_pad, rect=rect)
self.subplots_adjust(**kwargs)
def align_xlabels(self, axs=None):
"""
Align the xlabels of subplots in the same subplot row if label
alignment is being done automatically (i.e. the label position is
not manually set).
Alignment persists for draw events after this is called.
If a label is on the bottom, it is aligned with labels on axes that
also have their label on the bottom and that have the same
bottom-most subplot row. If the label is on the top,
it is aligned with labels on axes with the same top-most row.
Parameters
----------
axs : list of `~matplotlib.axes.Axes` (None)
Optional list (or ndarray) of `~matplotlib.axes.Axes` to align
the xlabels. Default is to align all axes on the figure.
See Also
--------
matplotlib.figure.Figure.align_ylabels
matplotlib.figure.Figure.align_labels
Notes
-----
This assumes that ``axs`` are from the same `.GridSpec`, so that
their `.SubplotSpec` positions correspond to figure positions.
Examples
--------
Example with rotated xtick labels::
fig, axs = plt.subplots(1, 2)
for tick in axs[0].get_xticklabels():
tick.set_rotation(55)
axs[0].set_xlabel('XLabel 0')
axs[1].set_xlabel('XLabel 1')
fig.align_xlabels()
"""
if axs is None:
axs = self.axes
axs = np.asarray(axs).ravel()
for ax in axs:
_log.debug(' Working on: %s', ax.get_xlabel())
ss = ax.get_subplotspec()
nrows, ncols, row0, row1, col0, col1 = ss.get_rows_columns()
labpo = ax.xaxis.get_label_position() # top or bottom
# loop through other axes, and search for label positions
# that are same as this one, and that share the appropriate
# row number.
# Add to a grouper associated with each axes of siblings.
# This list is inspected in `axis.draw` by
# `axis._update_label_position`.
for axc in axs:
if axc.xaxis.get_label_position() == labpo:
ss = axc.get_subplotspec()
nrows, ncols, rowc0, rowc1, colc, col1 = \
ss.get_rows_columns()
if (labpo == 'bottom' and rowc1 == row1 or
labpo == 'top' and rowc0 == row0):
# grouper for groups of xlabels to align
self._align_xlabel_grp.join(ax, axc)
def align_ylabels(self, axs=None):
"""
Align the ylabels of subplots in the same subplot column if label
alignment is being done automatically (i.e. the label position is
not manually set).
Alignment persists for draw events after this is called.
If a label is on the left, it is aligned with labels on axes that
also have their label on the left and that have the same
left-most subplot column. If the label is on the right,
it is aligned with labels on axes with the same right-most column.
Parameters
----------
axs : list of `~matplotlib.axes.Axes` (None)
Optional list (or ndarray) of `~matplotlib.axes.Axes` to align
the ylabels. Default is to align all axes on the figure.
See Also
--------
matplotlib.figure.Figure.align_xlabels
matplotlib.figure.Figure.align_labels
Notes
-----
This assumes that ``axs`` are from the same `.GridSpec`, so that
their `.SubplotSpec` positions correspond to figure positions.
Examples
--------
Example with large yticks labels::
fig, axs = plt.subplots(2, 1)
axs[0].plot(np.arange(0, 1000, 50))
axs[0].set_ylabel('YLabel 0')
axs[1].set_ylabel('YLabel 1')
fig.align_ylabels()
"""
if axs is None:
axs = self.axes
axs = np.asarray(axs).ravel()
for ax in axs:
_log.debug(' Working on: %s', ax.get_ylabel())
ss = ax.get_subplotspec()
nrows, ncols, row0, row1, col0, col1 = ss.get_rows_columns()
same = [ax]
labpo = ax.yaxis.get_label_position() # left or right
# loop through other axes, and search for label positions
# that are same as this one, and that share the appropriate
# column number.
# Add to a grouper associated with each axes of siblings.
# This list is inspected in `axis.draw` by
# `axis._update_label_position`.
for axc in axs:
if axc != ax:
if axc.yaxis.get_label_position() == labpo:
ss = axc.get_subplotspec()
nrows, ncols, row0, row1, colc0, colc1 = \
ss.get_rows_columns()
if (labpo == 'left' and colc0 == col0 or
labpo == 'right' and colc1 == col1):
# grouper for groups of ylabels to align
self._align_ylabel_grp.join(ax, axc)
def align_labels(self, axs=None):
"""
Align the xlabels and ylabels of subplots with the same subplots
row or column (respectively) if label alignment is being
done automatically (i.e. the label position is not manually set).
Alignment persists for draw events after this is called.
Parameters
----------
axs : list of `~matplotlib.axes.Axes` (None)
Optional list (or ndarray) of `~matplotlib.axes.Axes` to
align the labels. Default is to align all axes on the figure.
See Also
--------
matplotlib.figure.Figure.align_xlabels
matplotlib.figure.Figure.align_ylabels
"""
self.align_xlabels(axs=axs)
self.align_ylabels(axs=axs)
def figaspect(arg):
"""
Create a figure with specified aspect ratio. If *arg* is a number,
use that aspect ratio. If *arg* is an array, figaspect will
determine the width and height for a figure that would fit array
preserving aspect ratio. The figure width, height in inches are
returned. Be sure to create an axes with equal width and height,
e.g.,
Example usage::
# make a figure twice as tall as it is wide
w, h = figaspect(2.)
fig = Figure(figsize=(w,h))
ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
ax.imshow(A, **kwargs)
# make a figure with the proper aspect for an array
A = rand(5,3)
w, h = figaspect(A)
fig = Figure(figsize=(w,h))
ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
ax.imshow(A, **kwargs)
Thanks to Fernando Perez for this function
"""
isarray = hasattr(arg, 'shape') and not np.isscalar(arg)
# min/max sizes to respect when autoscaling. If John likes the idea, they
# could become rc parameters, for now they're hardwired.
figsize_min = np.array((4.0, 2.0)) # min length for width/height
figsize_max = np.array((16.0, 16.0)) # max length for width/height
# Extract the aspect ratio of the array
if isarray:
nr, nc = arg.shape[:2]
arr_ratio = nr / nc
else:
arr_ratio = arg
# Height of user figure defaults
fig_height = rcParams['figure.figsize'][1]
# New size for the figure, keeping the aspect ratio of the caller
newsize = np.array((fig_height / arr_ratio, fig_height))
# Sanity checks, don't drop either dimension below figsize_min
newsize /= min(1.0, *(newsize / figsize_min))
# Avoid humongous windows as well
newsize /= max(1.0, *(newsize / figsize_max))
# Finally, if we have a really funky aspect ratio, break it but respect
# the min/max dimensions (we don't want figures 10 feet tall!)
newsize = np.clip(newsize, figsize_min, figsize_max)
return newsize
docstring.interpd.update(Figure=martist.kwdoc(Figure))
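# Illustrative sketch, not part of the original module: typical use of
# `figaspect` together with `Figure.align_ylabels` defined above. The demo
# array and the use of pyplot are assumptions for demonstration only.
def _figaspect_demo():
    import numpy as np
    import matplotlib.pyplot as plt
    A = np.random.rand(5, 3)                 # array whose aspect we preserve
    w, h = figaspect(A)                      # figure size matching the array
    fig, axs = plt.subplots(2, 1, figsize=(w, h))
    axs[0].imshow(A)                         # image with the array's aspect
    axs[0].set_ylabel('rows')
    axs[1].plot(A.sum(axis=1))
    axs[1].set_ylabel('row sums')
    fig.align_ylabels(axs)                   # align the two y-axis labels
    return fig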
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/type1font.py
"""
This module contains a class representing a Type 1 font.
This version reads pfa and pfb files and splits them for embedding in
pdf files. It also supports SlantFont and ExtendFont transformations,
similarly to pdfTeX and friends. There is no support yet for
subsetting.
Usage::
>>> font = Type1Font(filename)
>>> clear_part, encrypted_part, finale = font.parts
>>> slanted_font = font.transform({'slant': 0.167})
>>> extended_font = font.transform({'extend': 1.2})
Sources:
* Adobe Technical Note #5040, Supporting Downloadable PostScript
Language Fonts.
* Adobe Type 1 Font Format, Adobe Systems Incorporated, third printing,
v1.1, 1993. ISBN 0-201-57044-0.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import binascii
import io
import itertools
import re
import struct
import numpy as np
if six.PY3:
def ord(x):
return x
class Type1Font(object):
"""
A class representing a Type-1 font, for use by backends.
Attributes
----------
parts : tuple
A 3-tuple of the cleartext part, the encrypted part, and the finale of
zeros.
prop : Dict[str, Any]
A dictionary of font properties.
"""
__slots__ = ('parts', 'prop')
def __init__(self, input):
"""
Initialize a Type-1 font. *input* can be either the file name of
a pfb file or a 3-tuple of already-decoded Type-1 font parts.
"""
if isinstance(input, tuple) and len(input) == 3:
self.parts = input
else:
with open(input, 'rb') as file:
data = self._read(file)
self.parts = self._split(data)
self._parse()
def _read(self, file):
"""
Read the font from a file, decoding into usable parts.
"""
rawdata = file.read()
if not rawdata.startswith(b'\x80'):
return rawdata
data = b''
while len(rawdata) > 0:
if not rawdata.startswith(b'\x80'):
raise RuntimeError('Broken pfb file (expected byte 128, '
'got %d)' % ord(rawdata[0]))
type = ord(rawdata[1])
if type in (1, 2):
length, = struct.unpack(str('<i'), rawdata[2:6])
segment = rawdata[6:6 + length]
rawdata = rawdata[6 + length:]
if type == 1: # ASCII text: include verbatim
data += segment
elif type == 2: # binary data: encode in hexadecimal
data += binascii.hexlify(segment)
elif type == 3: # end of file
break
else:
raise RuntimeError('Unknown segment type %d in pfb file' %
type)
return data
def _split(self, data):
"""
Split the Type 1 font into its three main parts.
The three parts are: (1) the cleartext part, which ends in an
eexec operator; (2) the encrypted part; (3) the fixed part,
which contains 512 ASCII zeros possibly divided on various
lines, a cleartomark operator, and possibly something else.
"""
# Cleartext part: just find the eexec and skip whitespace
idx = data.index(b'eexec')
idx += len(b'eexec')
while data[idx] in b' \t\r\n':
idx += 1
len1 = idx
# Encrypted part: find the cleartomark operator and count
# zeros backward
idx = data.rindex(b'cleartomark') - 1
zeros = 512
while zeros and data[idx] in b'0\r\n':
if data[idx] in b'0':
zeros -= 1
idx -= 1
if zeros:
raise RuntimeError('Insufficiently many zeros in Type 1 font')
# Convert encrypted part to binary (if we read a pfb file, we
# may end up converting binary to hexadecimal to binary again;
# but if we read a pfa file, this part is already in hex, and
# I am not quite sure if even the pfb format guarantees that
# it will be in binary).
binary = binascii.unhexlify(data[len1:idx+1])
return data[:len1], binary, data[idx+1:]
_whitespace_re = re.compile(br'[\0\t\r\014\n ]+')
_token_re = re.compile(br'/{0,2}[^]\0\t\r\v\n ()<>{}/%[]+')
_comment_re = re.compile(br'%[^\r\n\v]*')
_instring_re = re.compile(br'[()\\]')
# token types, compared via object identity (poor man's enum)
_whitespace = object()
_name = object()
_string = object()
_delimiter = object()
_number = object()
@classmethod
def _tokens(cls, text):
"""
A PostScript tokenizer. Yield (token, value) pairs such as
(cls._whitespace, ' ') or (cls._name, '/Foobar').
"""
pos = 0
while pos < len(text):
match = (cls._comment_re.match(text[pos:]) or
cls._whitespace_re.match(text[pos:]))
if match:
yield (cls._whitespace, match.group())
pos += match.end()
elif text[pos] == b'(':
start = pos
pos += 1
depth = 1
while depth:
match = cls._instring_re.search(text[pos:])
if match is None:
return
pos += match.end()
if match.group() == b'(':
depth += 1
elif match.group() == b')':
depth -= 1
else: # a backslash - skip the next character
pos += 1
yield (cls._string, text[start:pos])
elif text[pos:pos + 2] in (b'<<', b'>>'):
yield (cls._delimiter, text[pos:pos + 2])
pos += 2
elif text[pos] == b'<':
start = pos
pos += text[pos:].index(b'>')
yield (cls._string, text[start:pos])
else:
match = cls._token_re.match(text[pos:])
if match:
try:
float(match.group())
yield (cls._number, match.group())
except ValueError:
yield (cls._name, match.group())
pos += match.end()
else:
yield (cls._delimiter, text[pos:pos + 1])
pos += 1
def _parse(self):
"""
Find the values of various font properties. This limited kind
of parsing is described in Chapter 10 "Adobe Type Manager
Compatibility" of the Type-1 spec.
"""
# Start with reasonable defaults
prop = {'weight': 'Regular', 'ItalicAngle': 0.0, 'isFixedPitch': False,
'UnderlinePosition': -100, 'UnderlineThickness': 50}
filtered = ((token, value)
for token, value in self._tokens(self.parts[0])
if token is not self._whitespace)
# The spec calls this an ASCII format; in Python 2.x we could
# just treat the strings and names as opaque bytes but let's
# turn them into proper Unicode, and be lenient in case of high bytes.
convert = lambda x: x.decode('ascii', 'replace')
for token, value in filtered:
if token is self._name and value.startswith(b'/'):
key = convert(value[1:])
token, value = next(filtered)
if token is self._name:
if value in (b'true', b'false'):
value = value == b'true'
else:
value = convert(value.lstrip(b'/'))
elif token is self._string:
value = convert(value.lstrip(b'(').rstrip(b')'))
elif token is self._number:
if b'.' in value:
value = float(value)
else:
value = int(value)
else: # more complicated value such as an array
value = None
if key != 'FontInfo' and value is not None:
prop[key] = value
# Fill in the various *Name properties
if 'FontName' not in prop:
prop['FontName'] = (prop.get('FullName') or
prop.get('FamilyName') or
'Unknown')
if 'FullName' not in prop:
prop['FullName'] = prop['FontName']
if 'FamilyName' not in prop:
extras = ('(?i)([ -](regular|plain|italic|oblique|(semi)?bold|'
'(ultra)?light|extra|condensed))+$')
prop['FamilyName'] = re.sub(extras, '', prop['FullName'])
self.prop = prop
@classmethod
def _transformer(cls, tokens, slant, extend):
def fontname(name):
result = name
if slant:
result += b'_Slant_' + str(int(1000 * slant)).encode('ascii')
if extend != 1.0:
result += b'_Extend_' + str(int(1000 * extend)).encode('ascii')
return result
def italicangle(angle):
return (str(float(angle) - np.arctan(slant) / np.pi * 180)
.encode('ascii'))
def fontmatrix(array):
array = array.lstrip(b'[').rstrip(b']').split()
array = [float(x) for x in array]
oldmatrix = np.eye(3, 3)
oldmatrix[0:3, 0] = array[::2]
oldmatrix[0:3, 1] = array[1::2]
modifier = np.array([[extend, 0, 0],
[slant, 1, 0],
[0, 0, 1]])
newmatrix = np.dot(modifier, oldmatrix)
array[::2] = newmatrix[0:3, 0]
array[1::2] = newmatrix[0:3, 1]
as_string = u'[' + u' '.join(str(x) for x in array) + u']'
return as_string.encode('latin-1')
def replace(fun):
def replacer(tokens):
token, value = next(tokens) # name, e.g., /FontMatrix
yield bytes(value)
token, value = next(tokens) # possible whitespace
while token is cls._whitespace:
yield bytes(value)
token, value = next(tokens)
if value != b'[': # name/number/etc.
yield bytes(fun(value))
else: # array, e.g., [1 2 3]
result = b''
while value != b']':
result += value
token, value = next(tokens)
result += value
yield fun(result)
return replacer
def suppress(tokens):
for x in itertools.takewhile(lambda x: x[1] != b'def', tokens):
pass
yield b''
table = {b'/FontName': replace(fontname),
b'/ItalicAngle': replace(italicangle),
b'/FontMatrix': replace(fontmatrix),
b'/UniqueID': suppress}
for token, value in tokens:
if token is cls._name and value in table:
for value in table[value](itertools.chain([(token, value)],
tokens)):
yield value
else:
yield value
def transform(self, effects):
"""
Transform the font by slanting or extending. *effects* should
be a dict where ``effects['slant']`` is the tangent of the
angle that the font is to be slanted to the right (so negative
values slant to the left) and ``effects['extend']`` is the
multiplier by which the font is to be extended (so values less
than 1.0 condense). Returns a new :class:`Type1Font` object.
"""
with io.BytesIO() as buffer:
tokenizer = self._tokens(self.parts[0])
transformed = self._transformer(tokenizer,
slant=effects.get('slant', 0.0),
extend=effects.get('extend', 1.0))
list(map(buffer.write, transformed))
return Type1Font((buffer.getvalue(), self.parts[1], self.parts[2]))
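# Illustrative sketch, not part of the original module: typical use of the
# class above, mirroring the module docstring. The path 'font.pfb' is a
# placeholder, not a file shipped with matplotlib.
def _type1font_demo(path='font.pfb'):
    font = Type1Font(path)                      # read and split a pfb/pfa file
    cleartext, encrypted, finale = font.parts   # the three embedded parts
    slanted = font.transform({'slant': 0.167})  # oblique variant
    extended = font.transform({'extend': 1.2})  # widened variant
    return slanted, extended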
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/units.py
"""
The classes here provide support for using custom classes with
Matplotlib, e.g., those that do not expose the array interface but know
how to convert themselves to arrays. It also supports classes with
units and units conversion. Use cases include converters for custom
objects, e.g., a list of datetime objects, as well as for objects that
are unit aware. We don't assume any particular units implementation;
rather, a units implementation must register with the Registry
converter dictionary and provide a `ConversionInterface`. For example,
here is a complete implementation which supports plotting with native
datetime objects::
import matplotlib.units as units
import matplotlib.dates as dates
import matplotlib.ticker as ticker
import datetime
class DateConverter(units.ConversionInterface):
@staticmethod
def convert(value, unit, axis):
'Convert a datetime value to a scalar or array'
return dates.date2num(value)
@staticmethod
def axisinfo(unit, axis):
'Return major and minor tick locators and formatters'
if unit!='date': return None
majloc = dates.AutoDateLocator()
majfmt = dates.AutoDateFormatter(majloc)
return AxisInfo(majloc=majloc,
majfmt=majfmt,
label='date')
@staticmethod
def default_units(x, axis):
'Return the default unit for x or None'
return 'date'
# Finally we register our object type with the Matplotlib units registry.
units.registry[datetime.date] = DateConverter()
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from matplotlib.cbook import iterable, is_numlike, safe_first_element
import numpy as np
class AxisInfo(object):
"""
Information to support default axis labeling, tick labeling, and
default limits. An instance of this class must be returned by
:meth:`ConversionInterface.axisinfo`.
"""
def __init__(self, majloc=None, minloc=None,
majfmt=None, minfmt=None, label=None,
default_limits=None):
"""
Parameters
----------
majloc, minloc : Locator, optional
Tick locators for the major and minor ticks.
majfmt, minfmt : Formatter, optional
Tick formatters for the major and minor ticks.
label : str, optional
The default axis label.
default_limits : optional
The default min and max limits of the axis if no data has
been plotted.
Notes
-----
If any of the above are ``None``, the axis will simply use the
default value.
"""
self.majloc = majloc
self.minloc = minloc
self.majfmt = majfmt
self.minfmt = minfmt
self.label = label
self.default_limits = default_limits
class ConversionInterface(object):
"""
The minimal interface for a converter to take custom data types (or
sequences) and convert them to values Matplotlib can use.
"""
@staticmethod
def axisinfo(unit, axis):
"""
Return an `~units.AxisInfo` instance for the axis with the
specified units.
"""
return None
@staticmethod
def default_units(x, axis):
"""
Return the default unit for *x* or ``None`` for the given axis.
"""
return None
@staticmethod
def convert(obj, unit, axis):
"""
Convert *obj* using *unit* for the specified *axis*.
If *obj* is a sequence, return the converted sequence.
The output must be a sequence of scalars that can be used by the numpy
array layer.
"""
return obj
@staticmethod
def is_numlike(x):
"""
The Matplotlib datalim, autoscaling, locators etc work with
scalars which are the units converted to floats given the
current unit. The converter may be passed these floats, or
arrays of them, even when units are set.
"""
if iterable(x):
for thisx in x:
return is_numlike(thisx)
else:
return is_numlike(x)
class Registry(dict):
"""
A register that maps types to conversion interfaces.
"""
def __init__(self):
dict.__init__(self)
self._cached = {}
def get_converter(self, x):
"""
Get the converter for data that has the same type as *x*. If no
converters are registered for *x*, returns ``None``.
"""
if not len(self):
return None # nothing registered
# DISABLED idx = id(x)
# DISABLED cached = self._cached.get(idx)
# DISABLED if cached is not None: return cached
converter = None
classx = getattr(x, '__class__', None)
if classx is not None:
converter = self.get(classx)
if converter is None and hasattr(x, "values"):
# this unpacks pandas series or dataframes...
x = x.values
# If x is an array, look inside the array for data with units
if isinstance(x, np.ndarray) and x.size:
xravel = x.ravel()
try:
# pass the first value of x that is not masked back to
# get_converter
if not np.all(xravel.mask):
# some elements are not masked
converter = self.get_converter(
xravel[np.argmin(xravel.mask)])
return converter
except AttributeError:
# not a masked_array
# Make sure we don't recurse forever -- it's possible for
# ndarray subclasses to continue to return subclasses and
# not ever return a non-subclass for a single element.
next_item = xravel[0]
if (not isinstance(next_item, np.ndarray) or
next_item.shape != x.shape):
converter = self.get_converter(next_item)
return converter
# If we haven't found a converter yet, try to get the first element
if converter is None:
try:
thisx = safe_first_element(x)
except (TypeError, StopIteration):
pass
else:
if classx and classx != getattr(thisx, '__class__', None):
converter = self.get_converter(thisx)
return converter
# DISABLED self._cached[idx] = converter
return converter
registry = Registry()
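# Illustrative sketch, not part of the original module: registering a minimal
# converter for a hypothetical unit-bearing class, following the DateConverter
# example in the module docstring. `_Celsius` and its converter are made up
# for demonstration only; nothing is registered until the helper is called.
class _Celsius(object):
    def __init__(self, value):
        self.value = value
class _CelsiusConverter(ConversionInterface):
    @staticmethod
    def convert(obj, unit, axis):
        # unwrap a single value or a sequence of values into plain floats
        if iterable(obj):
            return [v.value for v in obj]
        return obj.value
    @staticmethod
    def axisinfo(unit, axis):
        return AxisInfo(label='temperature (Celsius)')
    @staticmethod
    def default_units(x, axis):
        return 'celsius'
def _register_celsius_demo():
    registry[_Celsius] = _CelsiusConverter()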
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/axis.py
"""
Classes for the ticks and x and y axis
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import logging
from matplotlib import rcParams
import matplotlib.artist as artist
from matplotlib.artist import allow_rasterization
import matplotlib.cbook as cbook
from matplotlib.cbook import _string_to_bool
import matplotlib.font_manager as font_manager
import matplotlib.lines as mlines
import matplotlib.patches as mpatches
import matplotlib.scale as mscale
import matplotlib.text as mtext
import matplotlib.ticker as mticker
import matplotlib.transforms as mtransforms
import matplotlib.units as munits
import numpy as np
import warnings
_log = logging.getLogger(__name__)
GRIDLINE_INTERPOLATION_STEPS = 180
# This list is being used for compatibility with Axes.grid, which
# allows all Line2D kwargs.
_line_AI = artist.ArtistInspector(mlines.Line2D)
_line_param_names = _line_AI.get_setters()
_line_param_aliases = [list(d.keys())[0] for d in _line_AI.aliasd.values()]
_gridline_param_names = ['grid_' + name
for name in _line_param_names + _line_param_aliases]
class Tick(artist.Artist):
"""
Abstract base class for the axis ticks, grid lines and labels
1 refers to the bottom of the plot for xticks and the left for yticks
2 refers to the top of the plot for xticks and the right for yticks
Attributes
----------
tick1line : Line2D
tick2line : Line2D
gridline : Line2D
label1 : Text
label2 : Text
gridOn : bool
Determines whether to draw the gridline.
tick1On : bool
Determines whether to draw the first tickline.
tick2On : bool
Determines whether to draw the second tickline.
label1On : bool
Determines whether to draw the first tick label.
label2On : bool
Determines whether to draw the second tick label.
"""
def __init__(self, axes, loc, label,
size=None, # points
width=None,
color=None,
tickdir=None,
pad=None,
labelsize=None,
labelcolor=None,
zorder=None,
gridOn=None, # defaults to axes.grid depending on
# axes.grid.which
tick1On=True,
tick2On=True,
label1On=True,
label2On=False,
major=True,
labelrotation=0,
grid_color=None,
grid_linestyle=None,
grid_linewidth=None,
grid_alpha=None,
**kw # Other Line2D kwargs applied to gridlines.
):
"""
bbox is the Bound2D bounding box in display coords of the Axes
loc is the tick location in data coords
size is the tick size in points
"""
artist.Artist.__init__(self)
if gridOn is None:
if major and (rcParams['axes.grid.which'] in ('both', 'major')):
gridOn = rcParams['axes.grid']
elif (not major) and (rcParams['axes.grid.which']
in ('both', 'minor')):
gridOn = rcParams['axes.grid']
else:
gridOn = False
self.set_figure(axes.figure)
self.axes = axes
name = self.__name__.lower()
self._name = name
self._loc = loc
if size is None:
if major:
size = rcParams['%s.major.size' % name]
else:
size = rcParams['%s.minor.size' % name]
self._size = size
if width is None:
if major:
width = rcParams['%s.major.width' % name]
else:
width = rcParams['%s.minor.width' % name]
self._width = width
if color is None:
color = rcParams['%s.color' % name]
self._color = color
if pad is None:
if major:
pad = rcParams['%s.major.pad' % name]
else:
pad = rcParams['%s.minor.pad' % name]
self._base_pad = pad
if labelcolor is None:
labelcolor = rcParams['%s.color' % name]
self._labelcolor = labelcolor
if labelsize is None:
labelsize = rcParams['%s.labelsize' % name]
self._labelsize = labelsize
self._set_labelrotation(labelrotation)
if zorder is None:
if major:
zorder = mlines.Line2D.zorder + 0.01
else:
zorder = mlines.Line2D.zorder
self._zorder = zorder
self._grid_color = (rcParams['grid.color']
if grid_color is None else grid_color)
self._grid_linestyle = (rcParams['grid.linestyle']
if grid_linestyle is None else grid_linestyle)
self._grid_linewidth = (rcParams['grid.linewidth']
if grid_linewidth is None else grid_linewidth)
self._grid_alpha = (rcParams['grid.alpha']
if grid_alpha is None else grid_alpha)
self._grid_kw = {k[5:]: v for k, v in kw.items()}
self.apply_tickdir(tickdir)
self.tick1line = self._get_tick1line()
self.tick2line = self._get_tick2line()
self.gridline = self._get_gridline()
self.label1 = self._get_text1()
self.label = self.label1 # legacy name
self.label2 = self._get_text2()
self.gridOn = gridOn
self.tick1On = tick1On
self.tick2On = tick2On
self.label1On = label1On
self.label2On = label2On
self.update_position(loc)
def _set_labelrotation(self, labelrotation):
if isinstance(labelrotation, six.string_types):
mode = labelrotation
angle = 0
elif isinstance(labelrotation, (tuple, list)):
mode, angle = labelrotation
else:
mode = 'default'
angle = labelrotation
if mode not in ('auto', 'default'):
raise ValueError("Label rotation mode must be 'default' or "
"'auto', not '{}'.".format(mode))
self._labelrotation = (mode, angle)
def apply_tickdir(self, tickdir):
"""
Calculate self._pad and self._tickmarkers
"""
pass
def get_tickdir(self):
return self._tickdir
def get_tick_padding(self):
"""
Get the length of the tick outside of the axes.
"""
padding = {
'in': 0.0,
'inout': 0.5,
'out': 1.0
}
return self._size * padding[self._tickdir]
def get_children(self):
children = [self.tick1line, self.tick2line,
self.gridline, self.label1, self.label2]
return children
def set_clip_path(self, clippath, transform=None):
artist.Artist.set_clip_path(self, clippath, transform)
self.gridline.set_clip_path(clippath, transform)
self.stale = True
set_clip_path.__doc__ = artist.Artist.set_clip_path.__doc__
def get_pad_pixels(self):
return self.figure.dpi * self._base_pad / 72.0
def contains(self, mouseevent):
"""
Test whether the mouse event occurred in the Tick marks.
This function always returns false. It is more useful to test if the
axis as a whole contains the mouse rather than the set of tick marks.
"""
if callable(self._contains):
return self._contains(self, mouseevent)
return False, {}
def set_pad(self, val):
"""
Set the tick label pad in points
ACCEPTS: float
"""
self._apply_params(pad=val)
self.stale = True
def get_pad(self):
'Get the value of the tick label pad in points'
return self._base_pad
def _get_text1(self):
'Get the default Text 1 instance'
pass
def _get_text2(self):
'Get the default Text 2 instance'
pass
def _get_tick1line(self):
'Get the default line2D instance for tick1'
pass
def _get_tick2line(self):
'Get the default line2D instance for tick2'
pass
def _get_gridline(self):
'Get the default grid Line2d instance for this tick'
pass
def get_loc(self):
'Return the tick location (data coords) as a scalar'
return self._loc
@allow_rasterization
def draw(self, renderer):
if not self.get_visible():
self.stale = False
return
renderer.open_group(self.__name__)
if self.gridOn:
self.gridline.draw(renderer)
if self.tick1On:
self.tick1line.draw(renderer)
if self.tick2On:
self.tick2line.draw(renderer)
if self.label1On:
self.label1.draw(renderer)
if self.label2On:
self.label2.draw(renderer)
renderer.close_group(self.__name__)
self.stale = False
def set_label1(self, s):
"""
Set the text of ticklabel
ACCEPTS: str
"""
self.label1.set_text(s)
self.stale = True
set_label = set_label1
def set_label2(self, s):
"""
Set the text of ticklabel2
ACCEPTS: str
"""
self.label2.set_text(s)
self.stale = True
def _set_artist_props(self, a):
a.set_figure(self.figure)
def get_view_interval(self):
'return the view Interval instance for the axis this tick is ticking'
raise NotImplementedError('Derived must override')
def _apply_params(self, **kw):
switchkw = ['gridOn', 'tick1On', 'tick2On', 'label1On', 'label2On']
switches = [k for k in kw if k in switchkw]
for k in switches:
setattr(self, k, kw.pop(k))
newmarker = [k for k in kw if k in ['size', 'width', 'pad', 'tickdir']]
if newmarker:
self._size = kw.pop('size', self._size)
# Width could be handled outside this block, but it is
# convenient to leave it here.
self._width = kw.pop('width', self._width)
self._base_pad = kw.pop('pad', self._base_pad)
# apply_tickdir uses _size and _base_pad to make _pad,
# and also makes _tickmarkers.
self.apply_tickdir(kw.pop('tickdir', self._tickdir))
self.tick1line.set_marker(self._tickmarkers[0])
self.tick2line.set_marker(self._tickmarkers[1])
for line in (self.tick1line, self.tick2line):
line.set_markersize(self._size)
line.set_markeredgewidth(self._width)
# _get_text1_transform uses _pad from apply_tickdir.
trans = self._get_text1_transform()[0]
self.label1.set_transform(trans)
trans = self._get_text2_transform()[0]
self.label2.set_transform(trans)
tick_kw = {k: v for k, v in six.iteritems(kw)
if k in ['color', 'zorder']}
if tick_kw:
self.tick1line.set(**tick_kw)
self.tick2line.set(**tick_kw)
for k, v in six.iteritems(tick_kw):
setattr(self, '_' + k, v)
if 'labelrotation' in kw:
self._set_labelrotation(kw.pop('labelrotation'))
self.label1.set(rotation=self._labelrotation[1])
self.label2.set(rotation=self._labelrotation[1])
label_list = [k for k in six.iteritems(kw)
if k[0] in ['labelsize', 'labelcolor']]
if label_list:
label_kw = {k[5:]: v for k, v in label_list}
self.label1.set(**label_kw)
self.label2.set(**label_kw)
for k, v in six.iteritems(label_kw):
# for labelsize the text objects convert str ('small')
# -> points. grab the integer from the `Text` object
# instead of saving the string representation
v = getattr(self.label1, 'get_' + k)()
setattr(self, '_label' + k, v)
grid_list = [k for k in six.iteritems(kw)
if k[0] in _gridline_param_names]
if grid_list:
grid_kw = {k[5:]: v for k, v in grid_list}
self.gridline.set(**grid_kw)
for k, v in six.iteritems(grid_kw):
setattr(self, '_grid_' + k, v)
def update_position(self, loc):
'Set the location of tick in data coords with scalar *loc*'
raise NotImplementedError('Derived must override')
def _get_text1_transform(self):
raise NotImplementedError('Derived must override')
def _get_text2_transform(self):
raise NotImplementedError('Derived must override')
class XTick(Tick):
"""
Contains all the Artists needed to make an x tick - the tick line,
the label text and the grid line
"""
__name__ = 'xtick'
def _get_text1_transform(self):
return self.axes.get_xaxis_text1_transform(self._pad)
def _get_text2_transform(self):
return self.axes.get_xaxis_text2_transform(self._pad)
def apply_tickdir(self, tickdir):
if tickdir is None:
tickdir = rcParams['%s.direction' % self._name]
self._tickdir = tickdir
if self._tickdir == 'in':
self._tickmarkers = (mlines.TICKUP, mlines.TICKDOWN)
elif self._tickdir == 'inout':
self._tickmarkers = ('|', '|')
else:
self._tickmarkers = (mlines.TICKDOWN, mlines.TICKUP)
self._pad = self._base_pad + self.get_tick_padding()
self.stale = True
def _get_text1(self):
'Get the default Text instance'
# the y loc is 3 points below the min of y axis
# get the affine as an a,b,c,d,tx,ty list
# x in data coords, y in axes coords
trans, vert, horiz = self._get_text1_transform()
t = mtext.Text(
x=0, y=0,
fontproperties=font_manager.FontProperties(size=self._labelsize),
color=self._labelcolor,
verticalalignment=vert,
horizontalalignment=horiz,
)
t.set_transform(trans)
self._set_artist_props(t)
return t
def _get_text2(self):
'Get the default Text 2 instance'
# x in data coords, y in axes coords
trans, vert, horiz = self._get_text2_transform()
t = mtext.Text(
x=0, y=1,
fontproperties=font_manager.FontProperties(size=self._labelsize),
color=self._labelcolor,
verticalalignment=vert,
horizontalalignment=horiz,
)
t.set_transform(trans)
self._set_artist_props(t)
return t
def _get_tick1line(self):
'Get the default line2D instance'
# x in data coords, y in axes coords
l = mlines.Line2D(xdata=(0,), ydata=(0,), color=self._color,
linestyle='None', marker=self._tickmarkers[0],
markersize=self._size,
markeredgewidth=self._width, zorder=self._zorder)
l.set_transform(self.axes.get_xaxis_transform(which='tick1'))
self._set_artist_props(l)
return l
def _get_tick2line(self):
'Get the default line2D instance'
# x in data coords, y in axes coords
l = mlines.Line2D(xdata=(0,), ydata=(1,),
color=self._color,
linestyle='None',
marker=self._tickmarkers[1],
markersize=self._size,
markeredgewidth=self._width,
zorder=self._zorder)
l.set_transform(self.axes.get_xaxis_transform(which='tick2'))
self._set_artist_props(l)
return l
def _get_gridline(self):
'Get the default line2D instance'
# x in data coords, y in axes coords
l = mlines.Line2D(xdata=(0.0, 0.0), ydata=(0, 1.0),
color=self._grid_color,
linestyle=self._grid_linestyle,
linewidth=self._grid_linewidth,
alpha=self._grid_alpha,
markersize=0,
**self._grid_kw)
l.set_transform(self.axes.get_xaxis_transform(which='grid'))
l.get_path()._interpolation_steps = GRIDLINE_INTERPOLATION_STEPS
self._set_artist_props(l)
return l
def update_position(self, loc):
'Set the location of tick in data coords with scalar *loc*'
if self.tick1On:
self.tick1line.set_xdata((loc,))
if self.tick2On:
self.tick2line.set_xdata((loc,))
if self.gridOn:
self.gridline.set_xdata((loc,))
if self.label1On:
self.label1.set_x(loc)
if self.label2On:
self.label2.set_x(loc)
self._loc = loc
self.stale = True
def get_view_interval(self):
'return the Interval instance for this axis view limits'
return self.axes.viewLim.intervalx
class YTick(Tick):
"""
Contains all the Artists needed to make a Y tick - the tick line,
the label text and the grid line
"""
__name__ = 'ytick'
def _get_text1_transform(self):
return self.axes.get_yaxis_text1_transform(self._pad)
def _get_text2_transform(self):
return self.axes.get_yaxis_text2_transform(self._pad)
def apply_tickdir(self, tickdir):
if tickdir is None:
tickdir = rcParams['%s.direction' % self._name]
self._tickdir = tickdir
if self._tickdir == 'in':
self._tickmarkers = (mlines.TICKRIGHT, mlines.TICKLEFT)
elif self._tickdir == 'inout':
self._tickmarkers = ('_', '_')
else:
self._tickmarkers = (mlines.TICKLEFT, mlines.TICKRIGHT)
self._pad = self._base_pad + self.get_tick_padding()
self.stale = True
    # how far the right edge of the tick labels is from the y axis line
def _get_text1(self):
'Get the default Text instance'
# x in axes coords, y in data coords
trans, vert, horiz = self._get_text1_transform()
t = mtext.Text(
x=0, y=0,
fontproperties=font_manager.FontProperties(size=self._labelsize),
color=self._labelcolor,
verticalalignment=vert,
horizontalalignment=horiz,
)
t.set_transform(trans)
self._set_artist_props(t)
return t
def _get_text2(self):
'Get the default Text instance'
# x in axes coords, y in data coords
trans, vert, horiz = self._get_text2_transform()
t = mtext.Text(
x=1, y=0,
fontproperties=font_manager.FontProperties(size=self._labelsize),
color=self._labelcolor,
verticalalignment=vert,
horizontalalignment=horiz,
)
t.set_transform(trans)
self._set_artist_props(t)
return t
def _get_tick1line(self):
'Get the default line2D instance'
# x in axes coords, y in data coords
l = mlines.Line2D((0,), (0,),
color=self._color,
marker=self._tickmarkers[0],
linestyle='None',
markersize=self._size,
markeredgewidth=self._width,
zorder=self._zorder)
l.set_transform(self.axes.get_yaxis_transform(which='tick1'))
self._set_artist_props(l)
return l
def _get_tick2line(self):
'Get the default line2D instance'
# x in axes coords, y in data coords
l = mlines.Line2D((1,), (0,),
color=self._color,
marker=self._tickmarkers[1],
linestyle='None',
markersize=self._size,
markeredgewidth=self._width,
zorder=self._zorder)
l.set_transform(self.axes.get_yaxis_transform(which='tick2'))
self._set_artist_props(l)
return l
def _get_gridline(self):
'Get the default line2D instance'
# x in axes coords, y in data coords
l = mlines.Line2D(xdata=(0, 1), ydata=(0, 0),
color=self._grid_color,
linestyle=self._grid_linestyle,
linewidth=self._grid_linewidth,
alpha=self._grid_alpha,
markersize=0,
**self._grid_kw)
l.set_transform(self.axes.get_yaxis_transform(which='grid'))
l.get_path()._interpolation_steps = GRIDLINE_INTERPOLATION_STEPS
self._set_artist_props(l)
return l
def update_position(self, loc):
'Set the location of tick in data coords with scalar *loc*'
if self.tick1On:
self.tick1line.set_ydata((loc,))
if self.tick2On:
self.tick2line.set_ydata((loc,))
if self.gridOn:
self.gridline.set_ydata((loc,))
if self.label1On:
self.label1.set_y(loc)
if self.label2On:
self.label2.set_y(loc)
self._loc = loc
self.stale = True
def get_view_interval(self):
'return the Interval instance for this axis view limits'
return self.axes.viewLim.intervaly
class Ticker(object):
locator = None
formatter = None
class _LazyTickList(object):
"""
A descriptor for lazy instantiation of tick lists.
See comment above definition of the ``majorTicks`` and ``minorTicks``
attributes.
"""
def __init__(self, major):
self._major = major
def __get__(self, instance, cls):
if instance is None:
return self
else:
# instance._get_tick() can itself try to access the majorTicks
# attribute (e.g. in certain projection classes which override
# e.g. get_xaxis_text1_transform). In order to avoid infinite
# recursion, first set the majorTicks on the instance to an empty
# list, then create the tick and append it.
if self._major:
instance.majorTicks = []
tick = instance._get_tick(major=True)
instance.majorTicks.append(tick)
return instance.majorTicks
else:
instance.minorTicks = []
tick = instance._get_tick(major=False)
instance.minorTicks.append(tick)
return instance.minorTicks
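# --- Illustrative sketch (editorial addition, not part of the upstream module) ---
# A minimal, hedged demonstration of the lazy tick lists defined above: the
# first Tick is only instantiated when ``majorTicks``/``minorTicks`` is first
# accessed, not when the Axis is constructed.  ``_demo_lazy_tick_lists`` is a
# hypothetical helper and is never called by the library itself.
def _demo_lazy_tick_lists():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    # First attribute access triggers _LazyTickList.__get__, which seeds the
    # list with a single freshly created Tick.
    n_before = len(ax.xaxis.majorTicks)
    fig.canvas.draw()
    # After a draw, get_major_ticks() has grown the list to match the locator.
    n_after = len(ax.xaxis.majorTicks)
    return n_before, n_after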
class Axis(artist.Artist):
"""
Public attributes
* :attr:`axes.transData` - transform data coords to display coords
* :attr:`axes.transAxes` - transform axis coords to display coords
* :attr:`labelpad` - number of points between the axis and its label
"""
OFFSETTEXTPAD = 3
def __str__(self):
return self.__class__.__name__ \
+ "(%f,%f)" % tuple(self.axes.transAxes.transform_point((0, 0)))
def __init__(self, axes, pickradius=15):
"""
Init the axis with the parent Axes instance
"""
artist.Artist.__init__(self)
self.set_figure(axes.figure)
self.isDefault_label = True
self.axes = axes
self.major = Ticker()
self.minor = Ticker()
self.callbacks = cbook.CallbackRegistry()
self._autolabelpos = True
self._smart_bounds = False
self.label = self._get_label()
self.labelpad = rcParams['axes.labelpad']
self.offsetText = self._get_offset_text()
self.pickradius = pickradius
# Initialize here for testing; later add API
self._major_tick_kw = dict()
self._minor_tick_kw = dict()
self.cla()
self._set_scale('linear')
# During initialization, Axis objects often create ticks that are later
# unused; this turns out to be a very slow step. Instead, use a custom
# descriptor to make the tick lists lazy and instantiate them as needed.
majorTicks = _LazyTickList(major=True)
minorTicks = _LazyTickList(major=False)
def set_label_coords(self, x, y, transform=None):
"""
Set the coordinates of the label. By default, the x
coordinate of the y label is determined by the tick label
bounding boxes, but this can lead to poor alignment of
multiple ylabels if there are multiple axes. Ditto for the y
coordinate of the x label.
You can also specify the coordinate system of the label with
the transform. If None, the default coordinate system will be
        the axes coordinate system, where (0, 0) is (left, bottom), (0.5, 0.5)
        is the middle, etc.
"""
self._autolabelpos = False
if transform is None:
transform = self.axes.transAxes
self.label.set_transform(transform)
self.label.set_position((x, y))
self.stale = True
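    # Illustrative usage (editorial note, assuming a standard Axes ``ax``):
    # pinning the y label of several axes to the same x position so they
    # line up, e.g.
    #
    #     for ax in fig.axes:
    #         ax.yaxis.set_label_coords(-0.1, 0.5)   # axes-fraction coords
    #
    # With ``transform=None`` the coordinates are interpreted in the axes
    # coordinate system, as described in the docstring above.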
def get_transform(self):
return self._scale.get_transform()
def get_scale(self):
return self._scale.name
def _set_scale(self, value, **kwargs):
self._scale = mscale.scale_factory(value, self, **kwargs)
self._scale.set_default_locators_and_formatters(self)
self.isDefault_majloc = True
self.isDefault_minloc = True
self.isDefault_majfmt = True
self.isDefault_minfmt = True
def limit_range_for_scale(self, vmin, vmax):
return self._scale.limit_range_for_scale(vmin, vmax, self.get_minpos())
@property
@cbook.deprecated("2.2.0")
def unit_data(self):
return self.units
@unit_data.setter
@cbook.deprecated("2.2.0")
def unit_data(self, unit_data):
self.set_units(unit_data)
def get_children(self):
children = [self.label, self.offsetText]
majorticks = self.get_major_ticks()
minorticks = self.get_minor_ticks()
children.extend(majorticks)
children.extend(minorticks)
return children
def cla(self):
'clear the current axis'
self.label.set_text('') # self.set_label_text would change isDefault_
self._set_scale('linear')
# Clear the callback registry for this axis, or it may "leak"
self.callbacks = cbook.CallbackRegistry()
# whether the grids are on
self._gridOnMajor = (rcParams['axes.grid'] and
rcParams['axes.grid.which'] in ('both', 'major'))
self._gridOnMinor = (rcParams['axes.grid'] and
rcParams['axes.grid.which'] in ('both', 'minor'))
self.reset_ticks()
self.converter = None
self.units = None
self.set_units(None)
self.stale = True
def reset_ticks(self):
"""
Re-initialize the major and minor Tick lists.
Each list starts with a single fresh Tick.
"""
# Restore the lazy tick lists.
try:
del self.majorTicks
except AttributeError:
pass
try:
del self.minorTicks
except AttributeError:
pass
try:
self.set_clip_path(self.axes.patch)
except AttributeError:
pass
def set_tick_params(self, which='major', reset=False, **kw):
"""
Set appearance parameters for ticks, ticklabels, and gridlines.
For documentation of keyword arguments, see
:meth:`matplotlib.axes.Axes.tick_params`.
"""
dicts = []
if which == 'major' or which == 'both':
dicts.append(self._major_tick_kw)
if which == 'minor' or which == 'both':
dicts.append(self._minor_tick_kw)
kwtrans = self._translate_tick_kw(kw, to_init_kw=True)
for d in dicts:
if reset:
d.clear()
d.update(kwtrans)
if reset:
self.reset_ticks()
else:
if which == 'major' or which == 'both':
for tick in self.majorTicks:
tick._apply_params(**self._major_tick_kw)
if which == 'minor' or which == 'both':
for tick in self.minorTicks:
tick._apply_params(**self._minor_tick_kw)
if 'labelcolor' in kwtrans:
self.offsetText.set_color(kwtrans['labelcolor'])
self.stale = True
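    # Illustrative usage (editorial note): the public entry point is
    # Axes.tick_params, which forwards its keyword arguments here, e.g.
    #
    #     ax.tick_params(axis='x', which='minor', direction='in', length=2)
    #
    # 'direction' and 'length' are translated to 'tickdir' and 'size' by
    # _translate_tick_kw() below.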
@staticmethod
def _translate_tick_kw(kw, to_init_kw=True):
# The following lists may be moved to a more
# accessible location.
kwkeys0 = ['size', 'width', 'color', 'tickdir', 'pad',
'labelsize', 'labelcolor', 'zorder', 'gridOn',
'tick1On', 'tick2On', 'label1On', 'label2On']
kwkeys1 = ['length', 'direction', 'left', 'bottom', 'right', 'top',
'labelleft', 'labelbottom', 'labelright', 'labeltop',
'labelrotation']
kwkeys2 = _gridline_param_names
kwkeys = kwkeys0 + kwkeys1 + kwkeys2
kwtrans = dict()
if to_init_kw:
if 'length' in kw:
kwtrans['size'] = kw.pop('length')
if 'direction' in kw:
kwtrans['tickdir'] = kw.pop('direction')
if 'rotation' in kw:
kwtrans['labelrotation'] = kw.pop('rotation')
if 'left' in kw:
kwtrans['tick1On'] = _string_to_bool(kw.pop('left'))
if 'bottom' in kw:
kwtrans['tick1On'] = _string_to_bool(kw.pop('bottom'))
if 'right' in kw:
kwtrans['tick2On'] = _string_to_bool(kw.pop('right'))
if 'top' in kw:
kwtrans['tick2On'] = _string_to_bool(kw.pop('top'))
if 'labelleft' in kw:
kwtrans['label1On'] = _string_to_bool(kw.pop('labelleft'))
if 'labelbottom' in kw:
kwtrans['label1On'] = _string_to_bool(kw.pop('labelbottom'))
if 'labelright' in kw:
kwtrans['label2On'] = _string_to_bool(kw.pop('labelright'))
if 'labeltop' in kw:
kwtrans['label2On'] = _string_to_bool(kw.pop('labeltop'))
if 'colors' in kw:
c = kw.pop('colors')
kwtrans['color'] = c
kwtrans['labelcolor'] = c
# Maybe move the checking up to the caller of this method.
for key in kw:
if key not in kwkeys:
raise ValueError(
"keyword %s is not recognized; valid keywords are %s"
% (key, kwkeys))
kwtrans.update(kw)
else:
raise NotImplementedError("Inverse translation is deferred")
return kwtrans
def set_clip_path(self, clippath, transform=None):
artist.Artist.set_clip_path(self, clippath, transform)
for child in self.majorTicks + self.minorTicks:
child.set_clip_path(clippath, transform)
self.stale = True
def get_view_interval(self):
'return the Interval instance for this axis view limits'
raise NotImplementedError('Derived must override')
def set_view_interval(self, vmin, vmax, ignore=False):
raise NotImplementedError('Derived must override')
def get_data_interval(self):
'return the Interval instance for this axis data limits'
raise NotImplementedError('Derived must override')
def set_data_interval(self):
'''set the axis data limits'''
raise NotImplementedError('Derived must override')
def set_default_intervals(self):
'''set the default limits for the axis data and view interval if they
are not mutated'''
# this is mainly in support of custom object plotting. For
# example, if someone passes in a datetime object, we do not
# know automagically how to set the default min/max of the
# data and view limits. The unit conversion AxisInfo
# interface provides a hook for custom types to register
# default limits through the AxisInfo.default_limits
# attribute, and the derived code below will check for that
        # and use it if it is available (else just use 0..1)
pass
def _set_artist_props(self, a):
if a is None:
return
a.set_figure(self.figure)
def iter_ticks(self):
"""
Iterate through all of the major and minor ticks.
"""
majorLocs = self.major.locator()
majorTicks = self.get_major_ticks(len(majorLocs))
self.major.formatter.set_locs(majorLocs)
majorLabels = [self.major.formatter(val, i)
for i, val in enumerate(majorLocs)]
minorLocs = self.minor.locator()
minorTicks = self.get_minor_ticks(len(minorLocs))
self.minor.formatter.set_locs(minorLocs)
minorLabels = [self.minor.formatter(val, i)
for i, val in enumerate(minorLocs)]
major_minor = [
(majorTicks, majorLocs, majorLabels),
(minorTicks, minorLocs, minorLabels)]
for group in major_minor:
for tick in zip(*group):
yield tick
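    # Illustrative note (editorial addition): each item yielded above is a
    # ``(tick, location, label)`` triple, majors first, e.g.
    #
    #     for tick, loc, label in ax.xaxis.iter_ticks():
    #         print(loc, label)
    #
    # which is how _update_ticks() below consumes it.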
def get_ticklabel_extents(self, renderer):
"""
Get the extents of the tick labels on either side
of the axes.
"""
ticks_to_draw = self._update_ticks(renderer)
ticklabelBoxes, ticklabelBoxes2 = self._get_tick_bboxes(ticks_to_draw,
renderer)
if len(ticklabelBoxes):
bbox = mtransforms.Bbox.union(ticklabelBoxes)
else:
bbox = mtransforms.Bbox.from_extents(0, 0, 0, 0)
if len(ticklabelBoxes2):
bbox2 = mtransforms.Bbox.union(ticklabelBoxes2)
else:
bbox2 = mtransforms.Bbox.from_extents(0, 0, 0, 0)
return bbox, bbox2
def set_smart_bounds(self, value):
"""set the axis to have smart bounds"""
self._smart_bounds = value
self.stale = True
def get_smart_bounds(self):
"""get whether the axis has smart bounds"""
return self._smart_bounds
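    # Illustrative usage (editorial note): smart bounds are usually enabled
    # together with the matching spine's smart bounds, e.g.
    #
    #     ax.spines['bottom'].set_smart_bounds(True)
    #     ax.xaxis.set_smart_bounds(True)
    #
    # so that _update_ticks() below clips ticks to the data range.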
def _update_ticks(self, renderer):
"""
Update ticks (position and labels) using the current data
interval of the axes. Returns a list of ticks that will be
drawn.
"""
interval = self.get_view_interval()
tick_tups = list(self.iter_ticks()) # iter_ticks calls the locator
if self._smart_bounds and tick_tups:
# handle inverted limits
view_low, view_high = sorted(interval)
data_low, data_high = sorted(self.get_data_interval())
locs = np.sort([ti[1] for ti in tick_tups])
if data_low <= view_low:
# data extends beyond view, take view as limit
ilow = view_low
else:
# data stops within view, take best tick
good_locs = locs[locs <= data_low]
if len(good_locs):
                # last tick prior to or equal to first data point
ilow = good_locs[-1]
else:
# No ticks (why not?), take first tick
ilow = locs[0]
if data_high >= view_high:
# data extends beyond view, take view as limit
ihigh = view_high
else:
# data stops within view, take best tick
good_locs = locs[locs >= data_high]
if len(good_locs):
# first tick after or equal to last data point
ihigh = good_locs[0]
else:
# No ticks (why not?), take last tick
ihigh = locs[-1]
tick_tups = [ti for ti in tick_tups if ilow <= ti[1] <= ihigh]
# so that we don't lose ticks on the end, expand out the interval ever
# so slightly. The "ever so slightly" is defined to be the width of a
        # half of a pixel. We don't want to draw a tick that is even one
        # pixel outside of the defined axis interval.
if interval[0] <= interval[1]:
interval_expanded = interval
else:
interval_expanded = interval[1], interval[0]
if hasattr(self, '_get_pixel_distance_along_axis'):
# normally, one does not want to catch all exceptions that
# could possibly happen, but it is not clear exactly what
# exceptions might arise from a user's projection (their
# rendition of the Axis object). So, we catch all, with
# the idea that one would rather potentially lose a tick
# from one side of the axis or another, rather than see a
# stack trace.
# We also catch users warnings here. These are the result of
# invalid numpy calculations that may be the result of out of
# bounds on axis with finite allowed intervals such as geo
# projections i.e. Mollweide.
with np.errstate(invalid='ignore'):
try:
ds1 = self._get_pixel_distance_along_axis(
interval_expanded[0], -0.5)
except:
warnings.warn("Unable to find pixel distance along axis "
"for interval padding of ticks; assuming no "
"interval padding needed.")
ds1 = 0.0
if np.isnan(ds1):
ds1 = 0.0
try:
ds2 = self._get_pixel_distance_along_axis(
interval_expanded[1], +0.5)
except:
warnings.warn("Unable to find pixel distance along axis "
"for interval padding of ticks; assuming no "
"interval padding needed.")
ds2 = 0.0
if np.isnan(ds2):
ds2 = 0.0
interval_expanded = (interval_expanded[0] - ds1,
interval_expanded[1] + ds2)
ticks_to_draw = []
for tick, loc, label in tick_tups:
if tick is None:
continue
# NB: always update labels and position to avoid issues like #9397
tick.update_position(loc)
tick.set_label1(label)
tick.set_label2(label)
if not mtransforms.interval_contains(interval_expanded, loc):
continue
ticks_to_draw.append(tick)
return ticks_to_draw
def _get_tick_bboxes(self, ticks, renderer):
"""
Given the list of ticks, return two lists of bboxes. One for
        tick label1's and another for tick label2's.
"""
ticklabelBoxes = []
ticklabelBoxes2 = []
for tick in ticks:
if tick.label1On and tick.label1.get_visible():
extent = tick.label1.get_window_extent(renderer)
ticklabelBoxes.append(extent)
if tick.label2On and tick.label2.get_visible():
extent = tick.label2.get_window_extent(renderer)
ticklabelBoxes2.append(extent)
return ticklabelBoxes, ticklabelBoxes2
def get_tightbbox(self, renderer):
"""
        Return a bounding box that encloses the axis. It only accounts
        for the tick labels, axis label, and offsetText.
"""
if not self.get_visible():
return
ticks_to_draw = self._update_ticks(renderer)
self._update_label_position(renderer)
# go back to just this axis's tick labels
ticklabelBoxes, ticklabelBoxes2 = self._get_tick_bboxes(
ticks_to_draw, renderer)
self._update_offset_text_position(ticklabelBoxes, ticklabelBoxes2)
self.offsetText.set_text(self.major.formatter.get_offset())
bb = []
for a in [self.label, self.offsetText]:
if a.get_visible():
bb.append(a.get_window_extent(renderer))
bb.extend(ticklabelBoxes)
bb.extend(ticklabelBoxes2)
bb = [b for b in bb if b.width != 0 or b.height != 0]
if bb:
_bbox = mtransforms.Bbox.union(bb)
return _bbox
else:
return None
def get_tick_padding(self):
values = []
if len(self.majorTicks):
values.append(self.majorTicks[0].get_tick_padding())
if len(self.minorTicks):
values.append(self.minorTicks[0].get_tick_padding())
if len(values):
return max(values)
return 0.0
@allow_rasterization
def draw(self, renderer, *args, **kwargs):
'Draw the axis lines, grid lines, tick lines and labels'
if not self.get_visible():
return
renderer.open_group(__name__)
ticks_to_draw = self._update_ticks(renderer)
ticklabelBoxes, ticklabelBoxes2 = self._get_tick_bboxes(ticks_to_draw,
renderer)
for tick in ticks_to_draw:
tick.draw(renderer)
        # scale up the axis label box to also find the neighbors, not
        # just the tick labels that actually overlap. Note we need a
        # *copy* of the axis label box because we don't want to scale
        # the actual bbox.
self._update_label_position(renderer)
self.label.draw(renderer)
self._update_offset_text_position(ticklabelBoxes, ticklabelBoxes2)
self.offsetText.set_text(self.major.formatter.get_offset())
self.offsetText.draw(renderer)
if 0: # draw the bounding boxes around the text for debug
for tick in self.majorTicks:
label = tick.label1
mpatches.bbox_artist(label, renderer)
mpatches.bbox_artist(self.label, renderer)
renderer.close_group(__name__)
self.stale = False
def _get_label(self):
raise NotImplementedError('Derived must override')
def _get_offset_text(self):
raise NotImplementedError('Derived must override')
def get_gridlines(self):
'Return the grid lines as a list of Line2D instance'
ticks = self.get_major_ticks()
return cbook.silent_list('Line2D gridline',
[tick.gridline for tick in ticks])
def get_label(self):
'Return the axis label as a Text instance'
return self.label
def get_offset_text(self):
'Return the axis offsetText as a Text instance'
return self.offsetText
def get_pickradius(self):
'Return the depth of the axis used by the picker'
return self.pickradius
def get_majorticklabels(self):
'Return a list of Text instances for the major ticklabels'
ticks = self.get_major_ticks()
labels1 = [tick.label1 for tick in ticks if tick.label1On]
labels2 = [tick.label2 for tick in ticks if tick.label2On]
return cbook.silent_list('Text major ticklabel', labels1 + labels2)
def get_minorticklabels(self):
'Return a list of Text instances for the minor ticklabels'
ticks = self.get_minor_ticks()
labels1 = [tick.label1 for tick in ticks if tick.label1On]
labels2 = [tick.label2 for tick in ticks if tick.label2On]
return cbook.silent_list('Text minor ticklabel', labels1 + labels2)
def get_ticklabels(self, minor=False, which=None):
"""
Get the x tick labels as a list of :class:`~matplotlib.text.Text`
instances.
Parameters
----------
minor : bool
If True return the minor ticklabels,
else return the major ticklabels
which : None, ('minor', 'major', 'both')
Overrides `minor`.
Selects which ticklabels to return
Returns
-------
ret : list
List of :class:`~matplotlib.text.Text` instances.
"""
if which is not None:
if which == 'minor':
return self.get_minorticklabels()
elif which == 'major':
return self.get_majorticklabels()
elif which == 'both':
return self.get_majorticklabels() + self.get_minorticklabels()
else:
raise ValueError("`which` must be one of ('minor', 'major', "
"'both') not " + str(which))
if minor:
return self.get_minorticklabels()
return self.get_majorticklabels()
def get_majorticklines(self):
'Return the major tick lines as a list of Line2D instances'
lines = []
ticks = self.get_major_ticks()
for tick in ticks:
lines.append(tick.tick1line)
lines.append(tick.tick2line)
return cbook.silent_list('Line2D ticklines', lines)
def get_minorticklines(self):
'Return the minor tick lines as a list of Line2D instances'
lines = []
ticks = self.get_minor_ticks()
for tick in ticks:
lines.append(tick.tick1line)
lines.append(tick.tick2line)
return cbook.silent_list('Line2D ticklines', lines)
def get_ticklines(self, minor=False):
'Return the tick lines as a list of Line2D instances'
if minor:
return self.get_minorticklines()
return self.get_majorticklines()
def get_majorticklocs(self):
"Get the major tick locations in data coordinates as a numpy array"
return self.major.locator()
def get_minorticklocs(self):
"Get the minor tick locations in data coordinates as a numpy array"
return self.minor.locator()
def get_ticklocs(self, minor=False):
"Get the tick locations in data coordinates as a numpy array"
if minor:
return self.minor.locator()
return self.major.locator()
def get_ticks_direction(self, minor=False):
"""
Get the tick directions as a numpy array
Parameters
----------
minor : boolean
True to return the minor tick directions,
False to return the major tick directions,
Default is False
Returns
-------
numpy array of tick directions
"""
if minor:
return np.array(
[tick._tickdir for tick in self.get_minor_ticks()])
else:
return np.array(
[tick._tickdir for tick in self.get_major_ticks()])
def _get_tick(self, major):
'return the default tick instance'
raise NotImplementedError('derived must override')
def _copy_tick_props(self, src, dest):
'Copy the props from src tick to dest tick'
if src is None or dest is None:
return
dest.label1.update_from(src.label1)
dest.label2.update_from(src.label2)
dest.tick1line.update_from(src.tick1line)
dest.tick2line.update_from(src.tick2line)
dest.gridline.update_from(src.gridline)
dest.tick1On = src.tick1On
dest.tick2On = src.tick2On
dest.label1On = src.label1On
dest.label2On = src.label2On
def get_label_text(self):
'Get the text of the label'
return self.label.get_text()
def get_major_locator(self):
'Get the locator of the major ticker'
return self.major.locator
def get_minor_locator(self):
'Get the locator of the minor ticker'
return self.minor.locator
def get_major_formatter(self):
'Get the formatter of the major ticker'
return self.major.formatter
def get_minor_formatter(self):
'Get the formatter of the minor ticker'
return self.minor.formatter
def get_major_ticks(self, numticks=None):
'get the tick instances; grow as necessary'
if numticks is None:
numticks = len(self.get_major_locator()())
while len(self.majorTicks) < numticks:
# update the new tick label properties from the old
tick = self._get_tick(major=True)
self.majorTicks.append(tick)
if self._gridOnMajor:
tick.gridOn = True
self._copy_tick_props(self.majorTicks[0], tick)
return self.majorTicks[:numticks]
def get_minor_ticks(self, numticks=None):
'get the minor tick instances; grow as necessary'
if numticks is None:
numticks = len(self.get_minor_locator()())
while len(self.minorTicks) < numticks:
# update the new tick label properties from the old
tick = self._get_tick(major=False)
self.minorTicks.append(tick)
if self._gridOnMinor:
tick.gridOn = True
self._copy_tick_props(self.minorTicks[0], tick)
return self.minorTicks[:numticks]
def grid(self, b=None, which='major', **kwargs):
"""
Set the axis grid on or off; b is a boolean. Use *which* =
'major' | 'minor' | 'both' to set the grid for major or minor ticks.
If *b* is *None* and len(kwargs)==0, toggle the grid state. If
*kwargs* are supplied, it is assumed you want the grid on and *b*
will be set to True.
*kwargs* are used to set the line properties of the grids, e.g.,
xax.grid(color='r', linestyle='-', linewidth=2)
"""
if len(kwargs):
b = True
which = which.lower()
gridkw = {'grid_' + item[0]: item[1] for item in kwargs.items()}
if which in ['minor', 'both']:
if b is None:
self._gridOnMinor = not self._gridOnMinor
else:
self._gridOnMinor = b
self.set_tick_params(which='minor', gridOn=self._gridOnMinor,
**gridkw)
if which in ['major', 'both']:
if b is None:
self._gridOnMajor = not self._gridOnMajor
else:
self._gridOnMajor = b
self.set_tick_params(which='major', gridOn=self._gridOnMajor,
**gridkw)
self.stale = True
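    # Illustrative usage (editorial addition): because of the 'grid_' prefix
    # translation above, a call such as
    #
    #     ax.xaxis.grid(True, which='major', color='r', linestyle='--')
    #
    # forwards grid_color/grid_linestyle to set_tick_params() for the major
    # ticks only.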
def update_units(self, data):
"""
introspect *data* for units converter and update the
axis.converter instance if necessary. Return *True*
if *data* is registered for unit conversion.
"""
converter = munits.registry.get_converter(data)
if converter is None:
return False
neednew = self.converter != converter
self.converter = converter
default = self.converter.default_units(data, self)
if default is not None and self.units is None:
self.set_units(default)
if neednew:
self._update_axisinfo()
self.stale = True
return True
def _update_axisinfo(self):
"""
check the axis converter for the stored units to see if the
axis info needs to be updated
"""
if self.converter is None:
return
info = self.converter.axisinfo(self.units, self)
if info is None:
return
if info.majloc is not None and \
self.major.locator != info.majloc and self.isDefault_majloc:
self.set_major_locator(info.majloc)
self.isDefault_majloc = True
if info.minloc is not None and \
self.minor.locator != info.minloc and self.isDefault_minloc:
self.set_minor_locator(info.minloc)
self.isDefault_minloc = True
if info.majfmt is not None and \
self.major.formatter != info.majfmt and self.isDefault_majfmt:
self.set_major_formatter(info.majfmt)
self.isDefault_majfmt = True
if info.minfmt is not None and \
self.minor.formatter != info.minfmt and self.isDefault_minfmt:
self.set_minor_formatter(info.minfmt)
self.isDefault_minfmt = True
if info.label is not None and self.isDefault_label:
self.set_label_text(info.label)
self.isDefault_label = True
self.set_default_intervals()
def have_units(self):
return self.converter is not None or self.units is not None
def convert_units(self, x):
        # If x is already a number, it doesn't need converting
if munits.ConversionInterface.is_numlike(x):
return x
if self.converter is None:
self.converter = munits.registry.get_converter(x)
if self.converter is None:
return x
ret = self.converter.convert(x, self.units, self)
return ret
def set_units(self, u):
"""
set the units for axis
ACCEPTS: a units tag
"""
pchanged = False
if u is None:
self.units = None
pchanged = True
else:
if u != self.units:
self.units = u
pchanged = True
if pchanged:
self._update_axisinfo()
self.callbacks.process('units')
self.callbacks.process('units finalize')
self.stale = True
def get_units(self):
'return the units for axis'
return self.units
def set_label_text(self, label, fontdict=None, **kwargs):
""" Sets the text value of the axis label
ACCEPTS: A string value for the label
"""
self.isDefault_label = False
self.label.set_text(label)
if fontdict is not None:
self.label.update(fontdict)
self.label.update(kwargs)
self.stale = True
return self.label
def set_major_formatter(self, formatter):
"""
Set the formatter of the major ticker
ACCEPTS: A :class:`~matplotlib.ticker.Formatter` instance
"""
self.isDefault_majfmt = False
self.major.formatter = formatter
formatter.set_axis(self)
self.stale = True
def set_minor_formatter(self, formatter):
"""
Set the formatter of the minor ticker
ACCEPTS: A :class:`~matplotlib.ticker.Formatter` instance
"""
self.isDefault_minfmt = False
self.minor.formatter = formatter
formatter.set_axis(self)
self.stale = True
def set_major_locator(self, locator):
"""
Set the locator of the major ticker
ACCEPTS: a :class:`~matplotlib.ticker.Locator` instance
"""
self.isDefault_majloc = False
self.major.locator = locator
locator.set_axis(self)
self.stale = True
def set_minor_locator(self, locator):
"""
Set the locator of the minor ticker
ACCEPTS: a :class:`~matplotlib.ticker.Locator` instance
"""
self.isDefault_minloc = False
self.minor.locator = locator
locator.set_axis(self)
self.stale = True
def set_pickradius(self, pickradius):
"""
Set the depth of the axis used by the picker
ACCEPTS: a distance in points
"""
self.pickradius = pickradius
def set_ticklabels(self, ticklabels, *args, **kwargs):
"""
Set the text values of the tick labels. Return a list of Text
instances. Use *kwarg* *minor=True* to select minor ticks.
All other kwargs are used to update the text object properties.
As for get_ticklabels, label1 (left or bottom) is
affected for a given tick only if its label1On attribute
is True, and similarly for label2. The list of returned
label text objects consists of all such label1 objects followed
by all such label2 objects.
The input *ticklabels* is assumed to match the set of
tick locations, regardless of the state of label1On and
label2On.
ACCEPTS: sequence of strings or Text objects
"""
get_labels = []
for t in ticklabels:
# try calling get_text() to check whether it is Text object
# if it is Text, get label content
try:
get_labels.append(t.get_text())
# otherwise add the label to the list directly
except AttributeError:
get_labels.append(t)
# replace the ticklabels list with the processed one
ticklabels = get_labels
minor = kwargs.pop('minor', False)
if minor:
self.set_minor_formatter(mticker.FixedFormatter(ticklabels))
ticks = self.get_minor_ticks()
else:
self.set_major_formatter(mticker.FixedFormatter(ticklabels))
ticks = self.get_major_ticks()
ret = []
for tick_label, tick in zip(ticklabels, ticks):
# deal with label1
tick.label1.set_text(tick_label)
tick.label1.update(kwargs)
# deal with label2
tick.label2.set_text(tick_label)
tick.label2.update(kwargs)
# only return visible tick labels
if tick.label1On:
ret.append(tick.label1)
if tick.label2On:
ret.append(tick.label2)
self.stale = True
return ret
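    # Illustrative usage (editorial addition): because a FixedFormatter is
    # installed above, the labels are tied to the current tick locations, so
    # the usual pattern is to fix the locations first, e.g.
    #
    #     ax.xaxis.set_ticks([0, 1, 2])
    #     ax.xaxis.set_ticklabels(['low', 'mid', 'high'], rotation=45)
    #
    # (``rotation=45`` is just an example of a Text property passed through
    # **kwargs.)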
def set_ticks(self, ticks, minor=False):
"""
Set the locations of the tick marks from sequence ticks
ACCEPTS: sequence of floats
"""
# XXX if the user changes units, the information will be lost here
ticks = self.convert_units(ticks)
if len(ticks) > 1:
xleft, xright = self.get_view_interval()
if xright > xleft:
self.set_view_interval(min(ticks), max(ticks))
else:
self.set_view_interval(max(ticks), min(ticks))
if minor:
self.set_minor_locator(mticker.FixedLocator(ticks))
return self.get_minor_ticks(len(ticks))
else:
self.set_major_locator(mticker.FixedLocator(ticks))
return self.get_major_ticks(len(ticks))
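    # Illustrative usage (editorial addition): a minimal sketch, assuming a
    # standard Axes ``ax``:
    #
    #     ax.xaxis.set_ticks([0, 0.5, 1.0])             # major ticks
    #     ax.xaxis.set_ticks([0.25, 0.75], minor=True)  # minor ticks
    #
    # Both calls install a FixedLocator, as implemented above.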
def _get_tick_boxes_siblings(self, xdir, renderer):
"""
Get the bounding boxes for this `.axis` and its siblings
        as set by `.Figure.align_xlabels` or `.Figure.align_ylabels`.
By default it just gets bboxes for self.
"""
raise NotImplementedError('Derived must override')
def _update_label_position(self, renderer):
"""
Update the label position based on the bounding box enclosing
all the ticklabels and axis spine
"""
raise NotImplementedError('Derived must override')
def _update_offset_text_position(self, bboxes, bboxes2):
"""
        Update the offset text position based on the sequence of bounding
boxes of all the ticklabels
"""
raise NotImplementedError('Derived must override')
def pan(self, numsteps):
'Pan *numsteps* (can be positive or negative)'
self.major.locator.pan(numsteps)
def zoom(self, direction):
"Zoom in/out on axis; if *direction* is >0 zoom in, else zoom out"
self.major.locator.zoom(direction)
def axis_date(self, tz=None):
"""
Sets up x-axis ticks and labels that treat the x data as dates.
*tz* is a :class:`tzinfo` instance or a timezone string.
This timezone is used to create date labels.
"""
# By providing a sample datetime instance with the desired
# timezone, the registered converter can be selected,
# and the "units" attribute, which is the timezone, can
# be set.
import datetime
if isinstance(tz, six.string_types):
import pytz
tz = pytz.timezone(tz)
self.update_units(datetime.datetime(2009, 1, 1, 0, 0, 0, 0, tz))
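    # Illustrative usage (editorial addition): a hedged sketch of turning an
    # axis into a date axis with an explicit timezone string (requires pytz):
    #
    #     ax.xaxis.axis_date(tz='Europe/Berlin')
    #
    # After this, the registered date converter supplies the default locator
    # and formatter for the axis.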
def get_tick_space(self):
"""
Return the estimated number of ticks that can fit on the axis.
"""
# Must be overridden in the subclass
raise NotImplementedError()
def get_label_position(self):
"""
Return the label position (top or bottom)
"""
return self.label_position
def set_label_position(self, position):
"""
Set the label position (top or bottom)
ACCEPTS: [ 'top' | 'bottom' ]
"""
raise NotImplementedError()
def get_minpos(self):
raise NotImplementedError()
class XAxis(Axis):
__name__ = 'xaxis'
axis_name = 'x'
def contains(self, mouseevent):
"""Test whether the mouse event occurred in the x axis.
"""
if callable(self._contains):
return self._contains(self, mouseevent)
x, y = mouseevent.x, mouseevent.y
try:
trans = self.axes.transAxes.inverted()
xaxes, yaxes = trans.transform_point((x, y))
except ValueError:
return False, {}
l, b = self.axes.transAxes.transform_point((0, 0))
r, t = self.axes.transAxes.transform_point((1, 1))
inaxis = xaxes >= 0 and xaxes <= 1 and (
(y < b and y > b - self.pickradius) or
(y > t and y < t + self.pickradius))
return inaxis, {}
def _get_tick(self, major):
if major:
tick_kw = self._major_tick_kw
else:
tick_kw = self._minor_tick_kw
return XTick(self.axes, 0, '', major=major, **tick_kw)
def _get_label(self):
# x in axes coords, y in display coords (to be updated at draw
# time by _update_label_positions)
label = mtext.Text(x=0.5, y=0,
fontproperties=font_manager.FontProperties(
size=rcParams['axes.labelsize'],
weight=rcParams['axes.labelweight']),
color=rcParams['axes.labelcolor'],
verticalalignment='top',
horizontalalignment='center')
label.set_transform(mtransforms.blended_transform_factory(
self.axes.transAxes, mtransforms.IdentityTransform()))
self._set_artist_props(label)
self.label_position = 'bottom'
return label
def _get_offset_text(self):
# x in axes coords, y in display coords (to be updated at draw time)
offsetText = mtext.Text(x=1, y=0,
fontproperties=font_manager.FontProperties(
size=rcParams['xtick.labelsize']),
color=rcParams['xtick.color'],
verticalalignment='top',
horizontalalignment='right')
offsetText.set_transform(mtransforms.blended_transform_factory(
self.axes.transAxes, mtransforms.IdentityTransform())
)
self._set_artist_props(offsetText)
self.offset_text_position = 'bottom'
return offsetText
def _get_pixel_distance_along_axis(self, where, perturb):
"""
Returns the amount, in data coordinates, that a single pixel
corresponds to in the locality given by "where", which is also given
in data coordinates, and is an x coordinate. "perturb" is the amount
to perturb the pixel. Usually +0.5 or -0.5.
Implementing this routine for an axis is optional; if present, it will
ensure that no ticks are lost due to round-off at the extreme ends of
an axis.
"""
# Note that this routine does not work for a polar axis, because of
# the 1e-10 below. To do things correctly, we need to use rmax
# instead of 1e-10 for a polar axis. But since we do not have that
# kind of information at this point, we just don't try to pad anything
# for the theta axis of a polar plot.
if self.axes.name == 'polar':
return 0.0
#
# first figure out the pixel location of the "where" point. We use
# 1e-10 for the y point, so that we remain compatible with log axes.
# transformation from data coords to display coords
trans = self.axes.transData
# transformation from display coords to data coords
transinv = trans.inverted()
pix = trans.transform_point((where, 1e-10))
# perturb the pixel
ptp = transinv.transform_point((pix[0] + perturb, pix[1]))
dx = abs(ptp[0] - where)
return dx
def set_label_position(self, position):
"""
Set the label position (top or bottom)
ACCEPTS: [ 'top' | 'bottom' ]
"""
if position == 'top':
self.label.set_verticalalignment('baseline')
elif position == 'bottom':
self.label.set_verticalalignment('top')
else:
raise ValueError("Position accepts only 'top' or 'bottom'")
self.label_position = position
self.stale = True
def _get_tick_boxes_siblings(self, renderer):
"""
Get the bounding boxes for this `.axis` and its siblings
        as set by `.Figure.align_xlabels` or `.Figure.align_ylabels`.
By default it just gets bboxes for self.
"""
bboxes = []
bboxes2 = []
# get the Grouper that keeps track of x-label groups for this figure
grp = self.figure._align_xlabel_grp
# if we want to align labels from other axes:
for nn, axx in enumerate(grp.get_siblings(self.axes)):
ticks_to_draw = axx.xaxis._update_ticks(renderer)
tlb, tlb2 = axx.xaxis._get_tick_bboxes(ticks_to_draw, renderer)
bboxes.extend(tlb)
bboxes2.extend(tlb2)
return bboxes, bboxes2
def _update_label_position(self, renderer):
"""
Update the label position based on the bounding box enclosing
all the ticklabels and axis spine
"""
if not self._autolabelpos:
return
# get bounding boxes for this axis and any siblings
# that have been set by `fig.align_xlabels()`
bboxes, bboxes2 = self._get_tick_boxes_siblings(renderer=renderer)
x, y = self.label.get_position()
if self.label_position == 'bottom':
try:
spine = self.axes.spines['bottom']
spinebbox = spine.get_transform().transform_path(
spine.get_path()).get_extents()
except KeyError:
# use axes if spine doesn't exist
spinebbox = self.axes.bbox
bbox = mtransforms.Bbox.union(bboxes + [spinebbox])
bottom = bbox.y0
self.label.set_position(
(x, bottom - self.labelpad * self.figure.dpi / 72.0)
)
else:
try:
spine = self.axes.spines['top']
spinebbox = spine.get_transform().transform_path(
spine.get_path()).get_extents()
except KeyError:
# use axes if spine doesn't exist
spinebbox = self.axes.bbox
bbox = mtransforms.Bbox.union(bboxes2 + [spinebbox])
top = bbox.y1
self.label.set_position(
(x, top + self.labelpad * self.figure.dpi / 72.0)
)
def _update_offset_text_position(self, bboxes, bboxes2):
"""
Update the offset_text position based on the sequence of bounding
boxes of all the ticklabels
"""
x, y = self.offsetText.get_position()
if not len(bboxes):
bottom = self.axes.bbox.ymin
else:
bbox = mtransforms.Bbox.union(bboxes)
bottom = bbox.y0
self.offsetText.set_position(
(x, bottom - self.OFFSETTEXTPAD * self.figure.dpi / 72.0)
)
def get_text_heights(self, renderer):
"""
Returns the amount of space one should reserve for text
above and below the axes. Returns a tuple (above, below)
"""
bbox, bbox2 = self.get_ticklabel_extents(renderer)
# MGDTODO: Need a better way to get the pad
padPixels = self.majorTicks[0].get_pad_pixels()
above = 0.0
if bbox2.height:
above += bbox2.height + padPixels
below = 0.0
if bbox.height:
below += bbox.height + padPixels
if self.get_label_position() == 'top':
above += self.label.get_window_extent(renderer).height + padPixels
else:
below += self.label.get_window_extent(renderer).height + padPixels
return above, below
def set_ticks_position(self, position):
"""
Set the ticks position (top, bottom, both, default or none)
        'both' sets the ticks to appear on both positions, but does not
change the tick labels. 'default' resets the tick positions to
the default: ticks on both positions, labels at bottom. 'none'
can be used if you don't want any ticks. 'none' and 'both'
affect only the ticks, not the labels.
ACCEPTS: [ 'top' | 'bottom' | 'both' | 'default' | 'none' ]
"""
if position == 'top':
self.set_tick_params(which='both', top=True, labeltop=True,
bottom=False, labelbottom=False)
elif position == 'bottom':
self.set_tick_params(which='both', top=False, labeltop=False,
bottom=True, labelbottom=True)
elif position == 'both':
self.set_tick_params(which='both', top=True,
bottom=True)
elif position == 'none':
self.set_tick_params(which='both', top=False,
bottom=False)
elif position == 'default':
self.set_tick_params(which='both', top=True, labeltop=False,
bottom=True, labelbottom=True)
else:
raise ValueError("invalid position: %s" % position)
self.stale = True
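    # Illustrative usage (editorial addition): moving x ticks and tick labels
    # to the top of the axes, equivalent to tick_top() below:
    #
    #     ax.xaxis.set_ticks_position('top')
    #
    # whereas 'both' draws tick marks on top and bottom but leaves the label
    # visibility unchanged.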
def tick_top(self):
"""
Move ticks and ticklabels (if present) to the top of the axes.
"""
label = True
if 'label1On' in self._major_tick_kw:
label = (self._major_tick_kw['label1On']
or self._major_tick_kw['label2On'])
self.set_ticks_position('top')
# if labels were turned off before this was called
# leave them off
self.set_tick_params(which='both', labeltop=label)
def tick_bottom(self):
"""
Move ticks and ticklabels (if present) to the bottom of the axes.
"""
label = True
if 'label1On' in self._major_tick_kw:
label = (self._major_tick_kw['label1On']
or self._major_tick_kw['label2On'])
self.set_ticks_position('bottom')
# if labels were turned off before this was called
# leave them off
self.set_tick_params(which='both', labelbottom=label)
def get_ticks_position(self):
"""
Return the ticks position (top, bottom, default or unknown)
"""
majt = self.majorTicks[0]
mT = self.minorTicks[0]
majorTop = ((not majt.tick1On) and majt.tick2On and
(not majt.label1On) and majt.label2On)
minorTop = ((not mT.tick1On) and mT.tick2On and
(not mT.label1On) and mT.label2On)
if majorTop and minorTop:
return 'top'
MajorBottom = (majt.tick1On and (not majt.tick2On) and
majt.label1On and (not majt.label2On))
MinorBottom = (mT.tick1On and (not mT.tick2On) and
mT.label1On and (not mT.label2On))
if MajorBottom and MinorBottom:
return 'bottom'
majorDefault = (majt.tick1On and majt.tick2On and
majt.label1On and (not majt.label2On))
minorDefault = (mT.tick1On and mT.tick2On and
mT.label1On and (not mT.label2On))
if majorDefault and minorDefault:
return 'default'
return 'unknown'
def get_view_interval(self):
'return the Interval instance for this axis view limits'
return self.axes.viewLim.intervalx
def set_view_interval(self, vmin, vmax, ignore=False):
"""
If *ignore* is *False*, the order of vmin, vmax
does not matter; the original axis orientation will
be preserved. In addition, the view limits can be
expanded, but will not be reduced. This method is
for mpl internal use; for normal use, see
:meth:`~matplotlib.axes.Axes.set_xlim`.
"""
if ignore:
self.axes.viewLim.intervalx = vmin, vmax
else:
Vmin, Vmax = self.get_view_interval()
if Vmin < Vmax:
self.axes.viewLim.intervalx = (min(vmin, vmax, Vmin),
max(vmin, vmax, Vmax))
else:
self.axes.viewLim.intervalx = (max(vmin, vmax, Vmin),
min(vmin, vmax, Vmax))
def get_minpos(self):
return self.axes.dataLim.minposx
def get_data_interval(self):
'return the Interval instance for this axis data limits'
return self.axes.dataLim.intervalx
def set_data_interval(self, vmin, vmax, ignore=False):
'set the axis data limits'
if ignore:
self.axes.dataLim.intervalx = vmin, vmax
else:
Vmin, Vmax = self.get_data_interval()
self.axes.dataLim.intervalx = min(vmin, Vmin), max(vmax, Vmax)
self.stale = True
def set_default_intervals(self):
'set the default limits for the axis interval if they are not mutated'
xmin, xmax = 0., 1.
dataMutated = self.axes.dataLim.mutatedx()
viewMutated = self.axes.viewLim.mutatedx()
if not dataMutated or not viewMutated:
if self.converter is not None:
info = self.converter.axisinfo(self.units, self)
if info.default_limits is not None:
valmin, valmax = info.default_limits
xmin = self.converter.convert(valmin, self.units, self)
xmax = self.converter.convert(valmax, self.units, self)
if not dataMutated:
self.axes.dataLim.intervalx = xmin, xmax
if not viewMutated:
self.axes.viewLim.intervalx = xmin, xmax
self.stale = True
def get_tick_space(self):
ends = self.axes.transAxes.transform([[0, 0], [1, 0]])
length = ((ends[1][0] - ends[0][0]) / self.axes.figure.dpi) * 72.0
tick = self._get_tick(True)
# There is a heuristic here that the aspect ratio of tick text
# is no more than 3:1
size = tick.label1.get_size() * 3
if size > 0:
return int(np.floor(length / size))
else:
return 2**31 - 1
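    # Worked example (editorial addition) of the heuristic above: with an
    # axes 6 inches wide (length = 432 points) and a 10 pt tick font, the
    # assumed 3:1 label aspect ratio gives size = 30, so roughly
    # int(432 / 30) == 14 ticks are estimated to fit.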
class YAxis(Axis):
__name__ = 'yaxis'
axis_name = 'y'
def contains(self, mouseevent):
"""Test whether the mouse event occurred in the y axis.
Returns *True* | *False*
"""
if callable(self._contains):
return self._contains(self, mouseevent)
x, y = mouseevent.x, mouseevent.y
try:
trans = self.axes.transAxes.inverted()
xaxes, yaxes = trans.transform_point((x, y))
except ValueError:
return False, {}
l, b = self.axes.transAxes.transform_point((0, 0))
r, t = self.axes.transAxes.transform_point((1, 1))
inaxis = yaxes >= 0 and yaxes <= 1 and (
(x < l and x > l - self.pickradius) or
(x > r and x < r + self.pickradius))
return inaxis, {}
def _get_tick(self, major):
if major:
tick_kw = self._major_tick_kw
else:
tick_kw = self._minor_tick_kw
return YTick(self.axes, 0, '', major=major, **tick_kw)
def _get_label(self):
# x in display coords (updated by _update_label_position)
# y in axes coords
label = mtext.Text(x=0, y=0.5,
# todo: get the label position
fontproperties=font_manager.FontProperties(
size=rcParams['axes.labelsize'],
weight=rcParams['axes.labelweight']),
color=rcParams['axes.labelcolor'],
verticalalignment='bottom',
horizontalalignment='center',
rotation='vertical',
rotation_mode='anchor')
label.set_transform(mtransforms.blended_transform_factory(
mtransforms.IdentityTransform(), self.axes.transAxes))
self._set_artist_props(label)
self.label_position = 'left'
return label
def _get_offset_text(self):
# x in display coords, y in axes coords (to be updated at draw time)
offsetText = mtext.Text(x=0, y=0.5,
fontproperties=font_manager.FontProperties(
size=rcParams['ytick.labelsize']
),
color=rcParams['ytick.color'],
verticalalignment='baseline',
horizontalalignment='left')
offsetText.set_transform(mtransforms.blended_transform_factory(
self.axes.transAxes, mtransforms.IdentityTransform())
)
self._set_artist_props(offsetText)
self.offset_text_position = 'left'
return offsetText
def _get_pixel_distance_along_axis(self, where, perturb):
"""
Returns the amount, in data coordinates, that a single pixel
corresponds to in the locality given by *where*, which is also given
in data coordinates, and is a y coordinate.
*perturb* is the amount to perturb the pixel. Usually +0.5 or -0.5.
Implementing this routine for an axis is optional; if present, it will
ensure that no ticks are lost due to round-off at the extreme ends of
an axis.
"""
#
# first figure out the pixel location of the "where" point. We use
# 1e-10 for the x point, so that we remain compatible with log axes.
# transformation from data coords to display coords
trans = self.axes.transData
# transformation from display coords to data coords
transinv = trans.inverted()
pix = trans.transform_point((1e-10, where))
# perturb the pixel
ptp = transinv.transform_point((pix[0], pix[1] + perturb))
dy = abs(ptp[1] - where)
return dy
def set_label_position(self, position):
"""
Set the label position (left or right)
ACCEPTS: [ 'left' | 'right' ]
"""
self.label.set_rotation_mode('anchor')
self.label.set_horizontalalignment('center')
if position == 'left':
self.label.set_verticalalignment('bottom')
elif position == 'right':
self.label.set_verticalalignment('top')
else:
raise ValueError("Position accepts only 'left' or 'right'")
self.label_position = position
self.stale = True
def _get_tick_boxes_siblings(self, renderer):
"""
Get the bounding boxes for this `.axis` and its siblings
        as set by `.Figure.align_xlabels` or `.Figure.align_ylabels`.
By default it just gets bboxes for self.
"""
bboxes = []
bboxes2 = []
# get the Grouper that keeps track of y-label groups for this figure
grp = self.figure._align_ylabel_grp
# if we want to align labels from other axes:
for axx in grp.get_siblings(self.axes):
ticks_to_draw = axx.yaxis._update_ticks(renderer)
tlb, tlb2 = axx.yaxis._get_tick_bboxes(ticks_to_draw, renderer)
bboxes.extend(tlb)
bboxes2.extend(tlb2)
return bboxes, bboxes2
def _update_label_position(self, renderer):
"""
Update the label position based on the bounding box enclosing
all the ticklabels and axis spine
"""
if not self._autolabelpos:
return
# get bounding boxes for this axis and any siblings
# that have been set by `fig.align_ylabels()`
bboxes, bboxes2 = self._get_tick_boxes_siblings(renderer=renderer)
x, y = self.label.get_position()
if self.label_position == 'left':
try:
spine = self.axes.spines['left']
spinebbox = spine.get_transform().transform_path(
spine.get_path()).get_extents()
except KeyError:
# use axes if spine doesn't exist
spinebbox = self.axes.bbox
bbox = mtransforms.Bbox.union(bboxes + [spinebbox])
left = bbox.x0
self.label.set_position(
(left - self.labelpad * self.figure.dpi / 72.0, y)
)
else:
try:
spine = self.axes.spines['right']
spinebbox = spine.get_transform().transform_path(
spine.get_path()).get_extents()
except KeyError:
# use axes if spine doesn't exist
spinebbox = self.axes.bbox
bbox = mtransforms.Bbox.union(bboxes2 + [spinebbox])
right = bbox.x1
self.label.set_position(
(right + self.labelpad * self.figure.dpi / 72.0, y)
)
def _update_offset_text_position(self, bboxes, bboxes2):
"""
Update the offset_text position based on the sequence of bounding
boxes of all the ticklabels
"""
x, y = self.offsetText.get_position()
top = self.axes.bbox.ymax
self.offsetText.set_position(
(x, top + self.OFFSETTEXTPAD * self.figure.dpi / 72.0)
)
def set_offset_position(self, position):
"""
.. ACCEPTS: [ 'left' | 'right' ]
"""
x, y = self.offsetText.get_position()
if position == 'left':
x = 0
elif position == 'right':
x = 1
else:
raise ValueError("Position accepts only [ 'left' | 'right' ]")
self.offsetText.set_ha(position)
self.offsetText.set_position((x, y))
self.stale = True
def get_text_widths(self, renderer):
bbox, bbox2 = self.get_ticklabel_extents(renderer)
# MGDTODO: Need a better way to get the pad
padPixels = self.majorTicks[0].get_pad_pixels()
left = 0.0
if bbox.width:
left += bbox.width + padPixels
right = 0.0
if bbox2.width:
right += bbox2.width + padPixels
if self.get_label_position() == 'left':
left += self.label.get_window_extent(renderer).width + padPixels
else:
right += self.label.get_window_extent(renderer).width + padPixels
return left, right
def set_ticks_position(self, position):
"""
Set the ticks position (left, right, both, default or none)
'both' sets the ticks to appear on both positions, but does not
change the tick labels. 'default' resets the tick positions to
the default: ticks on both positions, labels at left. 'none'
can be used if you don't want any ticks. 'none' and 'both'
affect only the ticks, not the labels.
ACCEPTS: [ 'left' | 'right' | 'both' | 'default' | 'none' ]
"""
if position == 'right':
self.set_tick_params(which='both', right=True, labelright=True,
left=False, labelleft=False)
self.set_offset_position(position)
elif position == 'left':
self.set_tick_params(which='both', right=False, labelright=False,
left=True, labelleft=True)
self.set_offset_position(position)
elif position == 'both':
self.set_tick_params(which='both', right=True,
left=True)
elif position == 'none':
self.set_tick_params(which='both', right=False,
left=False)
elif position == 'default':
self.set_tick_params(which='both', right=True, labelright=False,
left=True, labelleft=True)
else:
raise ValueError("invalid position: %s" % position)
self.stale = True
def tick_right(self):
"""
Move ticks and ticklabels (if present) to the right of the axes.
"""
label = True
if 'label1On' in self._major_tick_kw:
label = (self._major_tick_kw['label1On']
or self._major_tick_kw['label2On'])
self.set_ticks_position('right')
# if labels were turned off before this was called
# leave them off
self.set_tick_params(which='both', labelright=label)
def tick_left(self):
"""
Move ticks and ticklabels (if present) to the left of the axes.
"""
label = True
if 'label1On' in self._major_tick_kw:
label = (self._major_tick_kw['label1On']
or self._major_tick_kw['label2On'])
self.set_ticks_position('left')
# if labels were turned off before this was called
# leave them off
self.set_tick_params(which='both', labelleft=label)
def get_ticks_position(self):
"""
Return the ticks position (left, right, both or unknown)
"""
majt = self.majorTicks[0]
mT = self.minorTicks[0]
majorRight = ((not majt.tick1On) and majt.tick2On and
(not majt.label1On) and majt.label2On)
minorRight = ((not mT.tick1On) and mT.tick2On and
(not mT.label1On) and mT.label2On)
if majorRight and minorRight:
return 'right'
majorLeft = (majt.tick1On and (not majt.tick2On) and
majt.label1On and (not majt.label2On))
minorLeft = (mT.tick1On and (not mT.tick2On) and
mT.label1On and (not mT.label2On))
if majorLeft and minorLeft:
return 'left'
majorDefault = (majt.tick1On and majt.tick2On and
majt.label1On and (not majt.label2On))
minorDefault = (mT.tick1On and mT.tick2On and
mT.label1On and (not mT.label2On))
if majorDefault and minorDefault:
return 'default'
return 'unknown'
def get_view_interval(self):
'return the Interval instance for this axis view limits'
return self.axes.viewLim.intervaly
def set_view_interval(self, vmin, vmax, ignore=False):
"""
If *ignore* is *False*, the order of vmin, vmax
does not matter; the original axis orientation will
be preserved. In addition, the view limits can be
expanded, but will not be reduced. This method is
for mpl internal use; for normal use, see
:meth:`~matplotlib.axes.Axes.set_ylim`.
"""
if ignore:
self.axes.viewLim.intervaly = vmin, vmax
else:
Vmin, Vmax = self.get_view_interval()
if Vmin < Vmax:
self.axes.viewLim.intervaly = (min(vmin, vmax, Vmin),
max(vmin, vmax, Vmax))
else:
self.axes.viewLim.intervaly = (max(vmin, vmax, Vmin),
min(vmin, vmax, Vmax))
self.stale = True
def get_minpos(self):
return self.axes.dataLim.minposy
def get_data_interval(self):
'return the Interval instance for this axis data limits'
return self.axes.dataLim.intervaly
def set_data_interval(self, vmin, vmax, ignore=False):
'set the axis data limits'
if ignore:
self.axes.dataLim.intervaly = vmin, vmax
else:
Vmin, Vmax = self.get_data_interval()
self.axes.dataLim.intervaly = min(vmin, Vmin), max(vmax, Vmax)
self.stale = True
def set_default_intervals(self):
'set the default limits for the axis interval if they are not mutated'
ymin, ymax = 0., 1.
dataMutated = self.axes.dataLim.mutatedy()
viewMutated = self.axes.viewLim.mutatedy()
if not dataMutated or not viewMutated:
if self.converter is not None:
info = self.converter.axisinfo(self.units, self)
if info.default_limits is not None:
valmin, valmax = info.default_limits
ymin = self.converter.convert(valmin, self.units, self)
ymax = self.converter.convert(valmax, self.units, self)
if not dataMutated:
self.axes.dataLim.intervaly = ymin, ymax
if not viewMutated:
self.axes.viewLim.intervaly = ymin, ymax
self.stale = True
def get_tick_space(self):
ends = self.axes.transAxes.transform([[0, 0], [0, 1]])
length = ((ends[1][1] - ends[0][1]) / self.axes.figure.dpi) * 72.0
tick = self._get_tick(True)
# Having a spacing of at least 2 just looks good.
size = tick.label1.get_size() * 2.0
if size > 0:
return int(np.floor(length / size))
else:
return 2**31 - 1
| 90,104 | 34.898406 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/stackplot.py
|
"""
Stacked area plot for 1D arrays inspired by Douglas Y'barbo's stackoverflow
answer:
http://stackoverflow.com/questions/2225995/how-can-i-create-stacked-line-graph-with-matplotlib
(http://stackoverflow.com/users/66549/doug)
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
from cycler import cycler
import numpy as np
__all__ = ['stackplot']
def stackplot(axes, x, *args, **kwargs):
"""
Draws a stacked area plot.
Parameters
----------
x : 1d array of dimension N
y : 2d array (dimension MxN), or sequence of 1d arrays (each dimension 1xN)
The data is assumed to be unstacked. Each of the following
calls is legal::
stackplot(x, y) # where y is MxN
            stackplot(x, y1, y2, y3, y4)  # where y1, y2, y3, y4 are all 1xN
baseline : ['zero' | 'sym' | 'wiggle' | 'weighted_wiggle']
Method used to calculate the baseline:
- ``'zero'``: Constant zero baseline, i.e. a simple stacked plot.
- ``'sym'``: Symmetric around zero and is sometimes called
'ThemeRiver'.
- ``'wiggle'``: Minimizes the sum of the squared slopes.
- ``'weighted_wiggle'``: Does the same but weights to account for
size of each layer. It is also called 'Streamgraph'-layout. More
details can be found at http://leebyron.com/streamgraph/.
labels : Length N sequence of strings
Labels to assign to each data series.
colors : Length N sequence of colors
A list or tuple of colors. These will be cycled through and used to
colour the stacked areas.
**kwargs :
All other keyword arguments are passed to `Axes.fill_between()`.
Returns
-------
list of `.PolyCollection`
A list of `.PolyCollection` instances, one for each element in the
stacked area plot.
"""
y = np.row_stack(args)
labels = iter(kwargs.pop('labels', []))
colors = kwargs.pop('colors', None)
if colors is not None:
axes.set_prop_cycle(cycler('color', colors))
baseline = kwargs.pop('baseline', 'zero')
# Assume data passed has not been 'stacked', so stack it here.
# We'll need a float buffer for the upcoming calculations.
stack = np.cumsum(y, axis=0, dtype=np.promote_types(y.dtype, np.float32))
if baseline == 'zero':
first_line = 0.
elif baseline == 'sym':
first_line = -np.sum(y, 0) * 0.5
stack += first_line[None, :]
elif baseline == 'wiggle':
m = y.shape[0]
first_line = (y * (m - 0.5 - np.arange(m)[:, None])).sum(0)
first_line /= -m
stack += first_line
elif baseline == 'weighted_wiggle':
m, n = y.shape
total = np.sum(y, 0)
# multiply by 1/total (or zero) to avoid infinities in the division:
inv_total = np.zeros_like(total)
mask = total > 0
inv_total[mask] = 1.0 / total[mask]
increase = np.hstack((y[:, 0:1], np.diff(y)))
below_size = total - stack
below_size += 0.5 * y
move_up = below_size * inv_total
move_up[:, 0] = 0.5
center = (move_up - 0.5) * increase
center = np.cumsum(center.sum(0))
first_line = center - 0.5 * total
stack += first_line
else:
errstr = "Baseline method %s not recognised. " % baseline
errstr += "Expected 'zero', 'sym', 'wiggle' or 'weighted_wiggle'"
raise ValueError(errstr)
# Color between x = 0 and the first array.
color = axes._get_lines.get_next_color()
coll = axes.fill_between(x, first_line, stack[0, :],
facecolor=color, label=next(labels, None),
**kwargs)
coll.sticky_edges.y[:] = [0]
r = [coll]
# Color between array i-1 and array i
for i in xrange(len(y) - 1):
color = axes._get_lines.get_next_color()
r.append(axes.fill_between(x, stack[i, :], stack[i + 1, :],
facecolor=color, label=next(labels, None),
**kwargs))
return r
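# ---------------------------------------------------------------------------
# Illustrative sketch only -- not part of matplotlib. The helper below (the
# name _demo_stackplot_baselines is ours) compares the four baseline methods
# documented above on a small random data set, via the Axes.stackplot wrapper
# that forwards to the stackplot() function in this module.
def _demo_stackplot_baselines():
    import numpy as np
    import matplotlib.pyplot as plt
    rng = np.random.RandomState(0)
    x = np.arange(50)
    y = rng.rand(4, 50) + 0.1          # four unstacked 1xN series
    fig, axs = plt.subplots(2, 2, figsize=(8, 6))
    for ax, baseline in zip(axs.flat,
                            ['zero', 'sym', 'wiggle', 'weighted_wiggle']):
        ax.stackplot(x, y, baseline=baseline, labels=['a', 'b', 'c', 'd'])
        ax.set_title(baseline)
    axs[0, 0].legend(loc='upper left')
    fig.tight_layout()
    return fig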
| 4,199 | 31.55814 | 94 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/patheffects.py |
"""
Defines classes for path effects. The path effects are supported in
:class:`~matplotlib.text.Text`, :class:`~matplotlib.lines.Line2D`
and :class:`~matplotlib.patches.Patch`.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from matplotlib.backend_bases import RendererBase
from matplotlib import colors as mcolors
from matplotlib import patches as mpatches
from matplotlib import transforms as mtransforms
class AbstractPathEffect(object):
"""
A base class for path effects.
Subclasses should override the ``draw_path`` method to add effect
functionality.
"""
def __init__(self, offset=(0., 0.)):
"""
Parameters
----------
offset : pair of floats
The offset to apply to the path, measured in points.
"""
self._offset = offset
self._offset_trans = mtransforms.Affine2D()
def _offset_transform(self, renderer, transform):
"""Apply the offset to the given transform."""
offset_x = renderer.points_to_pixels(self._offset[0])
offset_y = renderer.points_to_pixels(self._offset[1])
return transform + self._offset_trans.clear().translate(offset_x,
offset_y)
def _update_gc(self, gc, new_gc_dict):
"""
        Update the given GraphicsContext with the given
dictionary of properties. The keys in the dictionary are used to
identify the appropriate set_ method on the gc.
"""
new_gc_dict = new_gc_dict.copy()
dashes = new_gc_dict.pop("dashes", None)
if dashes:
gc.set_dashes(**dashes)
for k, v in six.iteritems(new_gc_dict):
set_method = getattr(gc, 'set_' + k, None)
if not callable(set_method):
raise AttributeError('Unknown property {0}'.format(k))
set_method(v)
return gc
def draw_path(self, renderer, gc, tpath, affine, rgbFace=None):
"""
        Derived classes should override this method. The arguments are the same
as :meth:`matplotlib.backend_bases.RendererBase.draw_path`
except the first argument is a renderer.
"""
# Get the real renderer, not a PathEffectRenderer.
if isinstance(renderer, PathEffectRenderer):
renderer = renderer._renderer
return renderer.draw_path(gc, tpath, affine, rgbFace)
class PathEffectRenderer(RendererBase):
"""
Implements a Renderer which contains another renderer.
This proxy then intercepts draw calls, calling the appropriate
:class:`AbstractPathEffect` draw method.
.. note::
Not all methods have been overridden on this RendererBase subclass.
It may be necessary to add further methods to extend the PathEffects
capabilities further.
"""
def __init__(self, path_effects, renderer):
"""
Parameters
----------
path_effects : iterable of :class:`AbstractPathEffect`
The path effects which this renderer represents.
renderer : :class:`matplotlib.backend_bases.RendererBase` instance
"""
self._path_effects = path_effects
self._renderer = renderer
def new_gc(self):
return self._renderer.new_gc()
def copy_with_path_effect(self, path_effects):
return self.__class__(path_effects, self._renderer)
def draw_path(self, gc, tpath, affine, rgbFace=None):
for path_effect in self._path_effects:
path_effect.draw_path(self._renderer, gc, tpath, affine,
rgbFace)
def draw_markers(self, gc, marker_path, marker_trans, path, *args,
**kwargs):
# We do a little shimmy so that all markers are drawn for each path
# effect in turn. Essentially, we induce recursion (depth 1) which is
# terminated once we have just a single path effect to work with.
if len(self._path_effects) == 1:
# Call the base path effect function - this uses the unoptimised
# approach of calling "draw_path" multiple times.
return RendererBase.draw_markers(self, gc, marker_path,
marker_trans, path, *args,
**kwargs)
for path_effect in self._path_effects:
renderer = self.copy_with_path_effect([path_effect])
# Recursively call this method, only next time we will only have
# one path effect.
renderer.draw_markers(gc, marker_path, marker_trans, path,
*args, **kwargs)
def draw_path_collection(self, gc, master_transform, paths, *args,
**kwargs):
# We do a little shimmy so that all paths are drawn for each path
# effect in turn. Essentially, we induce recursion (depth 1) which is
# terminated once we have just a single path effect to work with.
if len(self._path_effects) == 1:
# Call the base path effect function - this uses the unoptimised
# approach of calling "draw_path" multiple times.
return RendererBase.draw_path_collection(self, gc,
master_transform, paths,
*args, **kwargs)
for path_effect in self._path_effects:
renderer = self.copy_with_path_effect([path_effect])
# Recursively call this method, only next time we will only have
# one path effect.
renderer.draw_path_collection(gc, master_transform, paths,
*args, **kwargs)
def points_to_pixels(self, points):
return self._renderer.points_to_pixels(points)
def _draw_text_as_path(self, gc, x, y, s, prop, angle, ismath):
# Implements the naive text drawing as is found in RendererBase.
path, transform = self._get_text_path_transform(x, y, s, prop,
angle, ismath)
color = gc.get_rgb()
gc.set_linewidth(0.0)
self.draw_path(gc, path, transform, rgbFace=color)
def __getattribute__(self, name):
if name in ['_text2path', 'flipy', 'height', 'width']:
return getattr(self._renderer, name)
else:
return object.__getattribute__(self, name)
class Normal(AbstractPathEffect):
"""
The "identity" PathEffect.
The Normal PathEffect's sole purpose is to draw the original artist with
no special path effect.
"""
pass
class Stroke(AbstractPathEffect):
"""A line based PathEffect which re-draws a stroke."""
def __init__(self, offset=(0, 0), **kwargs):
"""
The path will be stroked with its gc updated with the given
keyword arguments, i.e., the keyword arguments should be valid
gc parameter values.
"""
super(Stroke, self).__init__(offset)
self._gc = kwargs
def draw_path(self, renderer, gc, tpath, affine, rgbFace):
"""
draw the path with updated gc.
"""
# Do not modify the input! Use copy instead.
gc0 = renderer.new_gc()
gc0.copy_properties(gc)
gc0 = self._update_gc(gc0, self._gc)
trans = self._offset_transform(renderer, affine)
renderer.draw_path(gc0, tpath, trans, rgbFace)
gc0.restore()
class withStroke(Stroke):
"""
Adds a simple :class:`Stroke` and then draws the
original Artist to avoid needing to call :class:`Normal`.
"""
def draw_path(self, renderer, gc, tpath, affine, rgbFace):
Stroke.draw_path(self, renderer, gc, tpath, affine, rgbFace)
renderer.draw_path(gc, tpath, affine, rgbFace)
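# ---------------------------------------------------------------------------
# Illustrative sketch only -- not part of matplotlib. The helper below (the
# name _demo_with_stroke is ours) shows the usual way the classes above are
# consumed: Stroke keyword arguments become GraphicsContext properties, and
# withStroke additionally re-draws the artist on top of the widened stroke.
def _demo_with_stroke():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    text = ax.text(0.5, 0.5, 'outlined text', size=30, ha='center',
                   va='center', color='white')
    # linewidth/foreground are forwarded to gc.set_linewidth/set_foreground
    # by AbstractPathEffect._update_gc.
    text.set_path_effects([withStroke(linewidth=4, foreground='black')])
    return fig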
class SimplePatchShadow(AbstractPathEffect):
"""A simple shadow via a filled patch."""
def __init__(self, offset=(2, -2),
shadow_rgbFace=None, alpha=None,
rho=0.3, **kwargs):
"""
Parameters
----------
offset : pair of floats
The offset of the shadow in points.
shadow_rgbFace : color
The shadow color.
alpha : float
The alpha transparency of the created shadow patch.
Default is 0.3.
http://matplotlib.1069221.n5.nabble.com/path-effects-question-td27630.html
rho : float
A scale factor to apply to the rgbFace color if `shadow_rgbFace`
is not specified. Default is 0.3.
**kwargs
Extra keywords are stored and passed through to
:meth:`AbstractPathEffect._update_gc`.
"""
super(SimplePatchShadow, self).__init__(offset)
if shadow_rgbFace is None:
self._shadow_rgbFace = shadow_rgbFace
else:
self._shadow_rgbFace = mcolors.to_rgba(shadow_rgbFace)
if alpha is None:
alpha = 0.3
self._alpha = alpha
self._rho = rho
#: The dictionary of keywords to update the graphics collection with.
self._gc = kwargs
#: The offset transform object. The offset isn't calculated yet
#: as we don't know how big the figure will be in pixels.
self._offset_tran = mtransforms.Affine2D()
def draw_path(self, renderer, gc, tpath, affine, rgbFace):
"""
Overrides the standard draw_path to add the shadow offset and
necessary color changes for the shadow.
"""
# IMPORTANT: Do not modify the input - we copy everything instead.
affine0 = self._offset_transform(renderer, affine)
gc0 = renderer.new_gc()
gc0.copy_properties(gc)
if self._shadow_rgbFace is None:
r,g,b = (rgbFace or (1., 1., 1.))[:3]
# Scale the colors by a factor to improve the shadow effect.
shadow_rgbFace = (r * self._rho, g * self._rho, b * self._rho)
else:
shadow_rgbFace = self._shadow_rgbFace
gc0.set_foreground("none")
gc0.set_alpha(self._alpha)
gc0.set_linewidth(0)
gc0 = self._update_gc(gc0, self._gc)
renderer.draw_path(gc0, tpath, affine0, shadow_rgbFace)
gc0.restore()
class withSimplePatchShadow(SimplePatchShadow):
"""
Adds a simple :class:`SimplePatchShadow` and then draws the
original Artist to avoid needing to call :class:`Normal`.
"""
def draw_path(self, renderer, gc, tpath, affine, rgbFace):
SimplePatchShadow.draw_path(self, renderer, gc, tpath, affine, rgbFace)
renderer.draw_path(gc, tpath, affine, rgbFace)
class SimpleLineShadow(AbstractPathEffect):
"""A simple shadow via a line."""
def __init__(self, offset=(2,-2),
shadow_color='k', alpha=0.3, rho=0.3, **kwargs):
"""
Parameters
----------
offset : pair of floats
The offset to apply to the path, in points.
shadow_color : color
The shadow color. Default is black.
A value of ``None`` takes the original artist's color
with a scale factor of `rho`.
alpha : float
The alpha transparency of the created shadow patch.
Default is 0.3.
rho : float
A scale factor to apply to the rgbFace color if `shadow_rgbFace`
is ``None``. Default is 0.3.
**kwargs
Extra keywords are stored and passed through to
:meth:`AbstractPathEffect._update_gc`.
"""
super(SimpleLineShadow, self).__init__(offset)
if shadow_color is None:
self._shadow_color = shadow_color
else:
self._shadow_color = mcolors.to_rgba(shadow_color)
self._alpha = alpha
self._rho = rho
#: The dictionary of keywords to update the graphics collection with.
self._gc = kwargs
#: The offset transform object. The offset isn't calculated yet
#: as we don't know how big the figure will be in pixels.
self._offset_tran = mtransforms.Affine2D()
def draw_path(self, renderer, gc, tpath, affine, rgbFace):
"""
Overrides the standard draw_path to add the shadow offset and
necessary color changes for the shadow.
"""
# IMPORTANT: Do not modify the input - we copy everything instead.
affine0 = self._offset_transform(renderer, affine)
gc0 = renderer.new_gc()
gc0.copy_properties(gc)
if self._shadow_color is None:
r,g,b = (gc0.get_foreground() or (1., 1., 1.))[:3]
# Scale the colors by a factor to improve the shadow effect.
shadow_rgbFace = (r * self._rho, g * self._rho, b * self._rho)
else:
shadow_rgbFace = self._shadow_color
fill_color = None
gc0.set_foreground(shadow_rgbFace)
gc0.set_alpha(self._alpha)
gc0 = self._update_gc(gc0, self._gc)
renderer.draw_path(gc0, tpath, affine0, fill_color)
gc0.restore()
class PathPatchEffect(AbstractPathEffect):
"""
Draws a :class:`~matplotlib.patches.PathPatch` instance whose Path
comes from the original PathEffect artist.
"""
def __init__(self, offset=(0, 0), **kwargs):
"""
Parameters
----------
offset : pair of floats
The offset to apply to the path, in points.
**kwargs :
All keyword arguments are passed through to the
:class:`~matplotlib.patches.PathPatch` constructor. The
properties which cannot be overridden are "path", "clip_box"
"transform" and "clip_path".
"""
super(PathPatchEffect, self).__init__(offset=offset)
self.patch = mpatches.PathPatch([], **kwargs)
def draw_path(self, renderer, gc, tpath, affine, rgbFace):
affine = self._offset_transform(renderer, affine)
self.patch._path = tpath
self.patch.set_transform(affine)
self.patch.set_clip_box(gc.get_clip_rectangle())
clip_path = gc.get_clip_path()
if clip_path:
self.patch.set_clip_path(*clip_path)
self.patch.draw(renderer)
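# ---------------------------------------------------------------------------
# Illustrative sketch only -- not part of matplotlib. The helper below (the
# name _demo_path_patch_effect is ours) stacks two PathPatchEffect instances
# so that arbitrary Patch properties (hatch, edgecolor, ...) are applied to
# the text outline, as described in the class docstring above.
def _demo_path_patch_effect():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    t = ax.text(0.5, 0.5, 'Hatched shadow', size=40, ha='center',
                va='center', weight='bold')
    t.set_path_effects([
        PathPatchEffect(offset=(4, -4), hatch='xxxx', facecolor='gray'),
        PathPatchEffect(edgecolor='white', linewidth=1.1, facecolor='black'),
    ])
    return fig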
| 14,346 | 35.413706 | 86 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/category.py |
# -*- coding: utf-8 -*-
"""
Module that allows plotting of string "category" data, e.g.
``plot(['d', 'f', 'a'],[1, 2, 3])`` will plot three points with x-axis
values of 'd', 'f', 'a'.
See :doc:`/gallery/lines_bars_and_markers/categorical_variables` for an
example.
The module uses Matplotlib's `matplotlib.units` mechanism to convert from
strings to integers, provides a tick locator and formatter, and the
:class:`.UnitData` that creates and stores the string-to-integer mapping.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import itertools
import six
import numpy as np
import matplotlib.units as units
import matplotlib.ticker as ticker
# np 1.6/1.7 support
from distutils.version import LooseVersion
VALID_TYPES = tuple(set(six.string_types +
(bytes, six.text_type, np.str_, np.bytes_)))
class StrCategoryConverter(units.ConversionInterface):
@staticmethod
def convert(value, unit, axis):
"""Converts strings in value to floats using
        mapping information stored in the unit object
Parameters
----------
value : string or iterable
value or list of values to be converted
unit : :class:`.UnitData`
object string unit information for value
axis : :class:`~matplotlib.Axis.axis`
axis on which the converted value is plotted
Returns
-------
        mapped_value : float or ndarray[float]
.. note:: axis is not used in this function
"""
# dtype = object preserves numerical pass throughs
values = np.atleast_1d(np.array(value, dtype=object))
# pass through sequence of non binary numbers
if all((units.ConversionInterface.is_numlike(v) and
not isinstance(v, VALID_TYPES)) for v in values):
return np.asarray(values, dtype=float)
# force an update so it also does type checking
unit.update(values)
str2idx = np.vectorize(unit._mapping.__getitem__,
otypes=[float])
mapped_value = str2idx(values)
return mapped_value
@staticmethod
def axisinfo(unit, axis):
"""Sets the default axis ticks and labels
Parameters
        ----------
unit : :class:`.UnitData`
object string unit information for value
axis : :class:`~matplotlib.Axis.axis`
axis for which information is being set
Returns
-------
        :class:`~matplotlib.units.AxisInfo`
Information to support default tick labeling
        .. note:: axis is not used
"""
# locator and formatter take mapping dict because
        # args need to be passed by reference for updates
majloc = StrCategoryLocator(unit._mapping)
majfmt = StrCategoryFormatter(unit._mapping)
return units.AxisInfo(majloc=majloc, majfmt=majfmt)
@staticmethod
def default_units(data, axis):
""" Sets and updates the :class:`~matplotlib.Axis.axis~ units
Parameters
----------
data : string or iterable of strings
axis : :class:`~matplotlib.Axis.axis`
axis on which the data is plotted
Returns
-------
        :class:`~.UnitData`
object storing string to integer mapping
"""
# the conversion call stack is supposed to be
# default_units->axis_info->convert
if axis.units is None:
axis.set_units(UnitData(data))
else:
axis.units.update(data)
return axis.units
class StrCategoryLocator(ticker.Locator):
"""tick at every integer mapping of the string data"""
def __init__(self, units_mapping):
"""
Parameters
        ----------
units_mapping : Dict[str, int]
string:integer mapping
"""
self._units = units_mapping
def __call__(self):
return list(self._units.values())
def tick_values(self, vmin, vmax):
return self()
class StrCategoryFormatter(ticker.Formatter):
"""String representation of the data at every tick"""
def __init__(self, units_mapping):
"""
Parameters
----------
        units_mapping : Dict[str, int]
string:integer mapping
"""
self._units = units_mapping
def __call__(self, x, pos=None):
if pos is None:
return ""
r_mapping = {v: StrCategoryFormatter._text(k)
for k, v in self._units.items()}
return r_mapping.get(int(np.round(x)), '')
@staticmethod
def _text(value):
"""Converts text values into `utf-8` or `ascii` strings
"""
if LooseVersion(np.__version__) < LooseVersion('1.7.0'):
if (isinstance(value, (six.text_type, np.unicode))):
value = value.encode('utf-8', 'ignore').decode('utf-8')
if isinstance(value, (np.bytes_, six.binary_type)):
value = value.decode(encoding='utf-8')
elif not isinstance(value, (np.str_, six.string_types)):
value = str(value)
return value
class UnitData(object):
def __init__(self, data=None):
"""Create mapping between unique categorical values
        and integer identifiers.
        Parameters
        ----------
data: iterable
sequence of string values
"""
self._mapping = OrderedDict()
self._counter = itertools.count(start=0)
if data is not None:
self.update(data)
def update(self, data):
"""Maps new values to integer identifiers.
        Parameters
        ----------
data: iterable
sequence of string values
Raises
------
TypeError
            If a value in *data* is not a string, unicode, or bytes type
"""
data = np.atleast_1d(np.array(data, dtype=object))
for val in OrderedDict.fromkeys(data):
if not isinstance(val, VALID_TYPES):
raise TypeError("{val!r} is not a string".format(val=val))
if val not in self._mapping:
self._mapping[val] = next(self._counter)
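# ---------------------------------------------------------------------------
# Illustrative sketch only -- not part of matplotlib. The helper below (the
# name _demo_str_category_convert is ours) exercises the converter and the
# UnitData mapping defined above; it passes axis=None because the converter
# ignores that argument, and it peeks at the private _mapping attribute
# purely for demonstration.
def _demo_str_category_convert():
    unit = UnitData(['apple', 'banana'])
    # Strings are mapped to their integer identifiers (as floats)...
    assert list(StrCategoryConverter.convert(['banana', 'apple'],
                                             unit, None)) == [1.0, 0.0]
    # ...purely numeric input passes straight through...
    assert list(StrCategoryConverter.convert([3, 4.5], unit, None)) == [3.0, 4.5]
    # ...and unseen strings extend the mapping on the fly.
    StrCategoryConverter.convert('cherry', unit, None)
    assert unit._mapping['cherry'] == 2
    return unit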
# Connect the converter to matplotlib's unit registry.
units.registry[str] = StrCategoryConverter()
units.registry[np.str_] = StrCategoryConverter()
units.registry[six.text_type] = StrCategoryConverter()
units.registry[bytes] = StrCategoryConverter()
units.registry[np.bytes_] = StrCategoryConverter()
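# ---------------------------------------------------------------------------
# Illustrative sketch only -- not part of matplotlib. The helper below (the
# name _demo_categorical_plot is ours) shows the end-user view: once the
# registry entries above are in place, string data can be plotted directly
# and the ticks are labelled with the original strings.
def _demo_categorical_plot():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.bar(['cats', 'dogs', 'fish'], [5, 3, 7])
    # The x-axis now owns a UnitData instance mapping each category to an
    # integer position ({'cats': 0, 'dogs': 1, 'fish': 2}); _mapping is a
    # private detail shown here only for illustration.
    print(ax.xaxis.units._mapping)
    return fig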
| 6,548 | 29.891509 | 74 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_version.py |
# This file was generated by 'versioneer.py' (0.15) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
import sys
version_json = '''
{
"dirty": false,
"error": null,
"full-revisionid": "b471ee21cbe0d0fdd9cf5142d0a4be01517f4e68",
"version": "2.2.2"
}
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
| 471 | 20.454545 | 77 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_color_data.py |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import six
BASE_COLORS = {
'b': (0, 0, 1),
'g': (0, 0.5, 0),
'r': (1, 0, 0),
'c': (0, 0.75, 0.75),
'm': (0.75, 0, 0.75),
'y': (0.75, 0.75, 0),
'k': (0, 0, 0),
'w': (1, 1, 1)}
# These colors are from Tableau
TABLEAU_COLORS = (
('blue', '#1f77b4'),
('orange', '#ff7f0e'),
('green', '#2ca02c'),
('red', '#d62728'),
('purple', '#9467bd'),
('brown', '#8c564b'),
('pink', '#e377c2'),
('gray', '#7f7f7f'),
('olive', '#bcbd22'),
('cyan', '#17becf'),
)
# Normalize name to "tab:<name>" to avoid name collisions.
TABLEAU_COLORS = OrderedDict(
('tab:' + name, value) for name, value in TABLEAU_COLORS)
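# Illustrative sketch only -- not part of this module. After the
# normalization above, the palette is reached through the "tab:" prefix
# anywhere matplotlib accepts a color (the helper name _demo_tableau_names
# is ours).
def _demo_tableau_names():
    import matplotlib.colors as mcolors
    # 'tab:blue' resolves to the hex value stored in TABLEAU_COLORS.
    assert mcolors.to_hex('tab:blue') == TABLEAU_COLORS['tab:blue']
    return list(TABLEAU_COLORS)  # ['tab:blue', 'tab:orange', ...]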
# This mapping of color names -> hex values is taken from
# a survey run by Randall Munroe; see:
# http://blog.xkcd.com/2010/05/03/color-survey-results/
# for more details. The results are hosted at
# https://xkcd.com/color/rgb.txt
#
# License: http://creativecommons.org/publicdomain/zero/1.0/
XKCD_COLORS = {
'cloudy blue': '#acc2d9',
'dark pastel green': '#56ae57',
'dust': '#b2996e',
'electric lime': '#a8ff04',
'fresh green': '#69d84f',
'light eggplant': '#894585',
'nasty green': '#70b23f',
'really light blue': '#d4ffff',
'tea': '#65ab7c',
'warm purple': '#952e8f',
'yellowish tan': '#fcfc81',
'cement': '#a5a391',
'dark grass green': '#388004',
'dusty teal': '#4c9085',
'grey teal': '#5e9b8a',
'macaroni and cheese': '#efb435',
'pinkish tan': '#d99b82',
'spruce': '#0a5f38',
'strong blue': '#0c06f7',
'toxic green': '#61de2a',
'windows blue': '#3778bf',
'blue blue': '#2242c7',
'blue with a hint of purple': '#533cc6',
'booger': '#9bb53c',
'bright sea green': '#05ffa6',
'dark green blue': '#1f6357',
'deep turquoise': '#017374',
'green teal': '#0cb577',
'strong pink': '#ff0789',
'bland': '#afa88b',
'deep aqua': '#08787f',
'lavender pink': '#dd85d7',
'light moss green': '#a6c875',
'light seafoam green': '#a7ffb5',
'olive yellow': '#c2b709',
'pig pink': '#e78ea5',
'deep lilac': '#966ebd',
'desert': '#ccad60',
'dusty lavender': '#ac86a8',
'purpley grey': '#947e94',
'purply': '#983fb2',
'candy pink': '#ff63e9',
'light pastel green': '#b2fba5',
'boring green': '#63b365',
'kiwi green': '#8ee53f',
'light grey green': '#b7e1a1',
'orange pink': '#ff6f52',
'tea green': '#bdf8a3',
'very light brown': '#d3b683',
'egg shell': '#fffcc4',
'eggplant purple': '#430541',
'powder pink': '#ffb2d0',
'reddish grey': '#997570',
'baby shit brown': '#ad900d',
'liliac': '#c48efd',
'stormy blue': '#507b9c',
'ugly brown': '#7d7103',
'custard': '#fffd78',
'darkish pink': '#da467d',
'deep brown': '#410200',
'greenish beige': '#c9d179',
'manilla': '#fffa86',
'off blue': '#5684ae',
'battleship grey': '#6b7c85',
'browny green': '#6f6c0a',
'bruise': '#7e4071',
'kelley green': '#009337',
'sickly yellow': '#d0e429',
'sunny yellow': '#fff917',
'azul': '#1d5dec',
'darkgreen': '#054907',
'green/yellow': '#b5ce08',
'lichen': '#8fb67b',
'light light green': '#c8ffb0',
'pale gold': '#fdde6c',
'sun yellow': '#ffdf22',
'tan green': '#a9be70',
'burple': '#6832e3',
'butterscotch': '#fdb147',
'toupe': '#c7ac7d',
'dark cream': '#fff39a',
'indian red': '#850e04',
'light lavendar': '#efc0fe',
'poison green': '#40fd14',
'baby puke green': '#b6c406',
'bright yellow green': '#9dff00',
'charcoal grey': '#3c4142',
'squash': '#f2ab15',
'cinnamon': '#ac4f06',
'light pea green': '#c4fe82',
'radioactive green': '#2cfa1f',
'raw sienna': '#9a6200',
'baby purple': '#ca9bf7',
'cocoa': '#875f42',
'light royal blue': '#3a2efe',
'orangeish': '#fd8d49',
'rust brown': '#8b3103',
'sand brown': '#cba560',
'swamp': '#698339',
'tealish green': '#0cdc73',
'burnt siena': '#b75203',
'camo': '#7f8f4e',
'dusk blue': '#26538d',
'fern': '#63a950',
'old rose': '#c87f89',
'pale light green': '#b1fc99',
'peachy pink': '#ff9a8a',
'rosy pink': '#f6688e',
'light bluish green': '#76fda8',
'light bright green': '#53fe5c',
'light neon green': '#4efd54',
'light seafoam': '#a0febf',
'tiffany blue': '#7bf2da',
'washed out green': '#bcf5a6',
'browny orange': '#ca6b02',
'nice blue': '#107ab0',
'sapphire': '#2138ab',
'greyish teal': '#719f91',
'orangey yellow': '#fdb915',
'parchment': '#fefcaf',
'straw': '#fcf679',
'very dark brown': '#1d0200',
'terracota': '#cb6843',
'ugly blue': '#31668a',
'clear blue': '#247afd',
'creme': '#ffffb6',
'foam green': '#90fda9',
'grey/green': '#86a17d',
'light gold': '#fddc5c',
'seafoam blue': '#78d1b6',
'topaz': '#13bbaf',
'violet pink': '#fb5ffc',
'wintergreen': '#20f986',
'yellow tan': '#ffe36e',
'dark fuchsia': '#9d0759',
'indigo blue': '#3a18b1',
'light yellowish green': '#c2ff89',
'pale magenta': '#d767ad',
'rich purple': '#720058',
'sunflower yellow': '#ffda03',
'green/blue': '#01c08d',
'leather': '#ac7434',
'racing green': '#014600',
'vivid purple': '#9900fa',
'dark royal blue': '#02066f',
'hazel': '#8e7618',
'muted pink': '#d1768f',
'booger green': '#96b403',
'canary': '#fdff63',
'cool grey': '#95a3a6',
'dark taupe': '#7f684e',
'darkish purple': '#751973',
'true green': '#089404',
'coral pink': '#ff6163',
'dark sage': '#598556',
'dark slate blue': '#214761',
'flat blue': '#3c73a8',
'mushroom': '#ba9e88',
'rich blue': '#021bf9',
'dirty purple': '#734a65',
'greenblue': '#23c48b',
'icky green': '#8fae22',
'light khaki': '#e6f2a2',
'warm blue': '#4b57db',
'dark hot pink': '#d90166',
'deep sea blue': '#015482',
'carmine': '#9d0216',
'dark yellow green': '#728f02',
'pale peach': '#ffe5ad',
'plum purple': '#4e0550',
'golden rod': '#f9bc08',
'neon red': '#ff073a',
'old pink': '#c77986',
'very pale blue': '#d6fffe',
'blood orange': '#fe4b03',
'grapefruit': '#fd5956',
'sand yellow': '#fce166',
'clay brown': '#b2713d',
'dark blue grey': '#1f3b4d',
'flat green': '#699d4c',
'light green blue': '#56fca2',
'warm pink': '#fb5581',
'dodger blue': '#3e82fc',
'gross green': '#a0bf16',
'ice': '#d6fffa',
'metallic blue': '#4f738e',
'pale salmon': '#ffb19a',
'sap green': '#5c8b15',
'algae': '#54ac68',
'bluey grey': '#89a0b0',
'greeny grey': '#7ea07a',
'highlighter green': '#1bfc06',
'light light blue': '#cafffb',
'light mint': '#b6ffbb',
'raw umber': '#a75e09',
'vivid blue': '#152eff',
'deep lavender': '#8d5eb7',
'dull teal': '#5f9e8f',
'light greenish blue': '#63f7b4',
'mud green': '#606602',
'pinky': '#fc86aa',
'red wine': '#8c0034',
'shit green': '#758000',
'tan brown': '#ab7e4c',
'darkblue': '#030764',
'rosa': '#fe86a4',
'lipstick': '#d5174e',
'pale mauve': '#fed0fc',
'claret': '#680018',
'dandelion': '#fedf08',
'orangered': '#fe420f',
'poop green': '#6f7c00',
'ruby': '#ca0147',
'dark': '#1b2431',
'greenish turquoise': '#00fbb0',
'pastel red': '#db5856',
'piss yellow': '#ddd618',
'bright cyan': '#41fdfe',
'dark coral': '#cf524e',
'algae green': '#21c36f',
'darkish red': '#a90308',
'reddy brown': '#6e1005',
'blush pink': '#fe828c',
'camouflage green': '#4b6113',
'lawn green': '#4da409',
'putty': '#beae8a',
'vibrant blue': '#0339f8',
'dark sand': '#a88f59',
'purple/blue': '#5d21d0',
'saffron': '#feb209',
'twilight': '#4e518b',
'warm brown': '#964e02',
'bluegrey': '#85a3b2',
'bubble gum pink': '#ff69af',
'duck egg blue': '#c3fbf4',
'greenish cyan': '#2afeb7',
'petrol': '#005f6a',
'royal': '#0c1793',
'butter': '#ffff81',
'dusty orange': '#f0833a',
'off yellow': '#f1f33f',
'pale olive green': '#b1d27b',
'orangish': '#fc824a',
'leaf': '#71aa34',
'light blue grey': '#b7c9e2',
'dried blood': '#4b0101',
'lightish purple': '#a552e6',
'rusty red': '#af2f0d',
'lavender blue': '#8b88f8',
'light grass green': '#9af764',
'light mint green': '#a6fbb2',
'sunflower': '#ffc512',
'velvet': '#750851',
'brick orange': '#c14a09',
'lightish red': '#fe2f4a',
'pure blue': '#0203e2',
'twilight blue': '#0a437a',
'violet red': '#a50055',
'yellowy brown': '#ae8b0c',
'carnation': '#fd798f',
'muddy yellow': '#bfac05',
'dark seafoam green': '#3eaf76',
'deep rose': '#c74767',
'dusty red': '#b9484e',
'grey/blue': '#647d8e',
'lemon lime': '#bffe28',
'purple/pink': '#d725de',
'brown yellow': '#b29705',
'purple brown': '#673a3f',
'wisteria': '#a87dc2',
'banana yellow': '#fafe4b',
'lipstick red': '#c0022f',
'water blue': '#0e87cc',
'brown grey': '#8d8468',
'vibrant purple': '#ad03de',
'baby green': '#8cff9e',
'barf green': '#94ac02',
'eggshell blue': '#c4fff7',
'sandy yellow': '#fdee73',
'cool green': '#33b864',
'pale': '#fff9d0',
'blue/grey': '#758da3',
'hot magenta': '#f504c9',
'greyblue': '#77a1b5',
'purpley': '#8756e4',
'baby shit green': '#889717',
'brownish pink': '#c27e79',
'dark aquamarine': '#017371',
'diarrhea': '#9f8303',
'light mustard': '#f7d560',
'pale sky blue': '#bdf6fe',
'turtle green': '#75b84f',
'bright olive': '#9cbb04',
'dark grey blue': '#29465b',
'greeny brown': '#696006',
'lemon green': '#adf802',
'light periwinkle': '#c1c6fc',
'seaweed green': '#35ad6b',
'sunshine yellow': '#fffd37',
'ugly purple': '#a442a0',
'medium pink': '#f36196',
'puke brown': '#947706',
'very light pink': '#fff4f2',
'viridian': '#1e9167',
'bile': '#b5c306',
'faded yellow': '#feff7f',
'very pale green': '#cffdbc',
'vibrant green': '#0add08',
'bright lime': '#87fd05',
'spearmint': '#1ef876',
'light aquamarine': '#7bfdc7',
'light sage': '#bcecac',
'yellowgreen': '#bbf90f',
'baby poo': '#ab9004',
'dark seafoam': '#1fb57a',
'deep teal': '#00555a',
'heather': '#a484ac',
'rust orange': '#c45508',
'dirty blue': '#3f829d',
'fern green': '#548d44',
'bright lilac': '#c95efb',
'weird green': '#3ae57f',
'peacock blue': '#016795',
'avocado green': '#87a922',
'faded orange': '#f0944d',
'grape purple': '#5d1451',
'hot green': '#25ff29',
'lime yellow': '#d0fe1d',
'mango': '#ffa62b',
'shamrock': '#01b44c',
'bubblegum': '#ff6cb5',
'purplish brown': '#6b4247',
'vomit yellow': '#c7c10c',
'pale cyan': '#b7fffa',
'key lime': '#aeff6e',
'tomato red': '#ec2d01',
'lightgreen': '#76ff7b',
'merlot': '#730039',
'night blue': '#040348',
'purpleish pink': '#df4ec8',
'apple': '#6ecb3c',
'baby poop green': '#8f9805',
'green apple': '#5edc1f',
'heliotrope': '#d94ff5',
'yellow/green': '#c8fd3d',
'almost black': '#070d0d',
'cool blue': '#4984b8',
'leafy green': '#51b73b',
'mustard brown': '#ac7e04',
'dusk': '#4e5481',
'dull brown': '#876e4b',
'frog green': '#58bc08',
'vivid green': '#2fef10',
'bright light green': '#2dfe54',
'fluro green': '#0aff02',
'kiwi': '#9cef43',
'seaweed': '#18d17b',
'navy green': '#35530a',
'ultramarine blue': '#1805db',
'iris': '#6258c4',
'pastel orange': '#ff964f',
'yellowish orange': '#ffab0f',
'perrywinkle': '#8f8ce7',
'tealish': '#24bca8',
'dark plum': '#3f012c',
'pear': '#cbf85f',
'pinkish orange': '#ff724c',
'midnight purple': '#280137',
'light urple': '#b36ff6',
'dark mint': '#48c072',
'greenish tan': '#bccb7a',
'light burgundy': '#a8415b',
'turquoise blue': '#06b1c4',
'ugly pink': '#cd7584',
'sandy': '#f1da7a',
'electric pink': '#ff0490',
'muted purple': '#805b87',
'mid green': '#50a747',
'greyish': '#a8a495',
'neon yellow': '#cfff04',
'banana': '#ffff7e',
'carnation pink': '#ff7fa7',
'tomato': '#ef4026',
'sea': '#3c9992',
'muddy brown': '#886806',
'turquoise green': '#04f489',
'buff': '#fef69e',
'fawn': '#cfaf7b',
'muted blue': '#3b719f',
'pale rose': '#fdc1c5',
'dark mint green': '#20c073',
'amethyst': '#9b5fc0',
'blue/green': '#0f9b8e',
'chestnut': '#742802',
'sick green': '#9db92c',
'pea': '#a4bf20',
'rusty orange': '#cd5909',
'stone': '#ada587',
'rose red': '#be013c',
'pale aqua': '#b8ffeb',
'deep orange': '#dc4d01',
'earth': '#a2653e',
'mossy green': '#638b27',
'grassy green': '#419c03',
'pale lime green': '#b1ff65',
'light grey blue': '#9dbcd4',
'pale grey': '#fdfdfe',
'asparagus': '#77ab56',
'blueberry': '#464196',
'purple red': '#990147',
'pale lime': '#befd73',
'greenish teal': '#32bf84',
'caramel': '#af6f09',
'deep magenta': '#a0025c',
'light peach': '#ffd8b1',
'milk chocolate': '#7f4e1e',
'ocher': '#bf9b0c',
'off green': '#6ba353',
'purply pink': '#f075e6',
'lightblue': '#7bc8f6',
'dusky blue': '#475f94',
'golden': '#f5bf03',
'light beige': '#fffeb6',
'butter yellow': '#fffd74',
'dusky purple': '#895b7b',
'french blue': '#436bad',
'ugly yellow': '#d0c101',
'greeny yellow': '#c6f808',
'orangish red': '#f43605',
'shamrock green': '#02c14d',
'orangish brown': '#b25f03',
'tree green': '#2a7e19',
'deep violet': '#490648',
'gunmetal': '#536267',
'blue/purple': '#5a06ef',
'cherry': '#cf0234',
'sandy brown': '#c4a661',
'warm grey': '#978a84',
'dark indigo': '#1f0954',
'midnight': '#03012d',
'bluey green': '#2bb179',
'grey pink': '#c3909b',
'soft purple': '#a66fb5',
'blood': '#770001',
'brown red': '#922b05',
'medium grey': '#7d7f7c',
'berry': '#990f4b',
'poo': '#8f7303',
'purpley pink': '#c83cb9',
'light salmon': '#fea993',
'snot': '#acbb0d',
'easter purple': '#c071fe',
'light yellow green': '#ccfd7f',
'dark navy blue': '#00022e',
'drab': '#828344',
'light rose': '#ffc5cb',
'rouge': '#ab1239',
'purplish red': '#b0054b',
'slime green': '#99cc04',
'baby poop': '#937c00',
'irish green': '#019529',
'pink/purple': '#ef1de7',
'dark navy': '#000435',
'greeny blue': '#42b395',
'light plum': '#9d5783',
'pinkish grey': '#c8aca9',
'dirty orange': '#c87606',
'rust red': '#aa2704',
'pale lilac': '#e4cbff',
'orangey red': '#fa4224',
'primary blue': '#0804f9',
'kermit green': '#5cb200',
'brownish purple': '#76424e',
'murky green': '#6c7a0e',
'wheat': '#fbdd7e',
'very dark purple': '#2a0134',
'bottle green': '#044a05',
'watermelon': '#fd4659',
'deep sky blue': '#0d75f8',
'fire engine red': '#fe0002',
'yellow ochre': '#cb9d06',
'pumpkin orange': '#fb7d07',
'pale olive': '#b9cc81',
'light lilac': '#edc8ff',
'lightish green': '#61e160',
'carolina blue': '#8ab8fe',
'mulberry': '#920a4e',
'shocking pink': '#fe02a2',
'auburn': '#9a3001',
'bright lime green': '#65fe08',
'celadon': '#befdb7',
'pinkish brown': '#b17261',
'poo brown': '#885f01',
'bright sky blue': '#02ccfe',
'celery': '#c1fd95',
'dirt brown': '#836539',
'strawberry': '#fb2943',
'dark lime': '#84b701',
'copper': '#b66325',
'medium brown': '#7f5112',
'muted green': '#5fa052',
"robin's egg": '#6dedfd',
'bright aqua': '#0bf9ea',
'bright lavender': '#c760ff',
'ivory': '#ffffcb',
'very light purple': '#f6cefc',
'light navy': '#155084',
'pink red': '#f5054f',
'olive brown': '#645403',
'poop brown': '#7a5901',
'mustard green': '#a8b504',
'ocean green': '#3d9973',
'very dark blue': '#000133',
'dusty green': '#76a973',
'light navy blue': '#2e5a88',
'minty green': '#0bf77d',
'adobe': '#bd6c48',
'barney': '#ac1db8',
'jade green': '#2baf6a',
'bright light blue': '#26f7fd',
'light lime': '#aefd6c',
'dark khaki': '#9b8f55',
'orange yellow': '#ffad01',
'ocre': '#c69c04',
'maize': '#f4d054',
'faded pink': '#de9dac',
'british racing green': '#05480d',
'sandstone': '#c9ae74',
'mud brown': '#60460f',
'light sea green': '#98f6b0',
'robin egg blue': '#8af1fe',
'aqua marine': '#2ee8bb',
'dark sea green': '#11875d',
'soft pink': '#fdb0c0',
'orangey brown': '#b16002',
'cherry red': '#f7022a',
'burnt yellow': '#d5ab09',
'brownish grey': '#86775f',
'camel': '#c69f59',
'purplish grey': '#7a687f',
'marine': '#042e60',
'greyish pink': '#c88d94',
'pale turquoise': '#a5fbd5',
'pastel yellow': '#fffe71',
'bluey purple': '#6241c7',
'canary yellow': '#fffe40',
'faded red': '#d3494e',
'sepia': '#985e2b',
'coffee': '#a6814c',
'bright magenta': '#ff08e8',
'mocha': '#9d7651',
'ecru': '#feffca',
'purpleish': '#98568d',
'cranberry': '#9e003a',
'darkish green': '#287c37',
'brown orange': '#b96902',
'dusky rose': '#ba6873',
'melon': '#ff7855',
'sickly green': '#94b21c',
'silver': '#c5c9c7',
'purply blue': '#661aee',
'purpleish blue': '#6140ef',
'hospital green': '#9be5aa',
'shit brown': '#7b5804',
'mid blue': '#276ab3',
'amber': '#feb308',
'easter green': '#8cfd7e',
'soft blue': '#6488ea',
'cerulean blue': '#056eee',
'golden brown': '#b27a01',
'bright turquoise': '#0ffef9',
'red pink': '#fa2a55',
'red purple': '#820747',
'greyish brown': '#7a6a4f',
'vermillion': '#f4320c',
'russet': '#a13905',
'steel grey': '#6f828a',
'lighter purple': '#a55af4',
'bright violet': '#ad0afd',
'prussian blue': '#004577',
'slate green': '#658d6d',
'dirty pink': '#ca7b80',
'dark blue green': '#005249',
'pine': '#2b5d34',
'yellowy green': '#bff128',
'dark gold': '#b59410',
'bluish': '#2976bb',
'darkish blue': '#014182',
'dull red': '#bb3f3f',
'pinky red': '#fc2647',
'bronze': '#a87900',
'pale teal': '#82cbb2',
'military green': '#667c3e',
'barbie pink': '#fe46a5',
'bubblegum pink': '#fe83cc',
'pea soup green': '#94a617',
'dark mustard': '#a88905',
'shit': '#7f5f00',
'medium purple': '#9e43a2',
'very dark green': '#062e03',
'dirt': '#8a6e45',
'dusky pink': '#cc7a8b',
'red violet': '#9e0168',
'lemon yellow': '#fdff38',
'pistachio': '#c0fa8b',
'dull yellow': '#eedc5b',
'dark lime green': '#7ebd01',
'denim blue': '#3b5b92',
'teal blue': '#01889f',
'lightish blue': '#3d7afd',
'purpley blue': '#5f34e7',
'light indigo': '#6d5acf',
'swamp green': '#748500',
'brown green': '#706c11',
'dark maroon': '#3c0008',
'hot purple': '#cb00f5',
'dark forest green': '#002d04',
'faded blue': '#658cbb',
'drab green': '#749551',
'light lime green': '#b9ff66',
'snot green': '#9dc100',
'yellowish': '#faee66',
'light blue green': '#7efbb3',
'bordeaux': '#7b002c',
'light mauve': '#c292a1',
'ocean': '#017b92',
'marigold': '#fcc006',
'muddy green': '#657432',
'dull orange': '#d8863b',
'steel': '#738595',
'electric purple': '#aa23ff',
'fluorescent green': '#08ff08',
'yellowish brown': '#9b7a01',
'blush': '#f29e8e',
'soft green': '#6fc276',
'bright orange': '#ff5b00',
'lemon': '#fdff52',
'purple grey': '#866f85',
'acid green': '#8ffe09',
'pale lavender': '#eecffe',
'violet blue': '#510ac9',
'light forest green': '#4f9153',
'burnt red': '#9f2305',
'khaki green': '#728639',
'cerise': '#de0c62',
'faded purple': '#916e99',
'apricot': '#ffb16d',
'dark olive green': '#3c4d03',
'grey brown': '#7f7053',
'green grey': '#77926f',
'true blue': '#010fcc',
'pale violet': '#ceaefa',
'periwinkle blue': '#8f99fb',
'light sky blue': '#c6fcff',
'blurple': '#5539cc',
'green brown': '#544e03',
'bluegreen': '#017a79',
'bright teal': '#01f9c6',
'brownish yellow': '#c9b003',
'pea soup': '#929901',
'forest': '#0b5509',
'barney purple': '#a00498',
'ultramarine': '#2000b1',
'purplish': '#94568c',
'puke yellow': '#c2be0e',
'bluish grey': '#748b97',
'dark periwinkle': '#665fd1',
'dark lilac': '#9c6da5',
'reddish': '#c44240',
'light maroon': '#a24857',
'dusty purple': '#825f87',
'terra cotta': '#c9643b',
'avocado': '#90b134',
'marine blue': '#01386a',
'teal green': '#25a36f',
'slate grey': '#59656d',
'lighter green': '#75fd63',
'electric green': '#21fc0d',
'dusty blue': '#5a86ad',
'golden yellow': '#fec615',
'bright yellow': '#fffd01',
'light lavender': '#dfc5fe',
'umber': '#b26400',
'poop': '#7f5e00',
'dark peach': '#de7e5d',
'jungle green': '#048243',
'eggshell': '#ffffd4',
'denim': '#3b638c',
'yellow brown': '#b79400',
'dull purple': '#84597e',
'chocolate brown': '#411900',
'wine red': '#7b0323',
'neon blue': '#04d9ff',
'dirty green': '#667e2c',
'light tan': '#fbeeac',
'ice blue': '#d7fffe',
'cadet blue': '#4e7496',
'dark mauve': '#874c62',
'very light blue': '#d5ffff',
'grey purple': '#826d8c',
'pastel pink': '#ffbacd',
'very light green': '#d1ffbd',
'dark sky blue': '#448ee4',
'evergreen': '#05472a',
'dull pink': '#d5869d',
'aubergine': '#3d0734',
'mahogany': '#4a0100',
'reddish orange': '#f8481c',
'deep green': '#02590f',
'vomit green': '#89a203',
'purple pink': '#e03fd8',
'dusty pink': '#d58a94',
'faded green': '#7bb274',
'camo green': '#526525',
'pinky purple': '#c94cbe',
'pink purple': '#db4bda',
'brownish red': '#9e3623',
'dark rose': '#b5485d',
'mud': '#735c12',
'brownish': '#9c6d57',
'emerald green': '#028f1e',
'pale brown': '#b1916e',
'dull blue': '#49759c',
'burnt umber': '#a0450e',
'medium green': '#39ad48',
'clay': '#b66a50',
'light aqua': '#8cffdb',
'light olive green': '#a4be5c',
'brownish orange': '#cb7723',
'dark aqua': '#05696b',
'purplish pink': '#ce5dae',
'dark salmon': '#c85a53',
'greenish grey': '#96ae8d',
'jade': '#1fa774',
'ugly green': '#7a9703',
'dark beige': '#ac9362',
'emerald': '#01a049',
'pale red': '#d9544d',
'light magenta': '#fa5ff7',
'sky': '#82cafc',
'light cyan': '#acfffc',
'yellow orange': '#fcb001',
'reddish purple': '#910951',
'reddish pink': '#fe2c54',
'orchid': '#c875c4',
'dirty yellow': '#cdc50a',
'orange red': '#fd411e',
'deep red': '#9a0200',
'orange brown': '#be6400',
'cobalt blue': '#030aa7',
'neon pink': '#fe019a',
'rose pink': '#f7879a',
'greyish purple': '#887191',
'raspberry': '#b00149',
'aqua green': '#12e193',
'salmon pink': '#fe7b7c',
'tangerine': '#ff9408',
'brownish green': '#6a6e09',
'red brown': '#8b2e16',
'greenish brown': '#696112',
'pumpkin': '#e17701',
'pine green': '#0a481e',
'charcoal': '#343837',
'baby pink': '#ffb7ce',
'cornflower': '#6a79f7',
'blue violet': '#5d06e9',
'chocolate': '#3d1c02',
'greyish green': '#82a67d',
'scarlet': '#be0119',
'green yellow': '#c9ff27',
'dark olive': '#373e02',
'sienna': '#a9561e',
'pastel purple': '#caa0ff',
'terracotta': '#ca6641',
'aqua blue': '#02d8e9',
'sage green': '#88b378',
'blood red': '#980002',
'deep pink': '#cb0162',
'grass': '#5cac2d',
'moss': '#769958',
'pastel blue': '#a2bffe',
'bluish green': '#10a674',
'green blue': '#06b48b',
'dark tan': '#af884a',
'greenish blue': '#0b8b87',
'pale orange': '#ffa756',
'vomit': '#a2a415',
'forrest green': '#154406',
'dark lavender': '#856798',
'dark violet': '#34013f',
'purple blue': '#632de9',
'dark cyan': '#0a888a',
'olive drab': '#6f7632',
'pinkish': '#d46a7e',
'cobalt': '#1e488f',
'neon purple': '#bc13fe',
'light turquoise': '#7ef4cc',
'apple green': '#76cd26',
'dull green': '#74a662',
'wine': '#80013f',
'powder blue': '#b1d1fc',
'off white': '#ffffe4',
'electric blue': '#0652ff',
'dark turquoise': '#045c5a',
'blue purple': '#5729ce',
'azure': '#069af3',
'bright red': '#ff000d',
'pinkish red': '#f10c45',
'cornflower blue': '#5170d7',
'light olive': '#acbf69',
'grape': '#6c3461',
'greyish blue': '#5e819d',
'purplish blue': '#601ef9',
'yellowish green': '#b0dd16',
'greenish yellow': '#cdfd02',
'medium blue': '#2c6fbb',
'dusty rose': '#c0737a',
'light violet': '#d6b4fc',
'midnight blue': '#020035',
'bluish purple': '#703be7',
'red orange': '#fd3c06',
'dark magenta': '#960056',
'greenish': '#40a368',
'ocean blue': '#03719c',
'coral': '#fc5a50',
'cream': '#ffffc2',
'reddish brown': '#7f2b0a',
'burnt sienna': '#b04e0f',
'brick': '#a03623',
'sage': '#87ae73',
'grey green': '#789b73',
'white': '#ffffff',
"robin's egg blue": '#98eff9',
'moss green': '#658b38',
'steel blue': '#5a7d9a',
'eggplant': '#380835',
'light yellow': '#fffe7a',
'leaf green': '#5ca904',
'light grey': '#d8dcd6',
'puke': '#a5a502',
'pinkish purple': '#d648d7',
'sea blue': '#047495',
'pale purple': '#b790d4',
'slate blue': '#5b7c99',
'blue grey': '#607c8e',
'hunter green': '#0b4008',
'fuchsia': '#ed0dd9',
'crimson': '#8c000f',
'pale yellow': '#ffff84',
'ochre': '#bf9005',
'mustard yellow': '#d2bd0a',
'light red': '#ff474c',
'cerulean': '#0485d1',
'pale pink': '#ffcfdc',
'deep blue': '#040273',
'rust': '#a83c09',
'light teal': '#90e4c1',
'slate': '#516572',
'goldenrod': '#fac205',
'dark yellow': '#d5b60a',
'dark grey': '#363737',
'army green': '#4b5d16',
'grey blue': '#6b8ba4',
'seafoam': '#80f9ad',
'puce': '#a57e52',
'spring green': '#a9f971',
'dark orange': '#c65102',
'sand': '#e2ca76',
'pastel green': '#b0ff9d',
'mint': '#9ffeb0',
'light orange': '#fdaa48',
'bright pink': '#fe01b1',
'chartreuse': '#c1f80a',
'deep purple': '#36013f',
'dark brown': '#341c02',
'taupe': '#b9a281',
'pea green': '#8eab12',
'puke green': '#9aae07',
'kelly green': '#02ab2e',
'seafoam green': '#7af9ab',
'blue green': '#137e6d',
'khaki': '#aaa662',
'burgundy': '#610023',
'dark teal': '#014d4e',
'brick red': '#8f1402',
'royal purple': '#4b006e',
'plum': '#580f41',
'mint green': '#8fff9f',
'gold': '#dbb40c',
'baby blue': '#a2cffe',
'yellow green': '#c0fb2d',
'bright purple': '#be03fd',
'dark red': '#840000',
'pale blue': '#d0fefe',
'grass green': '#3f9b0b',
'navy': '#01153e',
'aquamarine': '#04d8b2',
'burnt orange': '#c04e01',
'neon green': '#0cff0c',
'bright blue': '#0165fc',
'rose': '#cf6275',
'light pink': '#ffd1df',
'mustard': '#ceb301',
'indigo': '#380282',
'lime': '#aaff32',
'sea green': '#53fca1',
'periwinkle': '#8e82fe',
'dark pink': '#cb416b',
'olive green': '#677a04',
'peach': '#ffb07c',
'pale green': '#c7fdb5',
'light brown': '#ad8150',
'hot pink': '#ff028d',
'black': '#000000',
'lilac': '#cea2fd',
'navy blue': '#001146',
'royal blue': '#0504aa',
'beige': '#e6daa6',
'salmon': '#ff796c',
'olive': '#6e750e',
'maroon': '#650021',
'bright green': '#01ff07',
'dark purple': '#35063e',
'mauve': '#ae7181',
'forest green': '#06470c',
'aqua': '#13eac9',
'cyan': '#00ffff',
'tan': '#d1b26f',
'dark blue': '#00035b',
'lavender': '#c79fef',
'turquoise': '#06c2ac',
'dark green': '#033500',
'violet': '#9a0eea',
'light purple': '#bf77f6',
'lime green': '#89fe05',
'grey': '#929591',
'sky blue': '#75bbfd',
'yellow': '#ffff14',
'magenta': '#c20078',
'light green': '#96f97b',
'orange': '#f97306',
'teal': '#029386',
'light blue': '#95d0fc',
'red': '#e50000',
'brown': '#653700',
'pink': '#ff81c0',
'blue': '#0343df',
'green': '#15b01a',
'purple': '#7e1e9c'}
# Normalize name to "xkcd:<name>" to avoid name collisions.
XKCD_COLORS = {'xkcd:' + name: value for name, value in XKCD_COLORS.items()}
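# Illustrative sketch only -- not part of this module. The xkcd survey names
# keep their internal spaces, so they are always written with the "xkcd:"
# prefix (the helper name _demo_xkcd_names is ours).
def _demo_xkcd_names():
    import matplotlib.colors as mcolors
    assert mcolors.to_hex('xkcd:sky blue') == XKCD_COLORS['xkcd:sky blue']
    # Bare single letters such as 'b' or 'g' still resolve via BASE_COLORS.
    return mcolors.to_rgba('xkcd:sky blue')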
# https://drafts.csswg.org/css-color-4/#named-colors
CSS4_COLORS = {
'aliceblue': '#F0F8FF',
'antiquewhite': '#FAEBD7',
'aqua': '#00FFFF',
'aquamarine': '#7FFFD4',
'azure': '#F0FFFF',
'beige': '#F5F5DC',
'bisque': '#FFE4C4',
'black': '#000000',
'blanchedalmond': '#FFEBCD',
'blue': '#0000FF',
'blueviolet': '#8A2BE2',
'brown': '#A52A2A',
'burlywood': '#DEB887',
'cadetblue': '#5F9EA0',
'chartreuse': '#7FFF00',
'chocolate': '#D2691E',
'coral': '#FF7F50',
'cornflowerblue': '#6495ED',
'cornsilk': '#FFF8DC',
'crimson': '#DC143C',
'cyan': '#00FFFF',
'darkblue': '#00008B',
'darkcyan': '#008B8B',
'darkgoldenrod': '#B8860B',
'darkgray': '#A9A9A9',
'darkgreen': '#006400',
'darkgrey': '#A9A9A9',
'darkkhaki': '#BDB76B',
'darkmagenta': '#8B008B',
'darkolivegreen': '#556B2F',
'darkorange': '#FF8C00',
'darkorchid': '#9932CC',
'darkred': '#8B0000',
'darksalmon': '#E9967A',
'darkseagreen': '#8FBC8F',
'darkslateblue': '#483D8B',
'darkslategray': '#2F4F4F',
'darkslategrey': '#2F4F4F',
'darkturquoise': '#00CED1',
'darkviolet': '#9400D3',
'deeppink': '#FF1493',
'deepskyblue': '#00BFFF',
'dimgray': '#696969',
'dimgrey': '#696969',
'dodgerblue': '#1E90FF',
'firebrick': '#B22222',
'floralwhite': '#FFFAF0',
'forestgreen': '#228B22',
'fuchsia': '#FF00FF',
'gainsboro': '#DCDCDC',
'ghostwhite': '#F8F8FF',
'gold': '#FFD700',
'goldenrod': '#DAA520',
'gray': '#808080',
'green': '#008000',
'greenyellow': '#ADFF2F',
'grey': '#808080',
'honeydew': '#F0FFF0',
'hotpink': '#FF69B4',
'indianred': '#CD5C5C',
'indigo': '#4B0082',
'ivory': '#FFFFF0',
'khaki': '#F0E68C',
'lavender': '#E6E6FA',
'lavenderblush': '#FFF0F5',
'lawngreen': '#7CFC00',
'lemonchiffon': '#FFFACD',
'lightblue': '#ADD8E6',
'lightcoral': '#F08080',
'lightcyan': '#E0FFFF',
'lightgoldenrodyellow': '#FAFAD2',
'lightgray': '#D3D3D3',
'lightgreen': '#90EE90',
'lightgrey': '#D3D3D3',
'lightpink': '#FFB6C1',
'lightsalmon': '#FFA07A',
'lightseagreen': '#20B2AA',
'lightskyblue': '#87CEFA',
'lightslategray': '#778899',
'lightslategrey': '#778899',
'lightsteelblue': '#B0C4DE',
'lightyellow': '#FFFFE0',
'lime': '#00FF00',
'limegreen': '#32CD32',
'linen': '#FAF0E6',
'magenta': '#FF00FF',
'maroon': '#800000',
'mediumaquamarine': '#66CDAA',
'mediumblue': '#0000CD',
'mediumorchid': '#BA55D3',
'mediumpurple': '#9370DB',
'mediumseagreen': '#3CB371',
'mediumslateblue': '#7B68EE',
'mediumspringgreen': '#00FA9A',
'mediumturquoise': '#48D1CC',
'mediumvioletred': '#C71585',
'midnightblue': '#191970',
'mintcream': '#F5FFFA',
'mistyrose': '#FFE4E1',
'moccasin': '#FFE4B5',
'navajowhite': '#FFDEAD',
'navy': '#000080',
'oldlace': '#FDF5E6',
'olive': '#808000',
'olivedrab': '#6B8E23',
'orange': '#FFA500',
'orangered': '#FF4500',
'orchid': '#DA70D6',
'palegoldenrod': '#EEE8AA',
'palegreen': '#98FB98',
'paleturquoise': '#AFEEEE',
'palevioletred': '#DB7093',
'papayawhip': '#FFEFD5',
'peachpuff': '#FFDAB9',
'peru': '#CD853F',
'pink': '#FFC0CB',
'plum': '#DDA0DD',
'powderblue': '#B0E0E6',
'purple': '#800080',
'rebeccapurple': '#663399',
'red': '#FF0000',
'rosybrown': '#BC8F8F',
'royalblue': '#4169E1',
'saddlebrown': '#8B4513',
'salmon': '#FA8072',
'sandybrown': '#F4A460',
'seagreen': '#2E8B57',
'seashell': '#FFF5EE',
'sienna': '#A0522D',
'silver': '#C0C0C0',
'skyblue': '#87CEEB',
'slateblue': '#6A5ACD',
'slategray': '#708090',
'slategrey': '#708090',
'snow': '#FFFAFA',
'springgreen': '#00FF7F',
'steelblue': '#4682B4',
'tan': '#D2B48C',
'teal': '#008080',
'thistle': '#D8BFD8',
'tomato': '#FF6347',
'turquoise': '#40E0D0',
'violet': '#EE82EE',
'wheat': '#F5DEB3',
'white': '#FFFFFF',
'whitesmoke': '#F5F5F5',
'yellow': '#FFFF00',
'yellowgreen': '#9ACD32'}
| 34,896 | 29.398084 | 76 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/colorbar.py |
'''
Colorbar toolkit with two classes and a function:
:class:`ColorbarBase`
the base class with full colorbar drawing functionality.
It can be used as-is to make a colorbar for a given colormap;
a mappable object (e.g., image) is not needed.
:class:`Colorbar`
the derived class for use with images or contour plots.
:func:`make_axes`
a function for resizing an axes and adding a second axes
suitable for a colorbar
The :meth:`~matplotlib.figure.Figure.colorbar` method uses :func:`make_axes`
and :class:`Colorbar`; the :func:`~matplotlib.pyplot.colorbar` function
is a thin wrapper over :meth:`~matplotlib.figure.Figure.colorbar`.
'''
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange, zip
import warnings
import numpy as np
import matplotlib as mpl
import matplotlib.artist as martist
import matplotlib.cbook as cbook
import matplotlib.collections as collections
import matplotlib.colors as colors
import matplotlib.contour as contour
import matplotlib.cm as cm
import matplotlib.gridspec as gridspec
import matplotlib.patches as mpatches
import matplotlib.path as mpath
import matplotlib.ticker as ticker
import matplotlib.transforms as mtransforms
import matplotlib._layoutbox as layoutbox
import matplotlib._constrained_layout as constrained_layout
from matplotlib import docstring
make_axes_kw_doc = '''
============= ====================================================
Property Description
============= ====================================================
*orientation* vertical or horizontal
*fraction* 0.15; fraction of original axes to use for colorbar
*pad* 0.05 if vertical, 0.15 if horizontal; fraction
of original axes between colorbar and new image axes
*shrink* 1.0; fraction by which to multiply the size of the colorbar
*aspect* 20; ratio of long to short dimensions
*anchor* (0.0, 0.5) if vertical; (0.5, 1.0) if horizontal;
the anchor point of the colorbar axes
*panchor* (1.0, 0.5) if vertical; (0.5, 0.0) if horizontal;
the anchor point of the colorbar parent axes. If
False, the parent axes' anchor will be unchanged
============= ====================================================
'''
colormap_kw_doc = '''
============ ====================================================
Property Description
============ ====================================================
*extend* [ 'neither' | 'both' | 'min' | 'max' ]
If not 'neither', make pointed end(s) for out-of-
range values. These are set for a given colormap
using the colormap set_under and set_over methods.
*extendfrac* [ *None* | 'auto' | length | lengths ]
If set to *None*, both the minimum and maximum
                   triangular colorbar extensions will have a length of
5% of the interior colorbar length (this is the
default setting). If set to 'auto', makes the
triangular colorbar extensions the same lengths as
the interior boxes (when *spacing* is set to
'uniform') or the same lengths as the respective
adjacent interior boxes (when *spacing* is set to
'proportional'). If a scalar, indicates the length
of both the minimum and maximum triangular colorbar
extensions as a fraction of the interior colorbar
length. A two-element sequence of fractions may also
be given, indicating the lengths of the minimum and
maximum colorbar extensions respectively as a
fraction of the interior colorbar length.
*extendrect* bool
If *False* the minimum and maximum colorbar extensions
will be triangular (the default). If *True* the
extensions will be rectangular.
*spacing* [ 'uniform' | 'proportional' ]
Uniform spacing gives each discrete color the same
space; proportional makes the space proportional to
the data interval.
*ticks* [ None | list of ticks | Locator object ]
If None, ticks are determined automatically from the
input.
*format* [ None | format string | Formatter object ]
If None, the
:class:`~matplotlib.ticker.ScalarFormatter` is used.
If a format string is given, e.g., '%.3f', that is
used. An alternative
:class:`~matplotlib.ticker.Formatter` object may be
given instead.
*drawedges* bool
Whether to draw lines at color boundaries.
============ ====================================================
The following will probably be useful only in the context of
indexed colors (that is, when the mappable has norm=NoNorm()),
or other unusual circumstances.
============ ===================================================
Property Description
============ ===================================================
*boundaries* None or a sequence
*values* None or a sequence which must be of length 1 less
than the sequence of *boundaries*. For each region
delimited by adjacent entries in *boundaries*, the
color mapped to the corresponding value in values
will be used.
============ ===================================================
'''
colorbar_doc = '''
Add a colorbar to a plot.
Function signatures for the :mod:`~matplotlib.pyplot` interface; all
but the first are also method signatures for the
:meth:`~matplotlib.figure.Figure.colorbar` method::
colorbar(**kwargs)
colorbar(mappable, **kwargs)
colorbar(mappable, cax=cax, **kwargs)
colorbar(mappable, ax=ax, **kwargs)
Parameters
----------
mappable :
The :class:`~matplotlib.image.Image`,
:class:`~matplotlib.contour.ContourSet`, etc. to
which the colorbar applies; this argument is mandatory for the Figure
:meth:`~matplotlib.figure.Figure.colorbar` method but optional for the
pyplot :func:`~matplotlib.pyplot.colorbar` function, which sets the
default to the current image.
cax : :class:`~matplotlib.axes.Axes` object, optional
    Axes into which the colorbar will be drawn
ax : :class:`~matplotlib.axes.Axes`, list of Axes, optional
Parent axes from which space for a new colorbar axes will be stolen.
If a list of axes is given they will all be resized to make room for the
colorbar axes.
use_gridspec : bool, optional
If *cax* is ``None``, a new *cax* is created as an instance of
Axes. If *ax* is an instance of Subplot and *use_gridspec* is ``True``,
*cax* is created as an instance of Subplot using the
    gridspec module.
Returns
-------
:class:`~matplotlib.colorbar.Colorbar` instance
See also its base class, :class:`~matplotlib.colorbar.ColorbarBase`.
Call the :meth:`~matplotlib.colorbar.ColorbarBase.set_label` method
to label the colorbar.
Notes
-----
Additional keyword arguments are of two kinds:
axes properties:
%s
colorbar properties:
%s
If *mappable* is a :class:`~matplotlib.contour.ContourSet`, its *extend*
kwarg is included automatically.
The *shrink* kwarg provides a simple way to scale the colorbar with respect
to the axes. Note that if *cax* is specified it determines the size of the
colorbar and *shrink* and *aspect* kwargs are ignored.
For more precise control, you can manually specify the positions of
the axes objects in which the mappable and the colorbar are drawn. In
this case, do not use any of the axes properties kwargs.
It is known that some vector graphics viewers (svg and pdf) render white gaps
between segments of the colorbar. This is due to bugs in the viewers, not
matplotlib.
segments::
cbar = colorbar()
cbar.solids.set_edgecolor("face")
draw()
However, this workaround has negative consequences in other circumstances,
particularly with semi-transparent images (alpha < 1) and colorbar extensions;
it is therefore not enabled by default (see issue #1188).
''' % (make_axes_kw_doc, colormap_kw_doc)
docstring.interpd.update(colorbar_doc=colorbar_doc)
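# ---------------------------------------------------------------------------
# Illustrative sketch only -- not part of matplotlib. The helper below (the
# name _demo_standalone_colorbar is ours) draws a ColorbarBase without any
# mappable, as described in the class docstring further down; `mpl` and
# `colors` are the module-level imports above.
def _demo_standalone_colorbar():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots(figsize=(6, 1))
    fig.subplots_adjust(bottom=0.5)
    cb = ColorbarBase(ax, cmap=mpl.cm.viridis,
                      norm=colors.Normalize(vmin=5, vmax=10),
                      orientation='horizontal', extend='both',
                      label='some units')
    return cb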
def _set_ticks_on_axis_warn(*args, **kw):
# a top level function which gets put in at the axes'
# set_xticks set_yticks by _patch_ax
warnings.warn("Use the colorbar set_ticks() method instead.")
class ColorbarBase(cm.ScalarMappable):
'''
Draw a colorbar in an existing axes.
This is a base class for the :class:`Colorbar` class, which is the
basis for the :func:`~matplotlib.pyplot.colorbar` function and the
:meth:`~matplotlib.figure.Figure.colorbar` method, which are the
usual ways of creating a colorbar.
It is also useful by itself for showing a colormap. If the *cmap*
kwarg is given but *boundaries* and *values* are left as None,
then the colormap will be displayed on a 0-1 scale. To show the
under- and over-value colors, specify the *norm* as::
colors.Normalize(clip=False)
To show the colors versus index instead of on the 0-1 scale,
use::
        norm=colors.NoNorm()
Useful public methods are :meth:`set_label` and :meth:`add_lines`.
Attributes
----------
ax : Axes
The `Axes` instance in which the colorbar is drawn.
lines : list
A list of `LineCollection` if lines were drawn, otherwise
an empty list.
dividers : LineCollection
A LineCollection if *drawedges* is ``True``, otherwise ``None``.
'''
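    # A sketch of the standalone use described above: drawing a bare colormap
    # with no mappable.  The figure/axes creation here is assumed, not part
    # of this class.
    #
    #     import matplotlib.pyplot as plt
    #     from matplotlib import cm, colors
    #
    #     fig, ax = plt.subplots(figsize=(1.5, 4))
    #     cb = ColorbarBase(ax, cmap=cm.get_cmap('viridis'),
    #                       norm=colors.Normalize(vmin=0, vmax=1),
    #                       orientation='vertical', label='scale')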
_slice_dict = {'neither': slice(0, None),
'both': slice(1, -1),
'min': slice(1, None),
'max': slice(0, -1)}
n_rasterize = 50 # rasterize solids if number of colors >= n_rasterize
def __init__(self, ax, cmap=None,
norm=None,
alpha=None,
values=None,
boundaries=None,
orientation='vertical',
ticklocation='auto',
extend='neither',
spacing='uniform', # uniform or proportional
ticks=None,
format=None,
drawedges=False,
filled=True,
extendfrac=None,
extendrect=False,
label='',
):
#: The axes that this colorbar lives in.
self.ax = ax
self._patch_ax()
if cmap is None:
cmap = cm.get_cmap()
if norm is None:
norm = colors.Normalize()
self.alpha = alpha
cm.ScalarMappable.__init__(self, cmap=cmap, norm=norm)
self.values = values
self.boundaries = boundaries
self.extend = extend
self._inside = self._slice_dict[extend]
self.spacing = spacing
self.orientation = orientation
self.drawedges = drawedges
self.filled = filled
self.extendfrac = extendfrac
self.extendrect = extendrect
self.solids = None
self.lines = list()
self.outline = None
self.patch = None
self.dividers = None
if ticklocation == 'auto':
ticklocation = 'bottom' if orientation == 'horizontal' else 'right'
self.ticklocation = ticklocation
self.set_label(label)
if cbook.iterable(ticks):
self.locator = ticker.FixedLocator(ticks, nbins=len(ticks))
else:
self.locator = ticks # Handle default in _ticker()
if format is None:
if isinstance(self.norm, colors.LogNorm):
self.formatter = ticker.LogFormatterSciNotation()
elif isinstance(self.norm, colors.SymLogNorm):
self.formatter = ticker.LogFormatterSciNotation(
linthresh=self.norm.linthresh)
else:
self.formatter = ticker.ScalarFormatter()
elif isinstance(format, six.string_types):
self.formatter = ticker.FormatStrFormatter(format)
else:
self.formatter = format # Assume it is a Formatter
# The rest is in a method so we can recalculate when clim changes.
self.config_axis()
self.draw_all()
def _extend_lower(self):
"""Returns whether the lower limit is open ended."""
return self.extend in ('both', 'min')
def _extend_upper(self):
"""Returns whether the uper limit is open ended."""
return self.extend in ('both', 'max')
def _patch_ax(self):
# bind some methods to the axes to warn users
# against using those methods.
self.ax.set_xticks = _set_ticks_on_axis_warn
self.ax.set_yticks = _set_ticks_on_axis_warn
def draw_all(self):
'''
Calculate any free parameters based on the current cmap and norm,
and do all the drawing.
'''
self._process_values()
self._find_range()
X, Y = self._mesh()
C = self._values[:, np.newaxis]
self._config_axes(X, Y)
if self.filled:
self._add_solids(X, Y, C)
def config_axis(self):
ax = self.ax
if self.orientation == 'vertical':
ax.xaxis.set_ticks([])
            # location is either one of 'left' or 'right'
ax.yaxis.set_label_position(self.ticklocation)
ax.yaxis.set_ticks_position(self.ticklocation)
else:
ax.yaxis.set_ticks([])
            # location is either one of 'bottom' or 'top'
ax.xaxis.set_label_position(self.ticklocation)
ax.xaxis.set_ticks_position(self.ticklocation)
self._set_label()
def update_ticks(self):
"""
Force the update of the ticks and ticklabels. This must be
called whenever the tick locator and/or tick formatter changes.
"""
ax = self.ax
ticks, ticklabels, offset_string = self._ticker()
if self.orientation == 'vertical':
ax.yaxis.set_ticks(ticks)
ax.set_yticklabels(ticklabels)
ax.yaxis.get_major_formatter().set_offset_string(offset_string)
else:
ax.xaxis.set_ticks(ticks)
ax.set_xticklabels(ticklabels)
ax.xaxis.get_major_formatter().set_offset_string(offset_string)
def set_ticks(self, ticks, update_ticks=True):
"""
Set tick locations.
Parameters
----------
ticks : {None, sequence, :class:`~matplotlib.ticker.Locator` instance}
If None, a default Locator will be used.
update_ticks : {True, False}, optional
If True, tick locations are updated immediately. If False,
use :meth:`update_ticks` to manually update the ticks.
"""
if cbook.iterable(ticks):
self.locator = ticker.FixedLocator(ticks, nbins=len(ticks))
else:
self.locator = ticks
if update_ticks:
self.update_ticks()
self.stale = True
def get_ticks(self, minor=False):
"""Return the x ticks as a list of locations"""
return self._tick_data_values
def set_ticklabels(self, ticklabels, update_ticks=True):
"""
        Set tick labels. Tick labels are updated immediately unless
        *update_ticks* is *False*, in which case the *update_ticks*
        method must be called explicitly to update them.
"""
if isinstance(self.locator, ticker.FixedLocator):
self.formatter = ticker.FixedFormatter(ticklabels)
if update_ticks:
self.update_ticks()
else:
warnings.warn("set_ticks() must have been called.")
self.stale = True
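    # Small sketch of the two methods above (``cbar`` is an assumed
    # colorbar instance):
    #
    #     cbar.set_ticks([0.0, 0.5, 1.0])
    #     cbar.set_ticklabels(['low', 'mid', 'high'])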
def _config_axes(self, X, Y):
'''
Make an axes patch and outline.
'''
ax = self.ax
ax.set_frame_on(False)
ax.set_navigate(False)
xy = self._outline(X, Y)
ax.update_datalim(xy)
ax.set_xlim(*ax.dataLim.intervalx)
ax.set_ylim(*ax.dataLim.intervaly)
if self.outline is not None:
self.outline.remove()
self.outline = mpatches.Polygon(
xy, edgecolor=mpl.rcParams['axes.edgecolor'],
facecolor='none',
linewidth=mpl.rcParams['axes.linewidth'],
closed=True,
zorder=2)
ax.add_artist(self.outline)
self.outline.set_clip_box(None)
self.outline.set_clip_path(None)
c = mpl.rcParams['axes.facecolor']
if self.patch is not None:
self.patch.remove()
self.patch = mpatches.Polygon(xy, edgecolor=c,
facecolor=c,
linewidth=0.01,
zorder=-1)
ax.add_artist(self.patch)
self.update_ticks()
def _set_label(self):
if self.orientation == 'vertical':
self.ax.set_ylabel(self._label, **self._labelkw)
else:
self.ax.set_xlabel(self._label, **self._labelkw)
self.stale = True
def set_label(self, label, **kw):
'''
Label the long axis of the colorbar
'''
self._label = '%s' % (label, )
self._labelkw = kw
self._set_label()
def _outline(self, X, Y):
'''
Return *x*, *y* arrays of colorbar bounding polygon,
taking orientation into account.
'''
N = X.shape[0]
ii = [0, 1, N - 2, N - 1, 2 * N - 1, 2 * N - 2, N + 1, N, 0]
x = np.take(np.ravel(np.transpose(X)), ii)
y = np.take(np.ravel(np.transpose(Y)), ii)
x = x.reshape((len(x), 1))
y = y.reshape((len(y), 1))
if self.orientation == 'horizontal':
return np.hstack((y, x))
return np.hstack((x, y))
def _edges(self, X, Y):
'''
Return the separator line segments; helper for _add_solids.
'''
N = X.shape[0]
# Using the non-array form of these line segments is much
# simpler than making them into arrays.
if self.orientation == 'vertical':
return [list(zip(X[i], Y[i])) for i in xrange(1, N - 1)]
else:
return [list(zip(Y[i], X[i])) for i in xrange(1, N - 1)]
def _add_solids(self, X, Y, C):
'''
Draw the colors using :meth:`~matplotlib.axes.Axes.pcolormesh`;
optionally add separators.
'''
if self.orientation == 'vertical':
args = (X, Y, C)
else:
args = (np.transpose(Y), np.transpose(X), np.transpose(C))
kw = dict(cmap=self.cmap,
norm=self.norm,
alpha=self.alpha,
edgecolors='None')
# Save, set, and restore hold state to keep pcolor from
# clearing the axes. Ordinarily this will not be needed,
# since the axes object should already have hold set.
_hold = self.ax._hold
self.ax._hold = True
col = self.ax.pcolormesh(*args, **kw)
self.ax._hold = _hold
#self.add_observer(col) # We should observe, not be observed...
if self.solids is not None:
self.solids.remove()
self.solids = col
if self.dividers is not None:
self.dividers.remove()
self.dividers = None
if self.drawedges:
linewidths = (0.5 * mpl.rcParams['axes.linewidth'],)
self.dividers = collections.LineCollection(
self._edges(X, Y),
colors=(mpl.rcParams['axes.edgecolor'],),
linewidths=linewidths)
self.ax.add_collection(self.dividers)
elif len(self._y) >= self.n_rasterize:
self.solids.set_rasterized(True)
def add_lines(self, levels, colors, linewidths, erase=True):
'''
Draw lines on the colorbar.
*colors* and *linewidths* must be scalars or
sequences the same length as *levels*.
Set *erase* to False to add lines without first
removing any previously added lines.
'''
y = self._locate(levels)
igood = (y < 1.001) & (y > -0.001)
y = y[igood]
if cbook.iterable(colors):
colors = np.asarray(colors)[igood]
if cbook.iterable(linewidths):
linewidths = np.asarray(linewidths)[igood]
N = len(y)
x = np.array([0.0, 1.0])
X, Y = np.meshgrid(x, y)
if self.orientation == 'vertical':
xy = [list(zip(X[i], Y[i])) for i in xrange(N)]
else:
xy = [list(zip(Y[i], X[i])) for i in xrange(N)]
col = collections.LineCollection(xy, linewidths=linewidths)
if erase and self.lines:
for lc in self.lines:
lc.remove()
self.lines = []
self.lines.append(col)
col.set_color(colors)
self.ax.add_collection(col)
self.stale = True
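    # Sketch of add_lines() with per-level colors and linewidths; as noted in
    # the docstring, the sequences must match the number of levels (``cbar``
    # is an assumed colorbar instance):
    #
    #     cbar.add_lines([0.25, 0.5, 0.75],
    #                    colors=['w', 'w', 'w'],
    #                    linewidths=[1.5, 1.5, 1.5])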
def _ticker(self):
'''
Return the sequence of ticks (colorbar data locations),
ticklabels (strings), and the corresponding offset string.
'''
locator = self.locator
formatter = self.formatter
if locator is None:
if self.boundaries is None:
if isinstance(self.norm, colors.NoNorm):
nv = len(self._values)
base = 1 + int(nv / 10)
locator = ticker.IndexLocator(base=base, offset=0)
elif isinstance(self.norm, colors.BoundaryNorm):
b = self.norm.boundaries
locator = ticker.FixedLocator(b, nbins=10)
elif isinstance(self.norm, colors.LogNorm):
locator = ticker.LogLocator(subs='all')
elif isinstance(self.norm, colors.SymLogNorm):
# The subs setting here should be replaced
# by logic in the locator.
locator = ticker.SymmetricalLogLocator(
subs=np.arange(1, 10),
linthresh=self.norm.linthresh,
base=10)
else:
if mpl.rcParams['_internal.classic_mode']:
locator = ticker.MaxNLocator()
else:
locator = ticker.AutoLocator()
else:
b = self._boundaries[self._inside]
locator = ticker.FixedLocator(b, nbins=10)
if isinstance(self.norm, colors.NoNorm) and self.boundaries is None:
intv = self._values[0], self._values[-1]
else:
intv = self.vmin, self.vmax
locator.create_dummy_axis(minpos=intv[0])
formatter.create_dummy_axis(minpos=intv[0])
locator.set_view_interval(*intv)
locator.set_data_interval(*intv)
formatter.set_view_interval(*intv)
formatter.set_data_interval(*intv)
b = np.array(locator())
if isinstance(locator, ticker.LogLocator):
eps = 1e-10
b = b[(b <= intv[1] * (1 + eps)) & (b >= intv[0] * (1 - eps))]
else:
eps = (intv[1] - intv[0]) * 1e-10
b = b[(b <= intv[1] + eps) & (b >= intv[0] - eps)]
self._tick_data_values = b
ticks = self._locate(b)
formatter.set_locs(b)
ticklabels = [formatter(t, i) for i, t in enumerate(b)]
offset_string = formatter.get_offset()
return ticks, ticklabels, offset_string
def _process_values(self, b=None):
'''
Set the :attr:`_boundaries` and :attr:`_values` attributes
based on the input boundaries and values. Input boundaries
can be *self.boundaries* or the argument *b*.
'''
if b is None:
b = self.boundaries
if b is not None:
self._boundaries = np.asarray(b, dtype=float)
if self.values is None:
self._values = 0.5 * (self._boundaries[:-1]
+ self._boundaries[1:])
if isinstance(self.norm, colors.NoNorm):
self._values = (self._values + 0.00001).astype(np.int16)
return
self._values = np.array(self.values)
return
if self.values is not None:
self._values = np.array(self.values)
if self.boundaries is None:
b = np.zeros(len(self.values) + 1, 'd')
b[1:-1] = 0.5 * (self._values[:-1] - self._values[1:])
b[0] = 2.0 * b[1] - b[2]
b[-1] = 2.0 * b[-2] - b[-3]
self._boundaries = b
return
self._boundaries = np.array(self.boundaries)
return
# Neither boundaries nor values are specified;
# make reasonable ones based on cmap and norm.
if isinstance(self.norm, colors.NoNorm):
b = self._uniform_y(self.cmap.N + 1) * self.cmap.N - 0.5
v = np.zeros((len(b) - 1,), dtype=np.int16)
v[self._inside] = np.arange(self.cmap.N, dtype=np.int16)
if self._extend_lower():
v[0] = -1
if self._extend_upper():
v[-1] = self.cmap.N
self._boundaries = b
self._values = v
return
elif isinstance(self.norm, colors.BoundaryNorm):
b = list(self.norm.boundaries)
if self._extend_lower():
b = [b[0] - 1] + b
if self._extend_upper():
b = b + [b[-1] + 1]
b = np.array(b)
v = np.zeros((len(b) - 1,), dtype=float)
bi = self.norm.boundaries
v[self._inside] = 0.5 * (bi[:-1] + bi[1:])
if self._extend_lower():
v[0] = b[0] - 1
if self._extend_upper():
v[-1] = b[-1] + 1
self._boundaries = b
self._values = v
return
else:
if not self.norm.scaled():
self.norm.vmin = 0
self.norm.vmax = 1
self.norm.vmin, self.norm.vmax = mtransforms.nonsingular(
self.norm.vmin,
self.norm.vmax,
expander=0.1)
b = self.norm.inverse(self._uniform_y(self.cmap.N + 1))
if isinstance(self.norm, colors.LogNorm):
# If using a lognorm, ensure extensions don't go negative
if self._extend_lower():
b[0] = 0.9 * b[0]
if self._extend_upper():
b[-1] = 1.1 * b[-1]
else:
if self._extend_lower():
b[0] = b[0] - 1
if self._extend_upper():
b[-1] = b[-1] + 1
self._process_values(b)
def _find_range(self):
'''
Set :attr:`vmin` and :attr:`vmax` attributes to the first and
last boundary excluding extended end boundaries.
'''
b = self._boundaries[self._inside]
self.vmin = b[0]
self.vmax = b[-1]
def _central_N(self):
'''number of boundaries **before** extension of ends'''
nb = len(self._boundaries)
if self.extend == 'both':
nb -= 2
elif self.extend in ('min', 'max'):
nb -= 1
return nb
def _extended_N(self):
'''
Based on the colormap and extend variable, return the
number of boundaries.
'''
N = self.cmap.N + 1
if self.extend == 'both':
N += 2
elif self.extend in ('min', 'max'):
N += 1
return N
def _get_extension_lengths(self, frac, automin, automax, default=0.05):
'''
Get the lengths of colorbar extensions.
A helper method for _uniform_y and _proportional_y.
'''
# Set the default value.
extendlength = np.array([default, default])
if isinstance(frac, six.string_types):
if frac.lower() == 'auto':
# Use the provided values when 'auto' is required.
extendlength[0] = automin
extendlength[1] = automax
else:
# Any other string is invalid.
raise ValueError('invalid value for extendfrac')
elif frac is not None:
try:
# Try to set min and max extension fractions directly.
extendlength[:] = frac
# If frac is a sequence containing None then NaN may
# be encountered. This is an error.
if np.isnan(extendlength).any():
raise ValueError()
except (TypeError, ValueError):
# Raise an error on encountering an invalid value for frac.
raise ValueError('invalid value for extendfrac')
return extendlength
def _uniform_y(self, N):
'''
Return colorbar data coordinates for *N* uniformly
spaced boundaries, plus ends if required.
'''
if self.extend == 'neither':
y = np.linspace(0, 1, N)
else:
automin = automax = 1. / (N - 1.)
extendlength = self._get_extension_lengths(self.extendfrac,
automin, automax,
default=0.05)
if self.extend == 'both':
y = np.zeros(N + 2, 'd')
y[0] = 0. - extendlength[0]
y[-1] = 1. + extendlength[1]
elif self.extend == 'min':
y = np.zeros(N + 1, 'd')
y[0] = 0. - extendlength[0]
else:
y = np.zeros(N + 1, 'd')
y[-1] = 1. + extendlength[1]
y[self._inside] = np.linspace(0, 1, N)
return y
def _proportional_y(self):
'''
Return colorbar data coordinates for the boundaries of
a proportional colorbar.
'''
if isinstance(self.norm, colors.BoundaryNorm):
y = (self._boundaries - self._boundaries[0])
y = y / (self._boundaries[-1] - self._boundaries[0])
else:
y = self.norm(self._boundaries.copy())
y = np.ma.filled(y, np.nan)
if self.extend == 'min':
# Exclude leftmost interval of y.
clen = y[-1] - y[1]
automin = (y[2] - y[1]) / clen
automax = (y[-1] - y[-2]) / clen
elif self.extend == 'max':
# Exclude rightmost interval in y.
clen = y[-2] - y[0]
automin = (y[1] - y[0]) / clen
automax = (y[-2] - y[-3]) / clen
elif self.extend == 'both':
# Exclude leftmost and rightmost intervals in y.
clen = y[-2] - y[1]
automin = (y[2] - y[1]) / clen
automax = (y[-2] - y[-3]) / clen
if self.extend in ('both', 'min', 'max'):
extendlength = self._get_extension_lengths(self.extendfrac,
automin, automax,
default=0.05)
if self.extend in ('both', 'min'):
y[0] = 0. - extendlength[0]
if self.extend in ('both', 'max'):
y[-1] = 1. + extendlength[1]
yi = y[self._inside]
norm = colors.Normalize(yi[0], yi[-1])
y[self._inside] = np.ma.filled(norm(yi), np.nan)
return y
def _mesh(self):
'''
Return X,Y, the coordinate arrays for the colorbar pcolormesh.
These are suitable for a vertical colorbar; swapping and
transposition for a horizontal colorbar are done outside
this function.
'''
x = np.array([0.0, 1.0])
if self.spacing == 'uniform':
y = self._uniform_y(self._central_N())
else:
y = self._proportional_y()
self._y = y
X, Y = np.meshgrid(x, y)
if self._extend_lower() and not self.extendrect:
X[0, :] = 0.5
if self._extend_upper() and not self.extendrect:
X[-1, :] = 0.5
return X, Y
def _locate(self, x):
'''
Given a set of color data values, return their
corresponding colorbar data coordinates.
'''
if isinstance(self.norm, (colors.NoNorm, colors.BoundaryNorm)):
b = self._boundaries
xn = x
else:
# Do calculations using normalized coordinates so
# as to make the interpolation more accurate.
b = self.norm(self._boundaries, clip=False).filled()
xn = self.norm(x, clip=False).filled()
# The rest is linear interpolation with extrapolation at ends.
ii = np.searchsorted(b, xn)
i0 = ii - 1
itop = (ii == len(b))
ibot = (ii == 0)
i0[itop] -= 1
ii[itop] -= 1
i0[ibot] += 1
ii[ibot] += 1
db = np.take(b, ii) - np.take(b, i0)
y = self._y
dy = np.take(y, ii) - np.take(y, i0)
z = np.take(y, i0) + (xn - np.take(b, i0)) * dy / db
return z
def set_alpha(self, alpha):
self.alpha = alpha
def remove(self):
"""
Remove this colorbar from the figure
"""
fig = self.ax.figure
fig.delaxes(self.ax)
class Colorbar(ColorbarBase):
"""
This class connects a :class:`ColorbarBase` to a
:class:`~matplotlib.cm.ScalarMappable` such as a
:class:`~matplotlib.image.AxesImage` generated via
:meth:`~matplotlib.axes.Axes.imshow`.
It is not intended to be instantiated directly; instead,
use :meth:`~matplotlib.figure.Figure.colorbar` or
:func:`~matplotlib.pyplot.colorbar` to make your colorbar.
"""
def __init__(self, ax, mappable, **kw):
# Ensure the given mappable's norm has appropriate vmin and vmax set
# even if mappable.draw has not yet been called.
mappable.autoscale_None()
self.mappable = mappable
kw['cmap'] = cmap = mappable.cmap
kw['norm'] = norm = mappable.norm
if isinstance(mappable, contour.ContourSet):
CS = mappable
kw['alpha'] = mappable.get_alpha()
kw['boundaries'] = CS._levels
kw['values'] = CS.cvalues
kw['extend'] = CS.extend
#kw['ticks'] = CS._levels
kw.setdefault('ticks', ticker.FixedLocator(CS.levels, nbins=10))
kw['filled'] = CS.filled
ColorbarBase.__init__(self, ax, **kw)
if not CS.filled:
self.add_lines(CS)
else:
if getattr(cmap, 'colorbar_extend', False) is not False:
kw.setdefault('extend', cmap.colorbar_extend)
if isinstance(mappable, martist.Artist):
kw['alpha'] = mappable.get_alpha()
ColorbarBase.__init__(self, ax, **kw)
def on_mappable_changed(self, mappable):
"""
Updates this colorbar to match the mappable's properties.
Typically this is automatically registered as an event handler
by :func:`colorbar_factory` and should not be called manually.
"""
self.set_cmap(mappable.get_cmap())
self.set_clim(mappable.get_clim())
self.update_normal(mappable)
def add_lines(self, CS, erase=True):
'''
Add the lines from a non-filled
:class:`~matplotlib.contour.ContourSet` to the colorbar.
Set *erase* to False if these lines should be added to
any pre-existing lines.
'''
if not isinstance(CS, contour.ContourSet) or CS.filled:
raise ValueError('add_lines is only for a ContourSet of lines')
tcolors = [c[0] for c in CS.tcolors]
tlinewidths = [t[0] for t in CS.tlinewidths]
# The following was an attempt to get the colorbar lines
# to follow subsequent changes in the contour lines,
# but more work is needed: specifically, a careful
# look at event sequences, and at how
# to make one object track another automatically.
#tcolors = [col.get_colors()[0] for col in CS.collections]
#tlinewidths = [col.get_linewidth()[0] for lw in CS.collections]
ColorbarBase.add_lines(self, CS.levels, tcolors, tlinewidths,
erase=erase)
def update_normal(self, mappable):
'''
        Update the solids, lines, etc. Unlike *update_bruteforce*, this
        does not clear the axes. It is meant to be called when the image
        or contour plot to which this colorbar belongs changes.
'''
self.draw_all()
if isinstance(self.mappable, contour.ContourSet):
CS = self.mappable
if not CS.filled:
self.add_lines(CS)
self.stale = True
def update_bruteforce(self, mappable):
'''
Destroy and rebuild the colorbar. This is
intended to become obsolete, and will probably be
deprecated and then removed. It is not called when
the pyplot.colorbar function or the Figure.colorbar
method are used to create the colorbar.
'''
# We are using an ugly brute-force method: clearing and
# redrawing the whole thing. The problem is that if any
# properties have been changed by methods other than the
# colorbar methods, those changes will be lost.
self.ax.cla()
# clearing the axes will delete outline, patch, solids, and lines:
self.outline = None
self.patch = None
self.solids = None
self.lines = list()
self.dividers = None
self.set_alpha(mappable.get_alpha())
self.cmap = mappable.cmap
self.norm = mappable.norm
self.config_axis()
self.draw_all()
if isinstance(self.mappable, contour.ContourSet):
CS = self.mappable
if not CS.filled:
self.add_lines(CS)
#if self.lines is not None:
# tcolors = [c[0] for c in CS.tcolors]
# self.lines.set_color(tcolors)
#Fixme? Recalculate boundaries, ticks if vmin, vmax have changed.
#Fixme: Some refactoring may be needed; we should not
# be recalculating everything if there was a simple alpha
# change.
def remove(self):
"""
Remove this colorbar from the figure. If the colorbar was created with
``use_gridspec=True`` then restore the gridspec to its previous value.
"""
ColorbarBase.remove(self)
self.mappable.callbacksSM.disconnect(self.mappable.colorbar_cid)
self.mappable.colorbar = None
self.mappable.colorbar_cid = None
try:
ax = self.mappable.axes
except AttributeError:
return
try:
gs = ax.get_subplotspec().get_gridspec()
subplotspec = gs.get_topmost_subplotspec()
except AttributeError:
# use_gridspec was False
pos = ax.get_position(original=True)
ax._set_position(pos)
else:
# use_gridspec was True
ax.set_subplotspec(subplotspec)
@docstring.Substitution(make_axes_kw_doc)
def make_axes(parents, location=None, orientation=None, fraction=0.15,
shrink=1.0, aspect=20, **kw):
'''
Resize and reposition parent axes, and return a child
axes suitable for a colorbar.
Keyword arguments may include the following (with defaults):
location : [None|'left'|'right'|'top'|'bottom']
The position, relative to **parents**, where the colorbar axes
            should be created. If None, the location is derived from
            ``orientation`` if it is given, and defaults to 'right' otherwise.
orientation : [None|'vertical'|'horizontal']
The orientation of the colorbar. Typically, this keyword shouldn't
be used, as it can be derived from the ``location`` keyword.
%s
Returns (cax, kw), the child axes and the reduced kw dictionary to be
passed when creating the colorbar instance.
'''
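    # Sketch of the typical call sequence (the parent axes ``ax`` and the
    # mappable ``im`` are assumed names):
    #
    #     cax, kw = make_axes(ax, location='right', fraction=0.05, pad=0.02)
    #     cbar = Colorbar(cax, im, **kw)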
locations = ["left", "right", "top", "bottom"]
if orientation is not None and location is not None:
raise TypeError('position and orientation are mutually exclusive. '
'Consider setting the position to any of {}'
.format(', '.join(locations)))
# provide a default location
if location is None and orientation is None:
location = 'right'
# allow the user to not specify the location by specifying the
# orientation instead
if location is None:
location = 'right' if orientation == 'vertical' else 'bottom'
if location not in locations:
raise ValueError('Invalid colorbar location. Must be one '
'of %s' % ', '.join(locations))
default_location_settings = {'left': {'anchor': (1.0, 0.5),
'panchor': (0.0, 0.5),
'pad': 0.10,
'orientation': 'vertical'},
'right': {'anchor': (0.0, 0.5),
'panchor': (1.0, 0.5),
'pad': 0.05,
'orientation': 'vertical'},
'top': {'anchor': (0.5, 0.0),
'panchor': (0.5, 1.0),
'pad': 0.05,
'orientation': 'horizontal'},
'bottom': {'anchor': (0.5, 1.0),
'panchor': (0.5, 0.0),
'pad': 0.15, # backwards compat
'orientation': 'horizontal'},
}
loc_settings = default_location_settings[location]
# put appropriate values into the kw dict for passing back to
# the Colorbar class
kw['orientation'] = loc_settings['orientation']
kw['ticklocation'] = location
anchor = kw.pop('anchor', loc_settings['anchor'])
parent_anchor = kw.pop('panchor', loc_settings['panchor'])
parents_iterable = cbook.iterable(parents)
# turn parents into a list if it is not already. We do this w/ np
# because `plt.subplots` can return an ndarray and is natural to
# pass to `colorbar`.
parents = np.atleast_1d(parents).ravel()
# check if using constrained_layout:
try:
gs = parents[0].get_subplotspec().get_gridspec()
using_constrained_layout = (gs._layoutbox is not None)
except AttributeError:
using_constrained_layout = False
# defaults are not appropriate for constrained_layout:
pad0 = loc_settings['pad']
if using_constrained_layout:
pad0 = 0.02
pad = kw.pop('pad', pad0)
fig = parents[0].get_figure()
if not all(fig is ax.get_figure() for ax in parents):
raise ValueError('Unable to create a colorbar axes as not all '
'parents share the same figure.')
# take a bounding box around all of the given axes
parents_bbox = mtransforms.Bbox.union(
[ax.get_position(original=True).frozen() for ax in parents])
pb = parents_bbox
if location in ('left', 'right'):
if location == 'left':
pbcb, _, pb1 = pb.splitx(fraction, fraction + pad)
else:
pb1, _, pbcb = pb.splitx(1 - fraction - pad, 1 - fraction)
pbcb = pbcb.shrunk(1.0, shrink).anchored(anchor, pbcb)
else:
if location == 'bottom':
pbcb, _, pb1 = pb.splity(fraction, fraction + pad)
else:
pb1, _, pbcb = pb.splity(1 - fraction - pad, 1 - fraction)
pbcb = pbcb.shrunk(shrink, 1.0).anchored(anchor, pbcb)
# define the aspect ratio in terms of y's per x rather than x's per y
aspect = 1.0 / aspect
# define a transform which takes us from old axes coordinates to
# new axes coordinates
shrinking_trans = mtransforms.BboxTransform(parents_bbox, pb1)
# transform each of the axes in parents using the new transform
for ax in parents:
new_posn = shrinking_trans.transform(ax.get_position())
new_posn = mtransforms.Bbox(new_posn)
ax._set_position(new_posn)
if parent_anchor is not False:
ax.set_anchor(parent_anchor)
cax = fig.add_axes(pbcb)
# OK, now make a layoutbox for the cb axis. Later, we will use this
# to make the colorbar fit nicely.
if not using_constrained_layout:
# no layout boxes:
lb = None
lbpos = None
# and we need to set the aspect ratio by hand...
cax.set_aspect(aspect, anchor=anchor, adjustable='box')
else:
if not parents_iterable:
# this is a single axis...
ax = parents[0]
lb, lbpos = constrained_layout.layoutcolorbarsingle(
ax, cax, shrink, aspect, location, pad=pad)
else: # there is more than one parent, so lets use gridspec
# the colorbar will be a sibling of this gridspec, so the
# parent is the same parent as the gridspec. Either the figure,
# or a subplotspec.
lb, lbpos = constrained_layout.layoutcolorbargridspec(
parents, cax, shrink, aspect, location, pad)
cax._layoutbox = lb
cax._poslayoutbox = lbpos
return cax, kw
@docstring.Substitution(make_axes_kw_doc)
def make_axes_gridspec(parent, **kw):
'''
Resize and reposition a parent axes, and return a child axes
suitable for a colorbar. This function is similar to
    make_axes. Primary differences are:
* *make_axes_gridspec* only handles the *orientation* keyword
and cannot handle the "location" keyword.
* *make_axes_gridspec* should only be used with a subplot parent.
* *make_axes* creates an instance of Axes. *make_axes_gridspec*
creates an instance of Subplot.
* *make_axes* updates the position of the
parent. *make_axes_gridspec* replaces the grid_spec attribute
of the parent with a new one.
While this function is meant to be compatible with *make_axes*,
there could be some minor differences.
Keyword arguments may include the following (with defaults):
*orientation*
'vertical' or 'horizontal'
%s
All but the first of these are stripped from the input kw set.
Returns (cax, kw), the child axes and the reduced kw dictionary to be
passed when creating the colorbar instance.
'''
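    # Sketch (the subplot parent ``ax`` and the mappable ``im`` are assumed
    # names):
    #
    #     cax, kw = make_axes_gridspec(ax, orientation='horizontal', pad=0.2)
    #     cbar = Colorbar(cax, im, **kw)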
orientation = kw.setdefault('orientation', 'vertical')
kw['ticklocation'] = 'auto'
fraction = kw.pop('fraction', 0.15)
shrink = kw.pop('shrink', 1.0)
aspect = kw.pop('aspect', 20)
x1 = 1 - fraction
# for shrinking
pad_s = (1 - shrink) * 0.5
wh_ratios = [pad_s, shrink, pad_s]
# we need to none the tree of layoutboxes because
# constrained_layout can't remove and replace the tree
# hierarchy w/o a seg fault.
gs = parent.get_subplotspec().get_gridspec()
layoutbox.nonetree(gs._layoutbox)
gs_from_subplotspec = gridspec.GridSpecFromSubplotSpec
if orientation == 'vertical':
pad = kw.pop('pad', 0.05)
wh_space = 2 * pad / (1 - pad)
gs = gs_from_subplotspec(1, 2,
subplot_spec=parent.get_subplotspec(),
wspace=wh_space,
width_ratios=[x1 - pad, fraction])
gs2 = gs_from_subplotspec(3, 1,
subplot_spec=gs[1],
hspace=0.,
height_ratios=wh_ratios)
anchor = (0.0, 0.5)
panchor = (1.0, 0.5)
else:
pad = kw.pop('pad', 0.15)
wh_space = 2 * pad / (1 - pad)
gs = gs_from_subplotspec(2, 1,
subplot_spec=parent.get_subplotspec(),
hspace=wh_space,
height_ratios=[x1 - pad, fraction])
gs2 = gs_from_subplotspec(1, 3,
subplot_spec=gs[1],
wspace=0.,
width_ratios=wh_ratios)
aspect = 1 / aspect
anchor = (0.5, 1.0)
panchor = (0.5, 0.0)
parent.set_subplotspec(gs[0])
parent.update_params()
parent._set_position(parent.figbox)
parent.set_anchor(panchor)
fig = parent.get_figure()
cax = fig.add_subplot(gs2[1])
cax.set_aspect(aspect, anchor=anchor, adjustable='box')
return cax, kw
class ColorbarPatch(Colorbar):
"""
A Colorbar which is created using :class:`~matplotlib.patches.Patch`
    rather than the default :meth:`~matplotlib.axes.Axes.pcolormesh`.
It uses a list of Patch instances instead of a
:class:`~matplotlib.collections.PatchCollection` because the
latter does not allow the hatch pattern to vary among the
members of the collection.
"""
def __init__(self, ax, mappable, **kw):
# we do not want to override the behaviour of solids
# so add a new attribute which will be a list of the
# colored patches in the colorbar
self.solids_patches = []
Colorbar.__init__(self, ax, mappable, **kw)
def _add_solids(self, X, Y, C):
"""
Draw the colors using :class:`~matplotlib.patches.Patch`;
optionally add separators.
"""
# Save, set, and restore hold state to keep pcolor from
# clearing the axes. Ordinarily this will not be needed,
# since the axes object should already have hold set.
_hold = self.ax._hold
self.ax._hold = True
kw = {'alpha': self.alpha, }
n_segments = len(C)
# ensure there are sufficient hatches
hatches = self.mappable.hatches * n_segments
patches = []
for i in xrange(len(X) - 1):
val = C[i][0]
hatch = hatches[i]
xy = np.array([[X[i][0], Y[i][0]],
[X[i][1], Y[i][0]],
[X[i + 1][1], Y[i + 1][0]],
[X[i + 1][0], Y[i + 1][1]]])
if self.orientation == 'horizontal':
# if horizontal swap the xs and ys
xy = xy[..., ::-1]
patch = mpatches.PathPatch(mpath.Path(xy),
facecolor=self.cmap(self.norm(val)),
hatch=hatch, linewidth=0,
antialiased=False, **kw)
self.ax.add_patch(patch)
patches.append(patch)
if self.solids_patches:
for solid in self.solids_patches:
solid.remove()
self.solids_patches = patches
if self.dividers is not None:
self.dividers.remove()
self.dividers = None
if self.drawedges:
self.dividers = collections.LineCollection(
self._edges(X, Y),
colors=(mpl.rcParams['axes.edgecolor'],),
linewidths=(0.5 * mpl.rcParams['axes.linewidth'],))
self.ax.add_collection(self.dividers)
self.ax._hold = _hold
def colorbar_factory(cax, mappable, **kwargs):
"""
Creates a colorbar on the given axes for the given mappable.
Typically, for automatic colorbar placement given only a mappable use
:meth:`~matplotlib.figure.Figure.colorbar`.
"""
# if the given mappable is a contourset with any hatching, use
# ColorbarPatch else use Colorbar
if (isinstance(mappable, contour.ContourSet)
and any([hatch is not None for hatch in mappable.hatches])):
cb = ColorbarPatch(cax, mappable, **kwargs)
else:
cb = Colorbar(cax, mappable, **kwargs)
cid = mappable.callbacksSM.connect('changed', cb.on_mappable_changed)
mappable.colorbar = cb
mappable.colorbar_cid = cid
return cb
| 52,794 | 36.549787 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/texmanager.py |
r"""
This module supports embedded TeX expressions in matplotlib via dvipng
and dvips for the raster and postscript backends. The tex and
dvipng/dvips information is cached in ~/.matplotlib/tex.cache for reuse between
sessions.
Requirements:
* latex
* \*Agg backends: dvipng>=1.6
* PS backend: psfrag, dvips, and Ghostscript>=8.60
Backends:
* \*Agg
* PS
* PDF
For raster output, you can get RGBA numpy arrays from TeX expressions
as follows::
texmanager = TexManager()
s = ('\TeX\ is Number '
'$\displaystyle\sum_{n=1}^\infty\frac{-e^{i\pi}}{2^n}$!')
Z = texmanager.get_rgba(s, fontsize=12, dpi=80, rgb=(1,0,0))
To enable tex rendering of all text in your matplotlib figure, set
text.usetex in your matplotlibrc file or include these two lines in
your script::
from matplotlib import rc
rc('text', usetex=True)
"""
from __future__ import absolute_import, division, print_function
import six
import copy
import glob
import os
import shutil
import sys
import warnings
import logging
from hashlib import md5
import distutils.version
import numpy as np
import matplotlib as mpl
from matplotlib import rcParams
from matplotlib._png import read_png
from matplotlib.cbook import mkdirs, Locked
from matplotlib.compat.subprocess import subprocess, Popen, PIPE, STDOUT
import matplotlib.dviread as dviread
import re
_log = logging.getLogger(__name__)
@mpl.cbook.deprecated("2.1")
def dvipng_hack_alpha():
try:
p = Popen([str('dvipng'), '-version'], stdin=PIPE, stdout=PIPE,
stderr=STDOUT, close_fds=(sys.platform != 'win32'))
stdout, stderr = p.communicate()
except OSError:
_log.info('No dvipng was found')
return False
lines = stdout.decode(sys.getdefaultencoding()).split('\n')
for line in lines:
if line.startswith('dvipng '):
version = line.split()[-1]
_log.info('Found dvipng version %s', version)
version = distutils.version.LooseVersion(version)
return version < distutils.version.LooseVersion('1.6')
_log.info('Unexpected response from dvipng -version')
return False
class TexManager(object):
"""
Convert strings to dvi files using TeX, caching the results to a directory.
"""
cachedir = mpl.get_cachedir()
if cachedir is not None:
texcache = os.path.join(cachedir, 'tex.cache')
mkdirs(texcache)
else:
# Should only happen in a restricted environment (such as Google App
# Engine). Deal with this gracefully by not creating a cache directory.
texcache = None
# Caches.
rgba_arrayd = {}
grey_arrayd = {}
postscriptd = property(mpl.cbook.deprecated("2.2")(lambda self: {}))
pscnt = property(mpl.cbook.deprecated("2.2")(lambda self: 0))
serif = ('cmr', '')
sans_serif = ('cmss', '')
monospace = ('cmtt', '')
cursive = ('pzc', r'\usepackage{chancery}')
font_family = 'serif'
font_families = ('serif', 'sans-serif', 'cursive', 'monospace')
font_info = {
'new century schoolbook': ('pnc', r'\renewcommand{\rmdefault}{pnc}'),
'bookman': ('pbk', r'\renewcommand{\rmdefault}{pbk}'),
'times': ('ptm', r'\usepackage{mathptmx}'),
'palatino': ('ppl', r'\usepackage{mathpazo}'),
'zapf chancery': ('pzc', r'\usepackage{chancery}'),
'cursive': ('pzc', r'\usepackage{chancery}'),
'charter': ('pch', r'\usepackage{charter}'),
'serif': ('cmr', ''),
'sans-serif': ('cmss', ''),
'helvetica': ('phv', r'\usepackage{helvet}'),
'avant garde': ('pag', r'\usepackage{avant}'),
'courier': ('pcr', r'\usepackage{courier}'),
'monospace': ('cmtt', ''),
'computer modern roman': ('cmr', ''),
'computer modern sans serif': ('cmss', ''),
'computer modern typewriter': ('cmtt', '')}
_rc_cache = None
_rc_cache_keys = (('text.latex.preamble', ) +
tuple(['font.' + n for n in ('family', ) +
font_families]))
def __init__(self):
if self.texcache is None:
raise RuntimeError('Cannot create TexManager, as there is no '
'cache directory available')
mkdirs(self.texcache)
ff = rcParams['font.family']
if len(ff) == 1 and ff[0].lower() in self.font_families:
self.font_family = ff[0].lower()
elif (isinstance(ff, six.string_types)
and ff.lower() in self.font_families):
self.font_family = ff.lower()
else:
_log.info('font.family must be one of (%s) when text.usetex is '
'True. serif will be used by default.',
', '.join(self.font_families))
self.font_family = 'serif'
fontconfig = [self.font_family]
for font_family in self.font_families:
font_family_attr = font_family.replace('-', '_')
for font in rcParams['font.' + font_family]:
if font.lower() in self.font_info:
setattr(self, font_family_attr,
self.font_info[font.lower()])
_log.debug('family: %s, font: %s, info: %s',
font_family, font, self.font_info[font.lower()])
break
else:
_log.debug('%s font is not compatible with usetex.',
font_family)
else:
_log.info('No LaTeX-compatible font found for the %s font '
'family in rcParams. Using default.', font_family)
setattr(self, font_family_attr, self.font_info[font_family])
fontconfig.append(getattr(self, font_family_attr)[0])
# Add a hash of the latex preamble to self._fontconfig so that the
# correct png is selected for strings rendered with same font and dpi
# even if the latex preamble changes within the session
preamble_bytes = self.get_custom_preamble().encode('utf-8')
fontconfig.append(md5(preamble_bytes).hexdigest())
self._fontconfig = ''.join(fontconfig)
# The following packages and commands need to be included in the latex
# file's preamble:
cmd = [self.serif[1], self.sans_serif[1], self.monospace[1]]
if self.font_family == 'cursive':
cmd.append(self.cursive[1])
self._font_preamble = '\n'.join(
[r'\usepackage{type1cm}'] + cmd + [r'\usepackage{textcomp}'])
def get_basefile(self, tex, fontsize, dpi=None):
"""
Return a filename based on a hash of the string, fontsize, and dpi.
"""
s = ''.join([tex, self.get_font_config(), '%f' % fontsize,
self.get_custom_preamble(), str(dpi or '')])
return os.path.join(self.texcache, md5(s.encode('utf-8')).hexdigest())
def get_font_config(self):
"""Reinitializes self if relevant rcParams on have changed."""
if self._rc_cache is None:
self._rc_cache = dict.fromkeys(self._rc_cache_keys)
changed = [par for par in self._rc_cache_keys
if rcParams[par] != self._rc_cache[par]]
if changed:
_log.debug('following keys changed: %s', changed)
for k in changed:
_log.debug('%-20s: %-10s -> %-10s',
k, self._rc_cache[k], rcParams[k])
# deepcopy may not be necessary, but feels more future-proof
self._rc_cache[k] = copy.deepcopy(rcParams[k])
_log.debug('RE-INIT\nold fontconfig: %s', self._fontconfig)
self.__init__()
_log.debug('fontconfig: %s', self._fontconfig)
return self._fontconfig
def get_font_preamble(self):
"""
Return a string containing font configuration for the tex preamble.
"""
return self._font_preamble
def get_custom_preamble(self):
"""Return a string containing user additions to the tex preamble."""
return '\n'.join(rcParams['text.latex.preamble'])
def make_tex(self, tex, fontsize):
"""
Generate a tex file to render the tex string at a specific font size.
Return the file name.
"""
basefile = self.get_basefile(tex, fontsize)
texfile = '%s.tex' % basefile
custom_preamble = self.get_custom_preamble()
fontcmd = {'sans-serif': r'{\sffamily %s}',
'monospace': r'{\ttfamily %s}'}.get(self.font_family,
r'{\rmfamily %s}')
tex = fontcmd % tex
if rcParams['text.latex.unicode']:
unicode_preamble = r"""
\usepackage{ucs}
\usepackage[utf8x]{inputenc}"""
else:
unicode_preamble = ''
s = r"""
\documentclass{article}
%s
%s
%s
\usepackage[papersize={72in,72in},body={70in,70in},margin={1in,1in}]{geometry}
\pagestyle{empty}
\begin{document}
\fontsize{%f}{%f}%s
\end{document}
""" % (self._font_preamble, unicode_preamble, custom_preamble,
fontsize, fontsize * 1.25, tex)
with open(texfile, 'wb') as fh:
if rcParams['text.latex.unicode']:
fh.write(s.encode('utf8'))
else:
try:
fh.write(s.encode('ascii'))
except UnicodeEncodeError as err:
_log.info("You are using unicode and latex, but have not "
"enabled the 'text.latex.unicode' rcParam.")
raise
return texfile
_re_vbox = re.compile(
r"MatplotlibBox:\(([\d.]+)pt\+([\d.]+)pt\)x([\d.]+)pt")
def make_tex_preview(self, tex, fontsize):
"""
Generate a tex file to render the tex string at a specific font size.
It uses the preview.sty to determine the dimension (width, height,
descent) of the output.
Return the file name.
"""
basefile = self.get_basefile(tex, fontsize)
texfile = '%s.tex' % basefile
custom_preamble = self.get_custom_preamble()
fontcmd = {'sans-serif': r'{\sffamily %s}',
'monospace': r'{\ttfamily %s}'}.get(self.font_family,
r'{\rmfamily %s}')
tex = fontcmd % tex
if rcParams['text.latex.unicode']:
unicode_preamble = r"""
\usepackage{ucs}
\usepackage[utf8x]{inputenc}"""
else:
unicode_preamble = ''
# newbox, setbox, immediate, etc. are used to find the box
# extent of the rendered text.
s = r"""
\documentclass{article}
%s
%s
%s
\usepackage[active,showbox,tightpage]{preview}
\usepackage[papersize={72in,72in},body={70in,70in},margin={1in,1in}]{geometry}
%% we override the default showbox as it is treated as an error and makes
%% the exit status not zero
\def\showbox#1%%
{\immediate\write16{MatplotlibBox:(\the\ht#1+\the\dp#1)x\the\wd#1}}
\begin{document}
\begin{preview}
{\fontsize{%f}{%f}%s}
\end{preview}
\end{document}
""" % (self._font_preamble, unicode_preamble, custom_preamble,
fontsize, fontsize * 1.25, tex)
with open(texfile, 'wb') as fh:
if rcParams['text.latex.unicode']:
fh.write(s.encode('utf8'))
else:
try:
fh.write(s.encode('ascii'))
except UnicodeEncodeError as err:
_log.info("You are using unicode and latex, but have not "
"enabled the 'text.latex.unicode' rcParam.")
raise
return texfile
def _run_checked_subprocess(self, command, tex):
_log.debug(command)
try:
report = subprocess.check_output(command,
cwd=self.texcache,
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as exc:
raise RuntimeError(
'{prog} was not able to process the following string:\n'
'{tex!r}\n\n'
'Here is the full report generated by {prog}:\n'
'{exc}\n\n'.format(
prog=command[0],
tex=tex.encode('unicode_escape'),
exc=exc.output.decode('utf-8')))
_log.debug(report)
return report
def make_dvi(self, tex, fontsize):
"""
Generate a dvi file containing latex's layout of tex string.
Return the file name.
"""
if rcParams['text.latex.preview']:
return self.make_dvi_preview(tex, fontsize)
basefile = self.get_basefile(tex, fontsize)
dvifile = '%s.dvi' % basefile
if not os.path.exists(dvifile):
texfile = self.make_tex(tex, fontsize)
with Locked(self.texcache):
self._run_checked_subprocess(
["latex", "-interaction=nonstopmode", "--halt-on-error",
texfile], tex)
for fname in glob.glob(basefile + '*'):
if not fname.endswith(('dvi', 'tex')):
try:
os.remove(fname)
except OSError:
pass
return dvifile
def make_dvi_preview(self, tex, fontsize):
"""
Generate a dvi file containing latex's layout of tex string.
It calls make_tex_preview() method and store the size information
(width, height, descent) in a separate file.
Return the file name.
"""
basefile = self.get_basefile(tex, fontsize)
dvifile = '%s.dvi' % basefile
baselinefile = '%s.baseline' % basefile
if not os.path.exists(dvifile) or not os.path.exists(baselinefile):
texfile = self.make_tex_preview(tex, fontsize)
report = self._run_checked_subprocess(
["latex", "-interaction=nonstopmode", "--halt-on-error",
texfile], tex)
# find the box extent information in the latex output
# file and store them in ".baseline" file
m = TexManager._re_vbox.search(report.decode("utf-8"))
with open(basefile + '.baseline', "w") as fh:
fh.write(" ".join(m.groups()))
for fname in glob.glob(basefile + '*'):
if not fname.endswith(('dvi', 'tex', 'baseline')):
try:
os.remove(fname)
except OSError:
pass
return dvifile
def make_png(self, tex, fontsize, dpi):
"""
Generate a png file containing latex's rendering of tex string.
Return the file name.
"""
basefile = self.get_basefile(tex, fontsize, dpi)
pngfile = '%s.png' % basefile
# see get_rgba for a discussion of the background
if not os.path.exists(pngfile):
dvifile = self.make_dvi(tex, fontsize)
self._run_checked_subprocess(
["dvipng", "-bg", "Transparent", "-D", str(dpi),
"-T", "tight", "-o", pngfile, dvifile], tex)
return pngfile
@mpl.cbook.deprecated("2.2")
def make_ps(self, tex, fontsize):
"""
Generate a postscript file containing latex's rendering of tex string.
Return the file name.
"""
basefile = self.get_basefile(tex, fontsize)
psfile = '%s.epsf' % basefile
if not os.path.exists(psfile):
dvifile = self.make_dvi(tex, fontsize)
self._run_checked_subprocess(
["dvips", "-q", "-E", "-o", psfile, dvifile], tex)
return psfile
@mpl.cbook.deprecated("2.2")
def get_ps_bbox(self, tex, fontsize):
"""
Return a list of PS bboxes for latex's rendering of the tex string.
"""
psfile = self.make_ps(tex, fontsize)
with open(psfile) as ps:
for line in ps:
if line.startswith('%%BoundingBox:'):
return [int(val) for val in line.split()[1:]]
raise RuntimeError('Could not parse %s' % psfile)
def get_grey(self, tex, fontsize=None, dpi=None):
"""Return the alpha channel."""
key = tex, self.get_font_config(), fontsize, dpi
alpha = self.grey_arrayd.get(key)
if alpha is None:
pngfile = self.make_png(tex, fontsize, dpi)
X = read_png(os.path.join(self.texcache, pngfile))
self.grey_arrayd[key] = alpha = X[:, :, -1]
return alpha
def get_rgba(self, tex, fontsize=None, dpi=None, rgb=(0, 0, 0)):
"""Return latex's rendering of the tex string as an rgba array."""
if not fontsize:
fontsize = rcParams['font.size']
if not dpi:
dpi = rcParams['savefig.dpi']
r, g, b = rgb
key = tex, self.get_font_config(), fontsize, dpi, tuple(rgb)
Z = self.rgba_arrayd.get(key)
if Z is None:
alpha = self.get_grey(tex, fontsize, dpi)
Z = np.dstack([r, g, b, alpha])
self.rgba_arrayd[key] = Z
return Z
def get_text_width_height_descent(self, tex, fontsize, renderer=None):
"""Return width, height and descent of the text."""
if tex.strip() == '':
return 0, 0, 0
dpi_fraction = renderer.points_to_pixels(1.) if renderer else 1
if rcParams['text.latex.preview']:
# use preview.sty
basefile = self.get_basefile(tex, fontsize)
baselinefile = '%s.baseline' % basefile
if not os.path.exists(baselinefile):
dvifile = self.make_dvi_preview(tex, fontsize)
with open(baselinefile) as fh:
l = fh.read().split()
height, depth, width = [float(l1) * dpi_fraction for l1 in l]
return width, height + depth, depth
else:
# use dviread. It sometimes returns a wrong descent.
dvifile = self.make_dvi(tex, fontsize)
with dviread.Dvi(dvifile, 72 * dpi_fraction) as dvi:
page = next(iter(dvi))
# A total height (including the descent) needs to be returned.
return page.width, page.height + page.descent, page.descent
| 18,385 | 35.335968 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/artist.py |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from collections import OrderedDict, namedtuple
from functools import wraps
import inspect
import re
import warnings
import numpy as np
import matplotlib
from . import cbook, docstring, rcParams
from .path import Path
from .transforms import (Bbox, IdentityTransform, Transform, TransformedBbox,
TransformedPatchPath, TransformedPath)
# Note, matplotlib artists use the doc strings for set and get
# methods to enable the introspection methods of setp and getp. Every
# set_* method should have a docstring containing the line
#
# ACCEPTS: [ legal | values ]
#
# and aliases for setters and getters should have a docstring that
# starts with 'alias for ', as in 'alias for set_somemethod'
#
# You may wonder why we use so much boiler-plate manually defining the
# set_alias and get_alias functions, rather than using some clever
# python trick. The answer is that I need to be able to manipulate
# the docstring, and there is no clever way to do that in python 2.2,
# as far as I can see - see
#
# https://mail.python.org/pipermail/python-list/2004-October/242925.html
def allow_rasterization(draw):
"""
Decorator for Artist.draw method. Provides routines
that run before and after the draw call. The before and after functions
are useful for changing artist-dependent renderer attributes or making
other setup function calls, such as starting and flushing a mixed-mode
renderer.
"""
# the axes class has a second argument inframe for its draw method.
@wraps(draw)
def draw_wrapper(artist, renderer, *args, **kwargs):
try:
if artist.get_rasterized():
renderer.start_rasterizing()
if artist.get_agg_filter() is not None:
renderer.start_filter()
return draw(artist, renderer, *args, **kwargs)
finally:
if artist.get_agg_filter() is not None:
renderer.stop_filter(artist.get_agg_filter())
if artist.get_rasterized():
renderer.stop_rasterizing()
draw_wrapper._supports_rasterization = True
return draw_wrapper
def _stale_axes_callback(self, val):
if self.axes:
self.axes.stale = val
_XYPair = namedtuple("_XYPair", "x y")
class Artist(object):
"""
Abstract base class for someone who renders into a
:class:`FigureCanvas`.
"""
aname = 'Artist'
zorder = 0
# order of precedence when bulk setting/updating properties
# via update. The keys should be property names and the values
# integers
_prop_order = dict(color=-1)
def __init__(self):
self._stale = True
self.stale_callback = None
self._axes = None
self.figure = None
self._transform = None
self._transformSet = False
self._visible = True
self._animated = False
self._alpha = None
self.clipbox = None
self._clippath = None
self._clipon = True
self._label = ''
self._picker = None
self._contains = None
self._rasterized = None
self._agg_filter = None
self._mouseover = False
self.eventson = False # fire events only if eventson
self._oid = 0 # an observer id
self._propobservers = {} # a dict from oids to funcs
try:
self.axes = None
except AttributeError:
# Handle self.axes as a read-only property, as in Figure.
pass
self._remove_method = None
self._url = None
self._gid = None
self._snap = None
self._sketch = rcParams['path.sketch']
self._path_effects = rcParams['path.effects']
self._sticky_edges = _XYPair([], [])
def __getstate__(self):
d = self.__dict__.copy()
# remove the unpicklable remove method, this will get re-added on load
# (by the axes) if the artist lives on an axes.
d['_remove_method'] = None
d['stale_callback'] = None
return d
def remove(self):
"""
Remove the artist from the figure if possible. The effect
will not be visible until the figure is redrawn, e.g., with
:meth:`matplotlib.axes.Axes.draw_idle`. Call
:meth:`matplotlib.axes.Axes.relim` to update the axes limits
if desired.
Note: :meth:`~matplotlib.axes.Axes.relim` will not see
collections even if the collection was added to axes with
*autolim* = True.
Note: there is no support for removing the artist's legend entry.
"""
# There is no method to set the callback. Instead the parent should
# set the _remove_method attribute directly. This would be a
# protected attribute if Python supported that sort of thing. The
# callback has one parameter, which is the child to be removed.
if self._remove_method is not None:
self._remove_method(self)
# clear stale callback
self.stale_callback = None
_ax_flag = False
if hasattr(self, 'axes') and self.axes:
# remove from the mouse hit list
self.axes.mouseover_set.discard(self)
# mark the axes as stale
self.axes.stale = True
# decouple the artist from the axes
self.axes = None
_ax_flag = True
if self.figure:
self.figure = None
if not _ax_flag:
self.figure = True
else:
raise NotImplementedError('cannot remove artist')
# TODO: the fix for the collections relim problem is to move the
# limits calculation into the artist itself, including the property of
# whether or not the artist should affect the limits. Then there will
# be no distinction between axes.add_line, axes.add_patch, etc.
# TODO: add legend support
def have_units(self):
'Return *True* if units are set on the *x* or *y* axes'
ax = self.axes
if ax is None or ax.xaxis is None:
return False
return ax.xaxis.have_units() or ax.yaxis.have_units()
def convert_xunits(self, x):
"""For artists in an axes, if the xaxis has units support,
convert *x* using xaxis unit type
"""
ax = getattr(self, 'axes', None)
if ax is None or ax.xaxis is None:
return x
return ax.xaxis.convert_units(x)
def convert_yunits(self, y):
"""For artists in an axes, if the yaxis has units support,
convert *y* using yaxis unit type
"""
ax = getattr(self, 'axes', None)
if ax is None or ax.yaxis is None:
return y
return ax.yaxis.convert_units(y)
@property
def axes(self):
"""
The :class:`~matplotlib.axes.Axes` instance the artist
resides in, or *None*.
"""
return self._axes
@axes.setter
def axes(self, new_axes):
if (new_axes is not None and self._axes is not None
and new_axes != self._axes):
raise ValueError("Can not reset the axes. You are probably "
"trying to re-use an artist in more than one "
"Axes which is not supported")
self._axes = new_axes
if new_axes is not None and new_axes is not self:
self.stale_callback = _stale_axes_callback
return new_axes
@property
def stale(self):
"""
If the artist is 'stale' and needs to be re-drawn for the output to
match the internal state of the artist.
"""
return self._stale
@stale.setter
def stale(self, val):
self._stale = val
# if the artist is animated it does not take normal part in the
# draw stack and is not expected to be drawn as part of the normal
# draw loop (when not saving) so do not propagate this change
if self.get_animated():
return
if val and self.stale_callback is not None:
self.stale_callback(self, val)
def get_window_extent(self, renderer):
"""
Get the axes bounding box in display space.
Subclasses should override for inclusion in the bounding box
"tight" calculation. Default is to return an empty bounding
box at 0, 0.
        Be careful when using this function: the results will not update
        if the window extent of the artist changes. The extent
can change due to any changes in the transform stack, such as
changing the axes limits, the figure size, or the canvas used
(as is done when saving a figure). This can lead to unexpected
behavior where interactive figures will look fine on the screen,
but will save incorrectly.
"""
return Bbox([[0, 0], [0, 0]])
def add_callback(self, func):
"""
Adds a callback function that will be called whenever one of
the :class:`Artist`'s properties changes.
Returns an *id* that is useful for removing the callback with
:meth:`remove_callback` later.
"""
oid = self._oid
self._propobservers[oid] = func
self._oid += 1
return oid
def remove_callback(self, oid):
"""
Remove a callback based on its *id*.
.. seealso::
:meth:`add_callback`
For adding callbacks
"""
try:
del self._propobservers[oid]
except KeyError:
pass
def pchanged(self):
"""
Fire an event when property changed, calling all of the
registered callbacks.
"""
for oid, func in six.iteritems(self._propobservers):
func(self)
def is_transform_set(self):
"""
Returns *True* if :class:`Artist` has a transform explicitly
set.
"""
return self._transformSet
def set_transform(self, t):
"""
Set the artist transform.
Parameters
----------
t : `.Transform`
.. ACCEPTS: `.Transform`
"""
self._transform = t
self._transformSet = True
self.pchanged()
self.stale = True
def get_transform(self):
"""
Return the :class:`~matplotlib.transforms.Transform`
instance used by this artist.
"""
if self._transform is None:
self._transform = IdentityTransform()
elif (not isinstance(self._transform, Transform)
and hasattr(self._transform, '_as_mpl_transform')):
self._transform = self._transform._as_mpl_transform(self.axes)
return self._transform
@cbook.deprecated("2.2")
def hitlist(self, event):
"""
List the children of the artist which contain the mouse event *event*.
"""
L = []
try:
hascursor, info = self.contains(event)
if hascursor:
L.append(self)
except:
import traceback
traceback.print_exc()
print("while checking", self.__class__)
for a in self.get_children():
L.extend(a.hitlist(event))
return L
def get_children(self):
"""
Return a list of the child :class:`Artist`s this
:class:`Artist` contains.
"""
return []
def contains(self, mouseevent):
"""Test whether the artist contains the mouse event.
Returns the truth value and a dictionary of artist specific details of
selection, such as which points are contained in the pick radius. See
individual artists for details.
"""
if callable(self._contains):
return self._contains(self, mouseevent)
warnings.warn("'%s' needs 'contains' method" % self.__class__.__name__)
return False, {}
def set_contains(self, picker):
"""
Replace the contains test used by this artist. The new picker
should be a callable function which determines whether the
artist is hit by the mouse event::
hit, props = picker(artist, mouseevent)
If the mouse event is over the artist, return *hit* = *True*
and *props* is a dictionary of properties you want returned
with the contains test.
Parameters
----------
picker : callable
.. ACCEPTS: a callable function
"""
self._contains = picker
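# Usage sketch for a custom contains test (illustrative; ``fig``, ``txt`` and
# ``some_mouse_event`` below are hypothetical objects, not defined in this
# module). The callable receives the artist and the mouse event and returns
# ``hit, props``:
#
#     def always_hit(artist, mouseevent):
#         # report a hit anywhere on the canvas, with a custom detail dict
#         return mouseevent.x is not None, {'note': 'custom hit'}
#
#     txt = fig.text(0.5, 0.5, 'pick me')
#     txt.set_contains(always_hit)
#     hit, props = txt.contains(some_mouse_event)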
def get_contains(self):
"""
Return the _contains test used by the artist, or *None* for default.
"""
return self._contains
def pickable(self):
'Return *True* if :class:`Artist` is pickable.'
return (self.figure is not None and
self.figure.canvas is not None and
self._picker is not None)
def pick(self, mouseevent):
"""
Process a pick event.
Each child artist will fire a pick event if *mouseevent* is over
the artist and the artist has its picker set.
"""
# Pick self
if self.pickable():
picker = self.get_picker()
if callable(picker):
inside, prop = picker(self, mouseevent)
else:
inside, prop = self.contains(mouseevent)
if inside:
self.figure.canvas.pick_event(mouseevent, self, **prop)
# Pick children
for a in self.get_children():
# make sure the event happened in the same axes
ax = getattr(a, 'axes', None)
if (mouseevent.inaxes is None or ax is None
or mouseevent.inaxes == ax):
# we need to check if mouseevent.inaxes is None
# because some objects associated with an axes (e.g., a
# tick label) can be outside the bounding box of the
# axes and inaxes will be None
# also check that ax is None so that it traverse objects
# which do no have an axes property but children might
a.pick(mouseevent)
def set_picker(self, picker):
"""
Set the epsilon for picking used by this artist
*picker* can be one of the following:
* *None*: picking is disabled for this artist (default)
* A boolean: if *True* then picking will be enabled and the
artist will fire a pick event if the mouse event is over
the artist
* A float: if picker is a number it is interpreted as an
epsilon tolerance in points and the artist will fire
off an event if its data is within epsilon of the mouse
event. For some artists like lines and patch collections,
the artist may provide additional data to the pick event
that is generated, e.g., the indices of the data within
epsilon of the pick event
* A function: if picker is callable, it is a user supplied
function which determines whether the artist is hit by the
mouse event::
hit, props = picker(artist, mouseevent)
to determine the hit test. If the mouse event is over the
artist, return *hit=True* and props is a dictionary of
properties you want added to the PickEvent attributes.
Parameters
----------
picker : None or bool or float or callable
.. ACCEPTS: [None | bool | float | callable]
"""
self._picker = picker
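# Usage sketch for the float form of *picker* (illustrative; the pyplot
# objects below are not part of this module): a 5-point tolerance makes the
# line fire pick events when the mouse is near its data.
#
#     import matplotlib.pyplot as plt
#     fig, ax = plt.subplots()
#     line, = ax.plot([0, 1, 2], [0, 1, 0])
#     line.set_picker(5)  # 5 points tolerance
#     fig.canvas.mpl_connect('pick_event',
#                            lambda event: print(event.artist, event.ind))
#     plt.show()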
def get_picker(self):
"""Return the picker object used by this artist."""
return self._picker
@cbook.deprecated("2.2", "artist.figure is not None")
def is_figure_set(self):
"""Returns whether the artist is assigned to a `.Figure`."""
return self.figure is not None
def get_url(self):
"""Returns the url."""
return self._url
def set_url(self, url):
"""
Sets the url for the artist.
Parameters
----------
url : str
.. ACCEPTS: a url string
"""
self._url = url
def get_gid(self):
"""Returns the group id."""
return self._gid
def set_gid(self, gid):
"""
Sets the (group) id for the artist.
Parameters
----------
gid : str
.. ACCEPTS: an id string
"""
self._gid = gid
def get_snap(self):
"""
Returns the snap setting which may be:
* True: snap vertices to the nearest pixel center
* False: leave vertices as-is
* None: (auto) If the path contains only rectilinear line
segments, round to the nearest pixel center
Only supported by the Agg and MacOSX backends.
"""
if rcParams['path.snap']:
return self._snap
else:
return False
def set_snap(self, snap):
"""
Sets the snap setting which may be:
* True: snap vertices to the nearest pixel center
* False: leave vertices as-is
* None: (auto) If the path contains only rectilinear line
segments, round to the nearest pixel center
Only supported by the Agg and MacOSX backends.
Parameters
----------
snap : bool or None
.. ACCEPTS: bool or None
"""
self._snap = snap
self.stale = True
def get_sketch_params(self):
"""
Returns the sketch parameters for the artist.
Returns
-------
sketch_params : tuple or `None`
A 3-tuple with the following elements:
* `scale`: The amplitude of the wiggle perpendicular to the
source line.
* `length`: The length of the wiggle along the line.
* `randomness`: The scale factor by which the length is
shrunken or expanded.
May return `None` if no sketch parameters were set.
"""
return self._sketch
def set_sketch_params(self, scale=None, length=None, randomness=None):
"""
Sets the sketch parameters.
Parameters
----------
scale : float, optional
The amplitude of the wiggle perpendicular to the source
line, in pixels. If scale is `None`, or not provided, no
sketch filter will be applied.
length : float, optional
The length of the wiggle along the line, in pixels
(default 128.0)
randomness : float, optional
The scale factor by which the length is shrunken or
expanded (default 16.0)
.. ACCEPTS: (scale: float, length: float, randomness: float)
"""
if scale is None:
self._sketch = None
else:
self._sketch = (scale, length or 128.0, randomness or 16.0)
self.stale = True
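# Usage sketch (illustrative; the pyplot objects below are not defined in
# this module): giving a line a hand-drawn, xkcd-like wiggle.
#
#     import matplotlib.pyplot as plt
#     fig, ax = plt.subplots()
#     line, = ax.plot([0, 1, 2], [0, 1, 0])
#     line.set_sketch_params(scale=2, length=100, randomness=20)
#     fig.canvas.draw_idle()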
def set_path_effects(self, path_effects):
"""Set the path effects.
Parameters
----------
path_effects : `.AbstractPathEffect`
.. ACCEPTS: `.AbstractPathEffect`
"""
self._path_effects = path_effects
self.stale = True
def get_path_effects(self):
return self._path_effects
def get_figure(self):
"""Return the `.Figure` instance the artist belongs to."""
return self.figure
def set_figure(self, fig):
"""
Set the `.Figure` instance the artist belongs to.
Parameters
----------
fig : `.Figure`
.. ACCEPTS: a `.Figure` instance
"""
# if this is a no-op just return
if self.figure is fig:
return
# if we currently have a figure (the case of both `self.figure`
# and `fig` being None is taken care of above), the user is
# trying to change the figure this artist is associated with,
# which is not allowed for the same reason as adding the same
# instance to more than one Axes
if self.figure is not None:
raise RuntimeError("Can not put single artist in "
"more than one figure")
self.figure = fig
if self.figure and self.figure is not self:
self.pchanged()
self.stale = True
def set_clip_box(self, clipbox):
"""
Set the artist's clip `.Bbox`.
Parameters
----------
clipbox : `.Bbox`
.. ACCEPTS: a `.Bbox` instance
"""
self.clipbox = clipbox
self.pchanged()
self.stale = True
def set_clip_path(self, path, transform=None):
"""
Set the artist's clip path, which may be:
- a :class:`~matplotlib.patches.Patch` (or subclass) instance; or
- a :class:`~matplotlib.path.Path` instance, in which case a
:class:`~matplotlib.transforms.Transform` instance, which will be
applied to the path before using it for clipping, must be provided;
or
- ``None``, to remove a previously set clipping path.
For efficiency, if the path happens to be an axis-aligned rectangle,
this method will set the clipping box to the corresponding rectangle
and set the clipping path to ``None``.
ACCEPTS: [(`~matplotlib.path.Path`, `.Transform`) | `.Patch` | None]
"""
from matplotlib.patches import Patch, Rectangle
success = False
if transform is None:
if isinstance(path, Rectangle):
self.clipbox = TransformedBbox(Bbox.unit(),
path.get_transform())
self._clippath = None
success = True
elif isinstance(path, Patch):
self._clippath = TransformedPatchPath(path)
success = True
elif isinstance(path, tuple):
path, transform = path
if path is None:
self._clippath = None
success = True
elif isinstance(path, Path):
self._clippath = TransformedPath(path, transform)
success = True
elif isinstance(path, TransformedPatchPath):
self._clippath = path
success = True
elif isinstance(path, TransformedPath):
self._clippath = path
success = True
if not success:
raise TypeError(
"Invalid arguments to set_clip_path, of type {} and {}"
.format(type(path).__name__, type(transform).__name__))
# This may result in the callbacks being hit twice, but guarantees they
# will be hit at least once.
self.pchanged()
self.stale = True
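# Usage sketch (illustrative; the pyplot objects below are not defined in
# this module): clipping an image to a circular Patch given in axes
# coordinates.
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#     from matplotlib.patches import Circle
#     fig, ax = plt.subplots()
#     im = ax.imshow(np.random.rand(32, 32))
#     clip = Circle((0.5, 0.5), 0.4, transform=ax.transAxes)
#     im.set_clip_path(clip)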
def get_alpha(self):
"""
Return the alpha value used for blending - not supported on all
backends
"""
return self._alpha
def get_visible(self):
"Return the artist's visiblity"
return self._visible
def get_animated(self):
"Return the artist's animated state"
return self._animated
def get_clip_on(self):
'Return whether artist uses clipping'
return self._clipon
def get_clip_box(self):
'Return artist clipbox'
return self.clipbox
def get_clip_path(self):
'Return artist clip path'
return self._clippath
def get_transformed_clip_path_and_affine(self):
'''
Return the clip path with the non-affine part of its
transformation applied, and the remaining affine part of its
transformation.
'''
if self._clippath is not None:
return self._clippath.get_transformed_path_and_affine()
return None, None
def set_clip_on(self, b):
"""
Set whether artist uses clipping.
When False, artists will be visible outside of the axes, which
can lead to unexpected results.
Parameters
----------
b : bool
.. ACCEPTS: bool
"""
self._clipon = b
# This may result in the callbacks being hit twice, but ensures they
# are hit at least once
self.pchanged()
self.stale = True
def _set_gc_clip(self, gc):
'Set the clip properly for the gc'
if self._clipon:
if self.clipbox is not None:
gc.set_clip_rectangle(self.clipbox)
gc.set_clip_path(self._clippath)
else:
gc.set_clip_rectangle(None)
gc.set_clip_path(None)
def get_rasterized(self):
"""Return whether the artist is to be rasterized."""
return self._rasterized
def set_rasterized(self, rasterized):
"""
Force rasterized (bitmap) drawing in vector backend output.
Defaults to None, which implies the backend's default behavior.
Parameters
----------
rasterized : bool or None
.. ACCEPTS: bool or None
"""
if rasterized and not hasattr(self.draw, "_supports_rasterization"):
warnings.warn("Rasterization of '%s' will be ignored" % self)
self._rasterized = rasterized
def get_agg_filter(self):
"""Return filter function to be used for agg filter."""
return self._agg_filter
def set_agg_filter(self, filter_func):
"""Set the agg filter.
Parameters
----------
filter_func : callable
A filter function, which takes a (m, n, 3) float array and a dpi
value, and returns a (m, n, 3) array.
.. ACCEPTS: a filter function, which takes a (m, n, 3) float array
and a dpi value, and returns a (m, n, 3) array
"""
self._agg_filter = filter_func
self.stale = True
def draw(self, renderer, *args, **kwargs):
'Derived classes drawing method'
if not self.get_visible():
return
self.stale = False
def set_alpha(self, alpha):
"""
Set the alpha value used for blending - not supported on
all backends.
Parameters
----------
alpha : float
.. ACCEPTS: float (0.0 transparent through 1.0 opaque)
"""
self._alpha = alpha
self.pchanged()
self.stale = True
def set_visible(self, b):
"""
Set the artist's visibility.
Parameters
----------
b : bool
.. ACCEPTS: bool
"""
self._visible = b
self.pchanged()
self.stale = True
def set_animated(self, b):
"""
Set the artist's animation state.
Parameters
----------
b : bool
.. ACCEPTS: bool
"""
if self._animated != b:
self._animated = b
self.pchanged()
def update(self, props):
"""
Update this artist's properties from the dictionary *props*.
"""
def _update_property(self, k, v):
"""Sorting out how to update property (setter or setattr).
Parameters
----------
k : str
The name of property to update
v : obj
The value to assign to the property
Returns
-------
ret : obj or None
If using a `set_*` method, return its return value, else None.
"""
k = k.lower()
# white list attributes we want to be able to update through
# art.update, art.set, setp
if k in {'axes'}:
return setattr(self, k, v)
else:
func = getattr(self, 'set_' + k, None)
if not callable(func):
raise AttributeError('Unknown property %s' % k)
return func(v)
store = self.eventson
self.eventson = False
try:
ret = [_update_property(self, k, v)
for k, v in props.items()]
finally:
self.eventson = store
if len(ret):
self.pchanged()
self.stale = True
return ret
def get_label(self):
"""Get the label used for this artist in the legend."""
return self._label
def set_label(self, s):
"""
Set the label to *s* for auto legend.
Parameters
----------
s : object
*s* will be converted to a string by calling `str` (`unicode` on
Py2).
.. ACCEPTS: object
"""
if s is not None:
self._label = six.text_type(s)
else:
self._label = None
self.pchanged()
self.stale = True
def get_zorder(self):
"""Return the artist's zorder."""
return self.zorder
def set_zorder(self, level):
"""
Set the zorder for the artist. Artists with lower zorder
values are drawn first.
Parameters
----------
level : float
.. ACCEPTS: float
"""
if level is None:
level = self.__class__.zorder
self.zorder = level
self.pchanged()
self.stale = True
@property
def sticky_edges(self):
"""
`x` and `y` sticky edge lists.
When performing autoscaling, if a data limit coincides with a value in
the corresponding sticky_edges list, then no margin will be added--the
view limit "sticks" to the edge. A typical usecase is histograms,
where one usually expects no margin on the bottom edge (0) of the
histogram.
This attribute cannot be assigned to; however, the `x` and `y` lists
can be modified in place as needed.
Examples
--------
>>> artist.sticky_edges.x[:] = (xmin, xmax)
>>> artist.sticky_edges.y[:] = (ymin, ymax)
"""
return self._sticky_edges
def update_from(self, other):
'Copy properties from *other* to *self*.'
self._transform = other._transform
self._transformSet = other._transformSet
self._visible = other._visible
self._alpha = other._alpha
self.clipbox = other.clipbox
self._clipon = other._clipon
self._clippath = other._clippath
self._label = other._label
self._sketch = other._sketch
self._path_effects = other._path_effects
self.sticky_edges.x[:] = other.sticky_edges.x[:]
self.sticky_edges.y[:] = other.sticky_edges.y[:]
self.pchanged()
self.stale = True
def properties(self):
"""
return a dictionary mapping property name -> value for all Artist props
"""
return ArtistInspector(self).properties()
def set(self, **kwargs):
"""A property batch setter. Pass *kwargs* to set properties.
"""
props = OrderedDict(
sorted(kwargs.items(), reverse=True,
key=lambda x: (self._prop_order.get(x[0], 0), x[0])))
return self.update(props)
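# Usage sketch (illustrative; ``line`` is a hypothetical Line2D, not defined
# in this module): both `update` and `set` route property/value pairs
# through the artist's ``set_*`` methods.
#
#     line.set(color='red', linewidth=2, linestyle='--')
#     line.update({'alpha': 0.5, 'zorder': 3})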
def findobj(self, match=None, include_self=True):
"""
Find artist objects.
Recursively find all :class:`~matplotlib.artist.Artist` instances
contained in self.
*match* can be
- None: return all objects contained in artist.
- function with signature ``boolean = match(artist)``
used to filter matches
- class instance: e.g., Line2D. Only return artists of class type.
If *include_self* is True (default), include self in the list to be
checked for a match.
"""
if match is None: # always return True
def matchfunc(x):
return True
elif isinstance(match, type) and issubclass(match, Artist):
def matchfunc(x):
return isinstance(x, match)
elif callable(match):
matchfunc = match
else:
raise ValueError('match must be None, a matplotlib.artist.Artist '
'subclass, or a callable')
artists = sum([c.findobj(matchfunc) for c in self.get_children()], [])
if include_self and matchfunc(self):
artists.append(self)
return artists
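# Usage sketch (illustrative; the pyplot objects below are not defined in
# this module): find every Line2D in a figure and thin its line width.
#
#     import matplotlib.pyplot as plt
#     import matplotlib.lines as mlines
#     fig, ax = plt.subplots()
#     ax.plot([0, 1], [0, 1])
#     ax.plot([0, 1], [1, 0])
#     for found in fig.findobj(mlines.Line2D):
#         found.set_linewidth(0.5)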
def get_cursor_data(self, event):
"""
Get the cursor data for a given event.
"""
return None
def format_cursor_data(self, data):
"""
Return *cursor data* string formatted.
"""
try:
data[0]
except (TypeError, IndexError):
data = [data]
return ', '.join('{:0.3g}'.format(item) for item in data if
isinstance(item, (np.floating, np.integer, int, float)))
@property
def mouseover(self):
return self._mouseover
@mouseover.setter
def mouseover(self, val):
val = bool(val)
self._mouseover = val
ax = self.axes
if ax:
if val:
ax.mouseover_set.add(self)
else:
ax.mouseover_set.discard(self)
class ArtistInspector(object):
"""
A helper class to inspect an :class:`~matplotlib.artist.Artist`
and return information about its settable properties and their
current values.
"""
def __init__(self, o):
"""
Initialize the artist inspector with an
:class:`~matplotlib.artist.Artist` or iterable of :class:`Artists`.
If an iterable is used, we assume it is a homogeneous sequence (all
:class:`Artists` are of the same type) and it is your responsibility
to make sure this is so.
"""
if not isinstance(o, Artist):
if cbook.iterable(o):
o = list(o)
if len(o):
o = o[0]
self.oorig = o
if not inspect.isclass(o):
o = type(o)
self.o = o
self.aliasd = self.get_aliases()
def get_aliases(self):
"""
Get a dict mapping *fullname* -> *alias* for each *alias* in
the :class:`~matplotlib.artist.ArtistInspector`.
e.g., for lines::
{'markerfacecolor': 'mfc',
'linewidth' : 'lw',
}
"""
names = [name for name in dir(self.o)
if name.startswith(('set_', 'get_'))
and callable(getattr(self.o, name))]
aliases = {}
for name in names:
func = getattr(self.o, name)
if not self.is_alias(func):
continue
docstring = func.__doc__
fullname = docstring[10:]
aliases.setdefault(fullname[4:], {})[name[4:]] = None
return aliases
_get_valid_values_regex = re.compile(
r"\n\s*(?:\.\.\s+)?ACCEPTS:\s*((?:.|\n)*?)(?:$|(?:\n\n))"
)
def get_valid_values(self, attr):
"""
Get the legal arguments for the setter associated with *attr*.
This is done by querying the docstring of the function *set_attr*
for a line that begins with "ACCEPTS" or ".. ACCEPTS":
e.g., for a line linestyle, return
"[ ``'-'`` | ``'--'`` | ``'-.'`` | ``':'`` | ``'steps'`` | ``'None'``
]"
"""
name = 'set_%s' % attr
if not hasattr(self.o, name):
raise AttributeError('%s has no function %s' % (self.o, name))
func = getattr(self.o, name)
docstring = func.__doc__
if docstring is None:
return 'unknown'
if docstring.startswith('alias for '):
return None
match = self._get_valid_values_regex.search(docstring)
if match is not None:
return re.sub("\n *", " ", match.group(1))
return 'unknown'
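# Usage sketch: querying the documented values of a settable property via
# the inspector (the exact text returned depends on the setter's docstring).
#
#     from matplotlib.lines import Line2D
#     insp = ArtistInspector(Line2D([], []))
#     print(insp.get_valid_values('linestyle'))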
def _get_setters_and_targets(self):
"""
Get the attribute strings and a full path to where the setter
is defined for all setters in an object.
"""
setters = []
for name in dir(self.o):
if not name.startswith('set_'):
continue
func = getattr(self.o, name)
if not callable(func):
continue
if six.PY2:
nargs = len(inspect.getargspec(func)[0])
else:
nargs = len(inspect.getfullargspec(func)[0])
if nargs < 2 or self.is_alias(func):
continue
source_class = self.o.__module__ + "." + self.o.__name__
for cls in self.o.mro():
if name in cls.__dict__:
source_class = cls.__module__ + "." + cls.__name__
break
setters.append((name[4:], source_class + "." + name))
return setters
def get_setters(self):
"""
Get the attribute strings with setters for object. e.g., for a line,
return ``['markerfacecolor', 'linewidth', ....]``.
"""
return [prop for prop, target in self._get_setters_and_targets()]
def is_alias(self, o):
"""
Return *True* if method object *o* is an alias for another
function.
"""
ds = o.__doc__
if ds is None:
return False
return ds.startswith('alias for ')
def aliased_name(self, s):
"""
return 'PROPNAME or alias' if *s* has an alias, else return
PROPNAME.
e.g., for the line markerfacecolor property, which has an
alias, return 'markerfacecolor or mfc' and for the transform
property, which does not, return 'transform'
"""
if s in self.aliasd:
return s + ''.join([' or %s' % x
for x in sorted(self.aliasd[s])])
else:
return s
def aliased_name_rest(self, s, target):
"""
return 'PROPNAME or alias' if *s* has an alias, else return
PROPNAME formatted for ReST
e.g., for the line markerfacecolor property, which has an
alias, return 'markerfacecolor or mfc' and for the transform
property, which does not, return 'transform'
"""
if s in self.aliasd:
aliases = ''.join([' or %s' % x
for x in sorted(self.aliasd[s])])
else:
aliases = ''
return ':meth:`%s <%s>`%s' % (s, target, aliases)
def pprint_setters(self, prop=None, leadingspace=2):
"""
If *prop* is *None*, return a list of strings of all settable
properties and their valid values.
If *prop* is not *None*, it is a valid property name and that
property will be returned as a string of property : valid
values.
"""
if leadingspace:
pad = ' ' * leadingspace
else:
pad = ''
if prop is not None:
accepts = self.get_valid_values(prop)
return '%s%s: %s' % (pad, prop, accepts)
attrs = self._get_setters_and_targets()
attrs.sort()
lines = []
for prop, path in attrs:
accepts = self.get_valid_values(prop)
name = self.aliased_name(prop)
lines.append('%s%s: %s' % (pad, name, accepts))
return lines
def pprint_setters_rest(self, prop=None, leadingspace=4):
"""
If *prop* is *None*, return a list of strings of all settable
properties and their valid values. Format the output for ReST
If *prop* is not *None*, it is a valid property name and that
property will be returned as a string of property : valid
values.
"""
if leadingspace:
pad = ' ' * leadingspace
else:
pad = ''
if prop is not None:
accepts = self.get_valid_values(prop)
return '%s%s: %s' % (pad, prop, accepts)
attrs = self._get_setters_and_targets()
attrs.sort()
lines = []
########
names = [self.aliased_name_rest(prop, target)
for prop, target in attrs]
accepts = [self.get_valid_values(prop) for prop, target in attrs]
col0_len = max(len(n) for n in names)
col1_len = max(len(a) for a in accepts)
lines.append('')
lines.append(pad + '.. table::')
lines.append(pad + ' :class: property-table')
pad += ' '
table_formatstr = pad + '=' * col0_len + ' ' + '=' * col1_len
lines.append('')
lines.append(table_formatstr)
lines.append(pad + 'Property'.ljust(col0_len + 3) +
'Description'.ljust(col1_len))
lines.append(table_formatstr)
lines.extend([pad + n.ljust(col0_len + 3) + a.ljust(col1_len)
for n, a in zip(names, accepts)])
lines.append(table_formatstr)
lines.append('')
return lines
def properties(self):
"""
return a dictionary mapping property name -> value
"""
o = self.oorig
getters = [name for name in dir(o)
if name.startswith('get_') and callable(getattr(o, name))]
getters.sort()
d = dict()
for name in getters:
func = getattr(o, name)
if self.is_alias(func):
continue
try:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
val = func()
except Exception:
continue
else:
d[name[4:]] = val
return d
def pprint_getters(self):
"""
Return the getters and actual values as list of strings.
"""
lines = []
for name, val in sorted(six.iteritems(self.properties())):
if getattr(val, 'shape', ()) != () and len(val) > 6:
s = str(val[:6]) + '...'
else:
s = str(val)
s = s.replace('\n', ' ')
if len(s) > 50:
s = s[:50] + '...'
name = self.aliased_name(name)
lines.append(' %s = %s' % (name, s))
return lines
def getp(obj, property=None):
"""
Return the value of object's property. *property* is an optional string
for the property you want to return
Example usage::
getp(obj) # get all the object properties
getp(obj, 'linestyle') # get the linestyle property
*obj* is a :class:`Artist` instance, e.g.,
:class:`~matplotlib.lines.Line2D` or an instance of a
:class:`~matplotlib.axes.Axes` or :class:`matplotlib.text.Text`.
If the *property* is 'somename', this function returns
obj.get_somename()
:func:`getp` can be used to query all the gettable properties with
``getp(obj)``. Many properties have aliases for shorter typing, e.g.
'lw' is an alias for 'linewidth'. In the output, aliases and full
property names will be listed as:
property or alias = value
e.g.:
linewidth or lw = 2
"""
if property is None:
insp = ArtistInspector(obj)
ret = insp.pprint_getters()
print('\n'.join(ret))
return
func = getattr(obj, 'get_' + property)
return func()
# alias
get = getp
def setp(obj, *args, **kwargs):
"""
Set a property on an artist object.
matplotlib supports the use of :func:`setp` ("set property") and
:func:`getp` to set and get object properties, as well as to do
introspection on the object. For example, to set the linestyle of a
line to be dashed, you can do::
>>> line, = plot([1,2,3])
>>> setp(line, linestyle='--')
If you want to know the valid types of arguments, you can provide
the name of the property you want to set without a value::
>>> setp(line, 'linestyle')
linestyle: [ '-' | '--' | '-.' | ':' | 'steps' | 'None' ]
If you want to see all the properties that can be set, and their
possible values, you can do::
>>> setp(line)
... long output listing omitted
You may specify another output file to `setp` if `sys.stdout` is not
acceptable for some reason using the `file` keyword-only argument::
>>> with open('output.log', 'w') as f:
...     setp(line, file=f)
:func:`setp` operates on a single instance or an iterable of
instances. If you are in query mode introspecting the possible
values, only the first instance in the sequence is used. When
actually setting values, all the instances will be set. e.g.,
suppose you have a list of two lines, the following will make both
lines thicker and red::
>>> x = arange(0,1.0,0.01)
>>> y1 = sin(2*pi*x)
>>> y2 = sin(4*pi*x)
>>> lines = plot(x, y1, x, y2)
>>> setp(lines, linewidth=2, color='r')
:func:`setp` works with the MATLAB style string/value pairs or
with python kwargs. For example, the following are equivalent::
>>> setp(lines, 'linewidth', 2, 'color', 'r') # MATLAB style
>>> setp(lines, linewidth=2, color='r') # python style
"""
if isinstance(obj, Artist):
objs = [obj]
else:
objs = list(cbook.flatten(obj))
if not objs:
return
insp = ArtistInspector(objs[0])
# file has to be popped before checking if kwargs is empty
printArgs = {}
if 'file' in kwargs:
printArgs['file'] = kwargs.pop('file')
if not kwargs and len(args) < 2:
if args:
print(insp.pprint_setters(prop=args[0]), **printArgs)
else:
print('\n'.join(insp.pprint_setters()), **printArgs)
return
if len(args) % 2:
raise ValueError('The set args must be string, value pairs')
# put args into ordereddict to maintain order
funcvals = OrderedDict()
for i in range(0, len(args) - 1, 2):
funcvals[args[i]] = args[i + 1]
ret = [o.update(funcvals) for o in objs]
ret.extend([o.set(**kwargs) for o in objs])
return [x for x in cbook.flatten(ret)]
def kwdoc(a):
hardcopy = matplotlib.rcParams['docstring.hardcopy']
if hardcopy:
return '\n'.join(ArtistInspector(a).pprint_setters_rest(
leadingspace=4))
else:
return '\n'.join(ArtistInspector(a).pprint_setters(leadingspace=2))
docstring.interpd.update(Artist=kwdoc(Artist))
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backend_tools.py
|
"""
Abstract base classes define the primitives for Tools.
These tools are used by `matplotlib.backend_managers.ToolManager`
:class:`ToolBase`
Simple stateless tool
:class:`ToolToggleBase`
Tool that has two states, only one Toggle tool can be
active at any given time for the same
`matplotlib.backend_managers.ToolManager`
"""
from matplotlib import rcParams
from matplotlib._pylab_helpers import Gcf
import matplotlib.cbook as cbook
from weakref import WeakKeyDictionary
import six
import time
import warnings
import numpy as np
class Cursors(object):
"""Simple namespace for cursor reference"""
HAND, POINTER, SELECT_REGION, MOVE, WAIT = list(range(5))
cursors = Cursors()
# Views positions tool
_views_positions = 'viewpos'
class ToolBase(object):
"""
Base tool class
A base tool; it only implements the `trigger` method, or no method at all.
The tool is instantiated by `matplotlib.backend_managers.ToolManager`
Attributes
----------
toolmanager: `matplotlib.backend_managers.ToolManager`
ToolManager that controls this Tool
figure: `Figure`
Figure instance that is affected by this Tool
name: String
Used as **Id** of the tool, has to be unique among tools of the same
ToolManager
"""
default_keymap = None
"""
Keymap to associate with this tool
**String**: List of comma separated keys that will be used to call this
tool when the keypress event of *self.figure.canvas* is emitted
"""
description = None
"""
Description of the Tool
**String**: If the Tool is included in the Toolbar this text is used
as a Tooltip
"""
image = None
"""
Filename of the image
**String**: Filename of the image to use in the toolbar. If None, the
`name` is used as a label in the toolbar button
"""
def __init__(self, toolmanager, name):
warnings.warn('Treat the new Tool classes introduced in v1.5 as ' +
'experimental for now, the API will likely change in ' +
'version 2.1, and some tools might change name')
self._name = name
self._toolmanager = toolmanager
self._figure = None
@property
def figure(self):
return self._figure
@figure.setter
def figure(self, figure):
self.set_figure(figure)
@property
def canvas(self):
if not self._figure:
return None
return self._figure.canvas
@property
def toolmanager(self):
return self._toolmanager
def set_figure(self, figure):
"""
Assign a figure to the tool
Parameters
----------
figure: `Figure`
"""
self._figure = figure
def trigger(self, sender, event, data=None):
"""
Called when this tool gets used
This method is called by
`matplotlib.backend_managers.ToolManager.trigger_tool`
Parameters
----------
event: `Event`
The Canvas event that caused this tool to be called
sender: object
Object that requested the tool to be triggered
data: object
Extra data
"""
pass
@property
def name(self):
"""Tool Id"""
return self._name
def destroy(self):
"""
Destroy the tool
This method is called when the tool is removed by
`matplotlib.backend_managers.ToolManager.remove_tool`
"""
pass
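# Usage sketch for a custom stateless tool (illustrative; requires the
# experimental ``toolmanager`` toolbar, and the tool name 'List' is
# arbitrary):
#
#     import matplotlib.pyplot as plt
#     plt.rcParams['toolbar'] = 'toolmanager'
#
#     class ListTools(ToolBase):
#         description = 'List the tools controlled by the ToolManager'
#         default_keymap = 'm'
#         def trigger(self, sender, event, data=None):
#             print(sorted(self.toolmanager.tools))
#
#     fig = plt.figure()
#     fig.canvas.manager.toolmanager.add_tool('List', ListTools)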
class ToolToggleBase(ToolBase):
"""
Toggleable tool
Every time it is triggered, it switches between enable and disable
Parameters
----------
``*args``
Variable length argument to be used by the Tool
``**kwargs``
`toggled` if present and True, sets the initial state of the Tool
Arbitrary keyword arguments to be consumed by the Tool
"""
radio_group = None
"""Attribute to group 'radio' like tools (mutually exclusive)
**String** that identifies the group or **None** if not belonging to a
group
"""
cursor = None
"""Cursor to use when the tool is active"""
default_toggled = False
"""Default of toggled state"""
def __init__(self, *args, **kwargs):
self._toggled = kwargs.pop('toggled', self.default_toggled)
ToolBase.__init__(self, *args, **kwargs)
def trigger(self, sender, event, data=None):
"""Calls `enable` or `disable` based on `toggled` value"""
if self._toggled:
self.disable(event)
else:
self.enable(event)
self._toggled = not self._toggled
def enable(self, event=None):
"""
Enable the toggle tool
`trigger` calls this method when `toggled` is False
"""
pass
def disable(self, event=None):
"""
Disable the toggle tool
`trigger` calls this method when `toggled` is True.
This can happen in different circumstances
* Click on the toolbar tool button
* Call to `matplotlib.backend_managers.ToolManager.trigger_tool`
* Another `ToolToggleBase` derived tool is triggered
(from the same `ToolManager`)
"""
pass
@property
def toggled(self):
"""State of the toggled tool"""
return self._toggled
def set_figure(self, figure):
toggled = self.toggled
if toggled:
if self.figure:
self.trigger(self, None)
else:
# if there is no figure, the internal state is not changed;
# we change it here so the next call to trigger will change it back
self._toggled = False
ToolBase.set_figure(self, figure)
if toggled:
if figure:
self.trigger(self, None)
else:
# if there is no figure, trigger won't change the internal
# state, so we change it back here
self._toggled = True
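# Usage sketch for a toggle tool (illustrative; ``DimFigure`` is a made-up
# example, not shipped with matplotlib):
#
#     class DimFigure(ToolToggleBase):
#         description = 'Dim the figure background while toggled on'
#         def enable(self, event=None):
#             self.figure.patch.set_facecolor('0.8')
#             self.figure.canvas.draw_idle()
#         def disable(self, event=None):
#             self.figure.patch.set_facecolor('white')
#             self.figure.canvas.draw_idle()
#
#     # registered like any other tool, e.g.:
#     # fig.canvas.manager.toolmanager.add_tool('dim', DimFigure)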
class SetCursorBase(ToolBase):
"""
Change to the current cursor while the mouse is inside an axes.
This tool keeps track of all `ToolToggleBase` derived tools, and calls
set_cursor when a tool gets triggered.
"""
def __init__(self, *args, **kwargs):
ToolBase.__init__(self, *args, **kwargs)
self._idDrag = None
self._cursor = None
self._default_cursor = cursors.POINTER
self._last_cursor = self._default_cursor
self.toolmanager.toolmanager_connect('tool_added_event',
self._add_tool_cbk)
# process current tools
for tool in self.toolmanager.tools.values():
self._add_tool(tool)
def set_figure(self, figure):
if self._idDrag:
self.canvas.mpl_disconnect(self._idDrag)
ToolBase.set_figure(self, figure)
if figure:
self._idDrag = self.canvas.mpl_connect(
'motion_notify_event', self._set_cursor_cbk)
def _tool_trigger_cbk(self, event):
if event.tool.toggled:
self._cursor = event.tool.cursor
else:
self._cursor = None
self._set_cursor_cbk(event.canvasevent)
def _add_tool(self, tool):
"""set the cursor when the tool is triggered"""
if getattr(tool, 'cursor', None) is not None:
self.toolmanager.toolmanager_connect('tool_trigger_%s' % tool.name,
self._tool_trigger_cbk)
def _add_tool_cbk(self, event):
"""Process every newly added tool"""
if event.tool is self:
return
self._add_tool(event.tool)
def _set_cursor_cbk(self, event):
if not event:
return
if not getattr(event, 'inaxes', False) or not self._cursor:
if self._last_cursor != self._default_cursor:
self.set_cursor(self._default_cursor)
self._last_cursor = self._default_cursor
elif self._cursor:
cursor = self._cursor
if cursor and self._last_cursor != cursor:
self.set_cursor(cursor)
self._last_cursor = cursor
def set_cursor(self, cursor):
"""
Set the cursor
This method has to be implemented per backend
"""
raise NotImplementedError
class ToolCursorPosition(ToolBase):
"""
Send message with the current pointer position
This tool runs in the background reporting the position of the cursor
"""
def __init__(self, *args, **kwargs):
self._idDrag = None
ToolBase.__init__(self, *args, **kwargs)
def set_figure(self, figure):
if self._idDrag:
self.canvas.mpl_disconnect(self._idDrag)
ToolBase.set_figure(self, figure)
if figure:
self._idDrag = self.canvas.mpl_connect(
'motion_notify_event', self.send_message)
def send_message(self, event):
"""Call `matplotlib.backend_managers.ToolManager.message_event`"""
if self.toolmanager.messagelock.locked():
return
message = ' '
if event.inaxes and event.inaxes.get_navigate():
try:
s = event.inaxes.format_coord(event.xdata, event.ydata)
except (ValueError, OverflowError):
pass
else:
artists = [a for a in event.inaxes.mouseover_set
if a.contains(event) and a.get_visible()]
if artists:
a = cbook._topmost_artist(artists)
if a is not event.inaxes.patch:
data = a.get_cursor_data(event)
if data is not None:
s += ' [%s]' % a.format_cursor_data(data)
message = s
self.toolmanager.message_event(message, self)
class RubberbandBase(ToolBase):
"""Draw and remove rubberband"""
def trigger(self, sender, event, data):
"""Call `draw_rubberband` or `remove_rubberband` based on data"""
if not self.figure.canvas.widgetlock.available(sender):
return
if data is not None:
self.draw_rubberband(*data)
else:
self.remove_rubberband()
def draw_rubberband(self, *data):
"""
Draw rubberband
This method must get implemented per backend
"""
raise NotImplementedError
def remove_rubberband(self):
"""
Remove rubberband
This method should get implemented per backend
"""
pass
class ToolQuit(ToolBase):
"""Tool to call the figure manager destroy method"""
description = 'Quit the figure'
default_keymap = rcParams['keymap.quit']
def trigger(self, sender, event, data=None):
Gcf.destroy_fig(self.figure)
class ToolQuitAll(ToolBase):
"""Tool to call the figure manager destroy method"""
description = 'Quit all figures'
default_keymap = rcParams['keymap.quit_all']
def trigger(self, sender, event, data=None):
Gcf.destroy_all()
class ToolEnableAllNavigation(ToolBase):
"""Tool to enable all axes for toolmanager interaction"""
description = 'Enables all axes toolmanager'
default_keymap = rcParams['keymap.all_axes']
def trigger(self, sender, event, data=None):
if event.inaxes is None:
return
for a in self.figure.get_axes():
if (event.x is not None and event.y is not None
and a.in_axes(event)):
a.set_navigate(True)
class ToolEnableNavigation(ToolBase):
"""Tool to enable a specific axes for toolmanager interaction"""
description = 'Enables one axes toolmanager'
default_keymap = (1, 2, 3, 4, 5, 6, 7, 8, 9)
def trigger(self, sender, event, data=None):
if event.inaxes is None:
return
n = int(event.key) - 1
for i, a in enumerate(self.figure.get_axes()):
if (event.x is not None and event.y is not None
and a.in_axes(event)):
a.set_navigate(i == n)
class _ToolGridBase(ToolBase):
"""Common functionality between ToolGrid and ToolMinorGrid."""
_cycle = [(False, False), (True, False), (True, True), (False, True)]
def trigger(self, sender, event, data=None):
ax = event.inaxes
if ax is None:
return
try:
x_state, x_which, y_state, y_which = self._get_next_grid_states(ax)
except ValueError:
pass
else:
ax.grid(x_state, which=x_which, axis="x")
ax.grid(y_state, which=y_which, axis="y")
ax.figure.canvas.draw_idle()
@staticmethod
def _get_uniform_grid_state(ticks):
"""
Check whether all grid lines are in the same visibility state.
Returns True/False if all grid lines are on or off, None if they are
not all in the same state.
"""
if all(tick.gridOn for tick in ticks):
return True
elif not any(tick.gridOn for tick in ticks):
return False
else:
return None
class ToolGrid(_ToolGridBase):
"""Tool to toggle the major grids of the figure"""
description = 'Toggle major grids'
default_keymap = rcParams['keymap.grid']
def _get_next_grid_states(self, ax):
if None in map(self._get_uniform_grid_state,
[ax.xaxis.minorTicks, ax.yaxis.minorTicks]):
# Bail out if minor grids are not in a uniform state.
raise ValueError
x_state, y_state = map(self._get_uniform_grid_state,
[ax.xaxis.majorTicks, ax.yaxis.majorTicks])
cycle = self._cycle
# Bail out (via ValueError) if major grids are not in a uniform state.
x_state, y_state = (
cycle[(cycle.index((x_state, y_state)) + 1) % len(cycle)])
return (x_state, "major" if x_state else "both",
y_state, "major" if y_state else "both")
class ToolMinorGrid(_ToolGridBase):
"""Tool to toggle the major and minor grids of the figure"""
description = 'Toggle major and minor grids'
default_keymap = rcParams['keymap.grid_minor']
def _get_next_grid_states(self, ax):
if None in map(self._get_uniform_grid_state,
[ax.xaxis.majorTicks, ax.yaxis.majorTicks]):
# Bail out if major grids are not in a uniform state.
raise ValueError
x_state, y_state = map(self._get_uniform_grid_state,
[ax.xaxis.minorTicks, ax.yaxis.minorTicks])
cycle = self._cycle
# Bail out (via ValueError) if minor grids are not in a uniform state.
x_state, y_state = (
cycle[(cycle.index((x_state, y_state)) + 1) % len(cycle)])
return x_state, "both", y_state, "both"
class ToolFullScreen(ToolToggleBase):
"""Tool to toggle full screen"""
description = 'Toggle Fullscreen mode'
default_keymap = rcParams['keymap.fullscreen']
def enable(self, event):
self.figure.canvas.manager.full_screen_toggle()
def disable(self, event):
self.figure.canvas.manager.full_screen_toggle()
class AxisScaleBase(ToolToggleBase):
"""Base Tool to toggle between linear and logarithmic"""
def trigger(self, sender, event, data=None):
if event.inaxes is None:
return
ToolToggleBase.trigger(self, sender, event, data)
def enable(self, event):
self.set_scale(event.inaxes, 'log')
self.figure.canvas.draw_idle()
def disable(self, event):
self.set_scale(event.inaxes, 'linear')
self.figure.canvas.draw_idle()
class ToolYScale(AxisScaleBase):
"""Tool to toggle between linear and logarithmic scales on the Y axis"""
description = 'Toggle Scale Y axis'
default_keymap = rcParams['keymap.yscale']
def set_scale(self, ax, scale):
ax.set_yscale(scale)
class ToolXScale(AxisScaleBase):
"""Tool to toggle between linear and logarithmic scales on the X axis"""
description = 'Toggle Scale X axis'
default_keymap = rcParams['keymap.xscale']
def set_scale(self, ax, scale):
ax.set_xscale(scale)
class ToolViewsPositions(ToolBase):
"""
Auxiliary Tool to handle changes in views and positions
Runs in the background and should get used by all the tools that
need to access the figure's history of views and positions, e.g.
* `ToolZoom`
* `ToolPan`
* `ToolHome`
* `ToolBack`
* `ToolForward`
"""
def __init__(self, *args, **kwargs):
self.views = WeakKeyDictionary()
self.positions = WeakKeyDictionary()
self.home_views = WeakKeyDictionary()
ToolBase.__init__(self, *args, **kwargs)
def add_figure(self, figure):
"""Add the current figure to the stack of views and positions"""
if figure not in self.views:
self.views[figure] = cbook.Stack()
self.positions[figure] = cbook.Stack()
self.home_views[figure] = WeakKeyDictionary()
# Define Home
self.push_current(figure)
# Make sure we add a home view for new axes as they're added
figure.add_axobserver(lambda fig: self.update_home_views(fig))
def clear(self, figure):
"""Reset the axes stack"""
if figure in self.views:
self.views[figure].clear()
self.positions[figure].clear()
self.home_views[figure].clear()
self.update_home_views()
def update_view(self):
"""
Update the view limits and position for each axes from the current
stack position. If any axes are present in the figure that aren't in
the current stack position, use the home view limits for those axes and
don't update *any* positions.
"""
views = self.views[self.figure]()
if views is None:
return
pos = self.positions[self.figure]()
if pos is None:
return
home_views = self.home_views[self.figure]
all_axes = self.figure.get_axes()
for a in all_axes:
if a in views:
cur_view = views[a]
else:
cur_view = home_views[a]
a._set_view(cur_view)
if set(all_axes).issubset(pos):
for a in all_axes:
# Restore both the original and modified positions
a._set_position(pos[a][0], 'original')
a._set_position(pos[a][1], 'active')
self.figure.canvas.draw_idle()
def push_current(self, figure=None):
"""
Push the current view limits and position onto their respective stacks
"""
if not figure:
figure = self.figure
views = WeakKeyDictionary()
pos = WeakKeyDictionary()
for a in figure.get_axes():
views[a] = a._get_view()
pos[a] = self._axes_pos(a)
self.views[figure].push(views)
self.positions[figure].push(pos)
def _axes_pos(self, ax):
"""
Return the original and modified positions for the specified axes
Parameters
----------
ax : (matplotlib.axes.AxesSubplot)
The axes to get the positions for
Returns
-------
limits : (tuple)
A tuple of the original and modified positions
"""
return (ax.get_position(True).frozen(),
ax.get_position().frozen())
def update_home_views(self, figure=None):
"""
Make sure that self.home_views has an entry for all axes present in the
figure
"""
if not figure:
figure = self.figure
for a in figure.get_axes():
if a not in self.home_views[figure]:
self.home_views[figure][a] = a._get_view()
def refresh_locators(self):
"""Redraw the canvases, update the locators"""
for a in self.figure.get_axes():
xaxis = getattr(a, 'xaxis', None)
yaxis = getattr(a, 'yaxis', None)
zaxis = getattr(a, 'zaxis', None)
locators = []
if xaxis is not None:
locators.append(xaxis.get_major_locator())
locators.append(xaxis.get_minor_locator())
if yaxis is not None:
locators.append(yaxis.get_major_locator())
locators.append(yaxis.get_minor_locator())
if zaxis is not None:
locators.append(zaxis.get_major_locator())
locators.append(zaxis.get_minor_locator())
for loc in locators:
loc.refresh()
self.figure.canvas.draw_idle()
def home(self):
"""Recall the first view and position from the stack"""
self.views[self.figure].home()
self.positions[self.figure].home()
def back(self):
"""Back one step in the stack of views and positions"""
self.views[self.figure].back()
self.positions[self.figure].back()
def forward(self):
"""Forward one step in the stack of views and positions"""
self.views[self.figure].forward()
self.positions[self.figure].forward()
class ViewsPositionsBase(ToolBase):
"""Base class for `ToolHome`, `ToolBack` and `ToolForward`"""
_on_trigger = None
def trigger(self, sender, event, data=None):
self.toolmanager.get_tool(_views_positions).add_figure(self.figure)
getattr(self.toolmanager.get_tool(_views_positions),
self._on_trigger)()
self.toolmanager.get_tool(_views_positions).update_view()
class ToolHome(ViewsPositionsBase):
"""Restore the original view lim"""
description = 'Reset original view'
image = 'home'
default_keymap = rcParams['keymap.home']
_on_trigger = 'home'
class ToolBack(ViewsPositionsBase):
"""Move back up the view lim stack"""
description = 'Back to previous view'
image = 'back'
default_keymap = rcParams['keymap.back']
_on_trigger = 'back'
class ToolForward(ViewsPositionsBase):
"""Move forward in the view lim stack"""
description = 'Forward to next view'
image = 'forward'
default_keymap = rcParams['keymap.forward']
_on_trigger = 'forward'
class ConfigureSubplotsBase(ToolBase):
"""Base tool for the configuration of subplots"""
description = 'Configure subplots'
image = 'subplots'
class SaveFigureBase(ToolBase):
"""Base tool for figure saving"""
description = 'Save the figure'
image = 'filesave'
default_keymap = rcParams['keymap.save']
class ZoomPanBase(ToolToggleBase):
"""Base class for `ToolZoom` and `ToolPan`"""
def __init__(self, *args):
ToolToggleBase.__init__(self, *args)
self._button_pressed = None
self._xypress = None
self._idPress = None
self._idRelease = None
self._idScroll = None
self.base_scale = 2.
self.scrollthresh = .5 # .5 second scroll threshold
self.lastscroll = time.time()-self.scrollthresh
def enable(self, event):
"""Connect press/release events and lock the canvas"""
self.figure.canvas.widgetlock(self)
self._idPress = self.figure.canvas.mpl_connect(
'button_press_event', self._press)
self._idRelease = self.figure.canvas.mpl_connect(
'button_release_event', self._release)
self._idScroll = self.figure.canvas.mpl_connect(
'scroll_event', self.scroll_zoom)
def disable(self, event):
"""Release the canvas and disconnect press/release events"""
self._cancel_action()
self.figure.canvas.widgetlock.release(self)
self.figure.canvas.mpl_disconnect(self._idPress)
self.figure.canvas.mpl_disconnect(self._idRelease)
self.figure.canvas.mpl_disconnect(self._idScroll)
def trigger(self, sender, event, data=None):
self.toolmanager.get_tool(_views_positions).add_figure(self.figure)
ToolToggleBase.trigger(self, sender, event, data)
def scroll_zoom(self, event):
# https://gist.github.com/tacaswell/3144287
if event.inaxes is None:
return
if event.button == 'up':
# deal with zoom in
scl = self.base_scale
elif event.button == 'down':
# deal with zoom out
scl = 1/self.base_scale
else:
# deal with something that should never happen
scl = 1
ax = event.inaxes
ax._set_view_from_bbox([event.x, event.y, scl])
# If last scroll was done within the timing threshold, delete the
# previous view
if (time.time()-self.lastscroll) < self.scrollthresh:
self.toolmanager.get_tool(_views_positions).back()
self.figure.canvas.draw_idle() # force re-draw
self.lastscroll = time.time()
self.toolmanager.get_tool(_views_positions).push_current()
class ToolZoom(ZoomPanBase):
"""Zoom to rectangle"""
description = 'Zoom to rectangle'
image = 'zoom_to_rect'
default_keymap = rcParams['keymap.zoom']
cursor = cursors.SELECT_REGION
radio_group = 'default'
def __init__(self, *args):
ZoomPanBase.__init__(self, *args)
self._ids_zoom = []
def _cancel_action(self):
for zoom_id in self._ids_zoom:
self.figure.canvas.mpl_disconnect(zoom_id)
self.toolmanager.trigger_tool('rubberband', self)
self.toolmanager.get_tool(_views_positions).refresh_locators()
self._xypress = None
self._button_pressed = None
self._ids_zoom = []
return
def _press(self, event):
"""the _press mouse button in zoom to rect mode callback"""
# If we're already in the middle of a zoom, pressing another
# button works to "cancel"
if self._ids_zoom != []:
self._cancel_action()
if event.button == 1:
self._button_pressed = 1
elif event.button == 3:
self._button_pressed = 3
else:
self._cancel_action()
return
x, y = event.x, event.y
self._xypress = []
for i, a in enumerate(self.figure.get_axes()):
if (x is not None and y is not None and a.in_axes(event) and
a.get_navigate() and a.can_zoom()):
self._xypress.append((x, y, a, i, a._get_view()))
id1 = self.figure.canvas.mpl_connect(
'motion_notify_event', self._mouse_move)
id2 = self.figure.canvas.mpl_connect(
'key_press_event', self._switch_on_zoom_mode)
id3 = self.figure.canvas.mpl_connect(
'key_release_event', self._switch_off_zoom_mode)
self._ids_zoom = id1, id2, id3
self._zoom_mode = event.key
def _switch_on_zoom_mode(self, event):
self._zoom_mode = event.key
self._mouse_move(event)
def _switch_off_zoom_mode(self, event):
self._zoom_mode = None
self._mouse_move(event)
def _mouse_move(self, event):
"""the drag callback in zoom mode"""
if self._xypress:
x, y = event.x, event.y
lastx, lasty, a, ind, view = self._xypress[0]
(x1, y1), (x2, y2) = np.clip(
[[lastx, lasty], [x, y]], a.bbox.min, a.bbox.max)
if self._zoom_mode == "x":
y1, y2 = a.bbox.intervaly
elif self._zoom_mode == "y":
x1, x2 = a.bbox.intervalx
self.toolmanager.trigger_tool(
'rubberband', self, data=(x1, y1, x2, y2))
def _release(self, event):
"""the release mouse button callback in zoom to rect mode"""
for zoom_id in self._ids_zoom:
self.figure.canvas.mpl_disconnect(zoom_id)
self._ids_zoom = []
if not self._xypress:
self._cancel_action()
return
last_a = []
for cur_xypress in self._xypress:
x, y = event.x, event.y
lastx, lasty, a, _ind, view = cur_xypress
# ignore singular clicks - 5 pixels is a threshold
if abs(x - lastx) < 5 or abs(y - lasty) < 5:
self._cancel_action()
return
# detect twinx,y axes and avoid double zooming
twinx, twiny = False, False
if last_a:
for la in last_a:
if a.get_shared_x_axes().joined(a, la):
twinx = True
if a.get_shared_y_axes().joined(a, la):
twiny = True
last_a.append(a)
if self._button_pressed == 1:
direction = 'in'
elif self._button_pressed == 3:
direction = 'out'
else:
continue
a._set_view_from_bbox((lastx, lasty, x, y), direction,
self._zoom_mode, twinx, twiny)
self._zoom_mode = None
self.toolmanager.get_tool(_views_positions).push_current()
self._cancel_action()
class ToolPan(ZoomPanBase):
"""Pan axes with left mouse, zoom with right"""
default_keymap = rcParams['keymap.pan']
description = 'Pan axes with left mouse, zoom with right'
image = 'move'
cursor = cursors.MOVE
radio_group = 'default'
def __init__(self, *args):
ZoomPanBase.__init__(self, *args)
self._idDrag = None
def _cancel_action(self):
self._button_pressed = None
self._xypress = []
self.figure.canvas.mpl_disconnect(self._idDrag)
self.toolmanager.messagelock.release(self)
self.toolmanager.get_tool(_views_positions).refresh_locators()
def _press(self, event):
if event.button == 1:
self._button_pressed = 1
elif event.button == 3:
self._button_pressed = 3
else:
self._cancel_action()
return
x, y = event.x, event.y
self._xypress = []
for i, a in enumerate(self.figure.get_axes()):
if (x is not None and y is not None and a.in_axes(event) and
a.get_navigate() and a.can_pan()):
a.start_pan(x, y, event.button)
self._xypress.append((a, i))
self.toolmanager.messagelock(self)
self._idDrag = self.figure.canvas.mpl_connect(
'motion_notify_event', self._mouse_move)
def _release(self, event):
if self._button_pressed is None:
self._cancel_action()
return
self.figure.canvas.mpl_disconnect(self._idDrag)
self.toolmanager.messagelock.release(self)
for a, _ind in self._xypress:
a.end_pan()
if not self._xypress:
self._cancel_action()
return
self.toolmanager.get_tool(_views_positions).push_current()
self._cancel_action()
def _mouse_move(self, event):
for a, _ind in self._xypress:
# safer to use the button recorded at the press than the current
# button: multiple buttons can get pressed during motion...
a.drag_pan(self._button_pressed, event.key, event.x, event.y)
self.toolmanager.canvas.draw_idle()
default_tools = {'home': ToolHome, 'back': ToolBack, 'forward': ToolForward,
'zoom': ToolZoom, 'pan': ToolPan,
'subplots': 'ToolConfigureSubplots',
'save': 'ToolSaveFigure',
'grid': ToolGrid,
'grid_minor': ToolMinorGrid,
'fullscreen': ToolFullScreen,
'quit': ToolQuit,
'quit_all': ToolQuitAll,
'allnav': ToolEnableAllNavigation,
'nav': ToolEnableNavigation,
'xscale': ToolXScale,
'yscale': ToolYScale,
'position': ToolCursorPosition,
_views_positions: ToolViewsPositions,
'cursor': 'ToolSetCursor',
'rubberband': 'ToolRubberband',
}
"""Default tools"""
default_toolbar_tools = [['navigation', ['home', 'back', 'forward']],
['zoompan', ['pan', 'zoom', 'subplots']],
['io', ['save']]]
"""Default tools in the toolbar"""
def add_tools_to_manager(toolmanager, tools=default_tools):
"""
Add multiple tools to `ToolManager`
Parameters
----------
toolmanager: ToolManager
`backend_managers.ToolManager` object that will get the tools added
tools : {str: class_like}, optional
The tools to add in a {name: tool} dict, see `add_tool` for more
info.
"""
for name, tool in six.iteritems(tools):
toolmanager.add_tool(name, tool)
def add_tools_to_container(container, tools=default_toolbar_tools):
"""
Add multiple tools to the container.
Parameters
----------
container: Container
`backend_bases.ToolContainerBase` object that will get the tools added
tools : list, optional
List in the form
[[group1, [tool1, tool2 ...]], [group2, [...]]]
Where the tools given by tool1, and tool2 will display in group1.
See `add_tool` for details.
"""
for group, grouptools in tools:
for position, tool in enumerate(grouptools):
container.add_tool(tool, group, position)
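# Usage sketch (illustrative): with the experimental ``toolmanager`` toolbar
# the backend calls the two helpers above to populate the default tools; an
# already-registered tool can afterwards be placed in the toolbar by name.
#
#     import matplotlib.pyplot as plt
#     plt.rcParams['toolbar'] = 'toolmanager'
#     fig = plt.figure()
#     fig.canvas.manager.toolbar.add_tool('grid', 'io')  # show the grid tool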
# File: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/lines.py
|
"""
This module contains the 2D line class, which can draw lines with a
variety of line styles, markers and colors.
"""
# TODO: expose cap and join style attrs
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
import numpy as np
from . import artist, cbook, colors as mcolors, docstring, rcParams
from .artist import Artist, allow_rasterization
from .cbook import (
_to_unmasked_float_array, iterable, is_numlike, ls_mapper, ls_mapper_r,
STEP_LOOKUP_MAP)
from .markers import MarkerStyle
from .path import Path
from .transforms import Bbox, TransformedPath, IdentityTransform
# Imported here for backward compatibility, even though they don't
# really belong.
from . import _path
from .markers import (
CARETLEFT, CARETRIGHT, CARETUP, CARETDOWN,
CARETLEFTBASE, CARETRIGHTBASE, CARETUPBASE, CARETDOWNBASE,
TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN)
def _get_dash_pattern(style):
"""Convert linestyle -> dash pattern
"""
# go from short hand -> full strings
if isinstance(style, six.string_types):
style = ls_mapper.get(style, style)
# un-dashed styles
if style in ['solid', 'None']:
offset, dashes = None, None
# dashed styles
elif style in ['dashed', 'dashdot', 'dotted']:
offset = 0
dashes = tuple(rcParams['lines.{}_pattern'.format(style)])
#
elif isinstance(style, tuple):
offset, dashes = style
else:
raise ValueError('Unrecognized linestyle: %s' % str(style))
# normalize offset to be positive and shorter than the dash cycle
if dashes is not None and offset is not None:
dsum = sum(dashes)
if dsum:
offset %= dsum
return offset, dashes
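# Illustrative behaviour of the helper above (dashed values come from
# ``rcParams['lines.<style>_pattern']``, so exact numbers depend on the
# active rc settings):
#
#     _get_dash_pattern('-')          # -> (None, None), solid
#     _get_dash_pattern('--')         # -> (0, tuple(rcParams['lines.dashed_pattern']))
#     _get_dash_pattern((7, (4, 1)))  # -> (2, (4, 1)); offset reduced mod sum(dashes)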
def _scale_dashes(offset, dashes, lw):
if not rcParams['lines.scale_dashes']:
return offset, dashes
scaled_offset = scaled_dashes = None
if offset is not None:
scaled_offset = offset * lw
if dashes is not None:
scaled_dashes = [x * lw if x is not None else None
for x in dashes]
return scaled_offset, scaled_dashes
def segment_hits(cx, cy, x, y, radius):
"""
Determine if any line segments are within radius of a
point. Returns the list of line segments that are within that
radius.
"""
# Process single points specially
if len(x) < 2:
res, = np.nonzero((cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2)
return res
# We need to lop the last element off a lot.
xr, yr = x[:-1], y[:-1]
# Only look at line segments whose nearest point to C on the line
# lies within the segment.
dx, dy = x[1:] - xr, y[1:] - yr
Lnorm_sq = dx ** 2 + dy ** 2 # Possibly want to eliminate Lnorm==0
u = ((cx - xr) * dx + (cy - yr) * dy) / Lnorm_sq
candidates = (u >= 0) & (u <= 1)
# Note that there is a little area near one side of each point
# which will be near neither segment, and another which will
# be near both, depending on the angle of the lines. The
# following radius test eliminates these ambiguities.
point_hits = (cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2
candidates = candidates & ~(point_hits[:-1] | point_hits[1:])
# For those candidates which remain, determine how far they lie away
# from the line.
px, py = xr + u * dx, yr + u * dy
line_hits = (cx - px) ** 2 + (cy - py) ** 2 <= radius ** 2
line_hits = line_hits & candidates
points, = point_hits.ravel().nonzero()
lines, = line_hits.ravel().nonzero()
return np.concatenate((points, lines))
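# Illustrative call (plain numpy, nothing figure-specific): a cursor at
# (0.5, 0.1) with radius 0.2 is close to the first segment of a horizontal
# polyline but to none of its vertices.
#
#     import numpy as np
#     x = np.array([0.0, 1.0, 2.0])
#     y = np.array([0.0, 0.0, 0.0])
#     segment_hits(0.5, 0.1, x, y, 0.2)   # -> array([0])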
def _mark_every_path(markevery, tpath, affine, ax_transform):
"""
Helper function that sorts out how to deal with the input
`markevery` and returns the points where markers should be drawn.
Takes in the `markevery` value and the line path and returns the
sub-sampled path.
"""
# pull out the two bits of data we want from the path
codes, verts = tpath.codes, tpath.vertices
def _slice_or_none(in_v, slc):
'''
Helper function to cope with `codes` being an
ndarray or `None`
'''
if in_v is None:
return None
return in_v[slc]
# if just a float, assume starting at 0.0 and make a tuple
if isinstance(markevery, float):
markevery = (0.0, markevery)
# if just an int, assume starting at 0 and make a tuple
elif isinstance(markevery, int):
markevery = (0, markevery)
# if just an numpy int, assume starting at 0 and make a tuple
elif isinstance(markevery, np.integer):
markevery = (0, markevery.item())
if isinstance(markevery, tuple):
if len(markevery) != 2:
raise ValueError('`markevery` is a tuple but its '
'len is not 2; '
'markevery=%s' % (markevery,))
start, step = markevery
# if step is an int, old behavior
if isinstance(step, int):
            # tuple of 2 ints is for backwards compatibility,
if not(isinstance(start, int)):
raise ValueError('`markevery` is a tuple with '
'len 2 and second element is an int, but '
'the first element is not an int; '
'markevery=%s' % (markevery,))
# just return, we are done here
return Path(verts[slice(start, None, step)],
_slice_or_none(codes, slice(start, None, step)))
elif isinstance(step, float):
if not (isinstance(start, int) or
isinstance(start, float)):
raise ValueError('`markevery` is a tuple with '
'len 2 and second element is a float, but '
'the first element is not a float or an '
'int; '
'markevery=%s' % (markevery,))
#calc cumulative distance along path (in display
# coords):
disp_coords = affine.transform(tpath.vertices)
delta = np.empty((len(disp_coords), 2),
dtype=float)
delta[0, :] = 0.0
delta[1:, :] = (disp_coords[1:, :] -
disp_coords[:-1, :])
delta = np.sum(delta**2, axis=1)
delta = np.sqrt(delta)
delta = np.cumsum(delta)
#calc distance between markers along path based on
# the axes bounding box diagonal being a distance
# of unity:
scale = ax_transform.transform(
np.array([[0, 0], [1, 1]]))
scale = np.diff(scale, axis=0)
scale = np.sum(scale**2)
scale = np.sqrt(scale)
marker_delta = np.arange(start * scale,
delta[-1],
step * scale)
#find closest actual data point that is closest to
# the theoretical distance along the path:
inds = np.abs(delta[np.newaxis, :] -
marker_delta[:, np.newaxis])
inds = inds.argmin(axis=1)
inds = np.unique(inds)
# return, we are done here
return Path(verts[inds],
_slice_or_none(codes, inds))
else:
raise ValueError('`markevery` is a tuple with '
'len 2, but its second element is not an int '
'or a float; '
'markevery=%s' % (markevery,))
elif isinstance(markevery, slice):
# mazol tov, it's already a slice, just return
return Path(verts[markevery],
_slice_or_none(codes, markevery))
elif iterable(markevery):
#fancy indexing
try:
return Path(verts[markevery],
_slice_or_none(codes, markevery))
except (ValueError, IndexError):
raise ValueError('`markevery` is iterable but '
'not a valid form of numpy fancy indexing; '
'markevery=%s' % (markevery,))
else:
raise ValueError('Value of `markevery` is not '
'recognized; '
'markevery=%s' % (markevery,))
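# Usage sketch (illustration only, assumes a working pyplot backend): the
# helper above is what ultimately honours the `markevery` keyword accepted by
# `Axes.plot` and `Line2D.set_markevery`.
def _example_markevery_usage():
    import matplotlib.pyplot as plt
    x = np.linspace(0, 2 * np.pi, 200)
    # an int: every 10th data point receives a marker
    plt.plot(x, np.sin(x), 'o-', markevery=10)
    # a float: markers spaced roughly 10% of the axes diagonal apart
    plt.plot(x, np.cos(x), 's-', markevery=0.1)
    return plt.gcf()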
class Line2D(Artist):
"""
A line - the line can have both a solid linestyle connecting all
the vertices, and a marker at each vertex. Additionally, the
drawing of the solid line is influenced by the drawstyle, e.g., one
can create "stepped" lines in various styles.
"""
lineStyles = _lineStyles = { # hidden names deprecated
'-': '_draw_solid',
'--': '_draw_dashed',
'-.': '_draw_dash_dot',
':': '_draw_dotted',
'None': '_draw_nothing',
' ': '_draw_nothing',
'': '_draw_nothing',
}
_drawStyles_l = {
'default': '_draw_lines',
'steps-mid': '_draw_steps_mid',
'steps-pre': '_draw_steps_pre',
'steps-post': '_draw_steps_post',
}
_drawStyles_s = {
'steps': '_draw_steps_pre',
}
# drawStyles should now be deprecated.
drawStyles = {}
drawStyles.update(_drawStyles_l)
drawStyles.update(_drawStyles_s)
# Need a list ordered with long names first:
drawStyleKeys = list(_drawStyles_l) + list(_drawStyles_s)
# Referenced here to maintain API. These are defined in
# MarkerStyle
markers = MarkerStyle.markers
filled_markers = MarkerStyle.filled_markers
fillStyles = MarkerStyle.fillstyles
zorder = 2
validCap = ('butt', 'round', 'projecting')
validJoin = ('miter', 'round', 'bevel')
def __str__(self):
if self._label != "":
return "Line2D(%s)" % (self._label)
elif self._x is None:
return "Line2D()"
elif len(self._x) > 3:
return "Line2D((%g,%g),(%g,%g),...,(%g,%g))"\
% (self._x[0], self._y[0], self._x[0],
self._y[0], self._x[-1], self._y[-1])
else:
return "Line2D(%s)"\
% (",".join(["(%g,%g)" % (x, y) for x, y
in zip(self._x, self._y)]))
def __init__(self, xdata, ydata,
linewidth=None, # all Nones default to rc
linestyle=None,
color=None,
marker=None,
markersize=None,
markeredgewidth=None,
markeredgecolor=None,
markerfacecolor=None,
markerfacecoloralt='none',
fillstyle=None,
antialiased=None,
dash_capstyle=None,
solid_capstyle=None,
dash_joinstyle=None,
solid_joinstyle=None,
pickradius=5,
drawstyle=None,
markevery=None,
**kwargs
):
"""
Create a :class:`~matplotlib.lines.Line2D` instance with *x*
and *y* data in sequences *xdata*, *ydata*.
The kwargs are :class:`~matplotlib.lines.Line2D` properties:
%(Line2D)s
        See :meth:`set_linestyle` for a description of the line styles,
:meth:`set_marker` for a description of the markers, and
:meth:`set_drawstyle` for a description of the draw styles.
"""
Artist.__init__(self)
#convert sequences to numpy arrays
if not iterable(xdata):
raise RuntimeError('xdata must be a sequence')
if not iterable(ydata):
raise RuntimeError('ydata must be a sequence')
if linewidth is None:
linewidth = rcParams['lines.linewidth']
if linestyle is None:
linestyle = rcParams['lines.linestyle']
if marker is None:
marker = rcParams['lines.marker']
if color is None:
color = rcParams['lines.color']
if markersize is None:
markersize = rcParams['lines.markersize']
if antialiased is None:
antialiased = rcParams['lines.antialiased']
if dash_capstyle is None:
dash_capstyle = rcParams['lines.dash_capstyle']
if dash_joinstyle is None:
dash_joinstyle = rcParams['lines.dash_joinstyle']
if solid_capstyle is None:
solid_capstyle = rcParams['lines.solid_capstyle']
if solid_joinstyle is None:
solid_joinstyle = rcParams['lines.solid_joinstyle']
if isinstance(linestyle, six.string_types):
ds, ls = self._split_drawstyle_linestyle(linestyle)
if ds is not None and drawstyle is not None and ds != drawstyle:
raise ValueError("Inconsistent drawstyle ({0!r}) and "
"linestyle ({1!r})".format(drawstyle,
linestyle)
)
linestyle = ls
if ds is not None:
drawstyle = ds
if drawstyle is None:
drawstyle = 'default'
self._dashcapstyle = None
self._dashjoinstyle = None
self._solidjoinstyle = None
self._solidcapstyle = None
self.set_dash_capstyle(dash_capstyle)
self.set_dash_joinstyle(dash_joinstyle)
self.set_solid_capstyle(solid_capstyle)
self.set_solid_joinstyle(solid_joinstyle)
self._linestyles = None
self._drawstyle = None
self._linewidth = linewidth
# scaled dash + offset
self._dashSeq = None
self._dashOffset = 0
# unscaled dash + offset
# this is needed scaling the dash pattern by linewidth
self._us_dashSeq = None
self._us_dashOffset = 0
self.set_linestyle(linestyle)
self.set_drawstyle(drawstyle)
self.set_linewidth(linewidth)
self._color = None
self.set_color(color)
self._marker = MarkerStyle(marker, fillstyle)
self._markevery = None
self._markersize = None
self._antialiased = None
self.set_markevery(markevery)
self.set_antialiased(antialiased)
self.set_markersize(markersize)
self._markeredgecolor = None
self._markeredgewidth = None
self._markerfacecolor = None
self._markerfacecoloralt = None
self.set_markerfacecolor(markerfacecolor)
self.set_markerfacecoloralt(markerfacecoloralt)
self.set_markeredgecolor(markeredgecolor)
self.set_markeredgewidth(markeredgewidth)
self.verticalOffset = None
# update kwargs before updating data to give the caller a
# chance to init axes (and hence unit support)
self.update(kwargs)
self.pickradius = pickradius
self.ind_offset = 0
if is_numlike(self._picker):
self.pickradius = self._picker
self._xorig = np.asarray([])
self._yorig = np.asarray([])
self._invalidx = True
self._invalidy = True
self._x = None
self._y = None
self._xy = None
self._path = None
self._transformed_path = None
self._subslice = False
self._x_filled = None # used in subslicing; only x is needed
self.set_data(xdata, ydata)
def contains(self, mouseevent):
"""
Test whether the mouse event occurred on the line. The pick
radius determines the precision of the location test (usually
within five points of the value). Use
:meth:`~matplotlib.lines.Line2D.get_pickradius` or
:meth:`~matplotlib.lines.Line2D.set_pickradius` to view or
modify it.
Returns *True* if any values are within the radius along with
``{'ind': pointlist}``, where *pointlist* is the set of points
within the radius.
TODO: sort returned indices by distance
"""
if callable(self._contains):
return self._contains(self, mouseevent)
if not is_numlike(self.pickradius):
raise ValueError("pick radius should be a distance")
# Make sure we have data to plot
if self._invalidy or self._invalidx:
self.recache()
if len(self._xy) == 0:
return False, {}
# Convert points to pixels
transformed_path = self._get_transformed_path()
path, affine = transformed_path.get_transformed_path_and_affine()
path = affine.transform_path(path)
xy = path.vertices
xt = xy[:, 0]
yt = xy[:, 1]
# Convert pick radius from points to pixels
if self.figure is None:
            warnings.warn('no figure set when checking if mouse is on line')
pixels = self.pickradius
else:
pixels = self.figure.dpi / 72. * self.pickradius
# the math involved in checking for containment (here and inside of
# segment_hits) assumes that it is OK to overflow. In case the
# application has set the error flags such that an exception is raised
# on overflow, we temporarily set the appropriate error flags here and
# set them back when we are finished.
with np.errstate(all='ignore'):
# Check for collision
if self._linestyle in ['None', None]:
# If no line, return the nearby point(s)
d = (xt - mouseevent.x) ** 2 + (yt - mouseevent.y) ** 2
ind, = np.nonzero(np.less_equal(d, pixels ** 2))
else:
# If line, return the nearby segment(s)
ind = segment_hits(mouseevent.x, mouseevent.y, xt, yt, pixels)
if self._drawstyle.startswith("steps"):
ind //= 2
ind += self.ind_offset
# Return the point(s) within radius
return len(ind) > 0, dict(ind=ind)
def get_pickradius(self):
"""return the pick radius used for containment tests"""
return self.pickradius
def set_pickradius(self, d):
"""Set the pick radius used for containment tests.
.. ACCEPTS: float distance in points
Parameters
----------
d : float
Pick radius, in points.
"""
self.pickradius = d
def get_fillstyle(self):
"""
return the marker fillstyle
"""
return self._marker.get_fillstyle()
def set_fillstyle(self, fs):
"""
Set the marker fill style; 'full' means fill the whole marker.
'none' means no filling; other options are for half-filled markers.
ACCEPTS: ['full' | 'left' | 'right' | 'bottom' | 'top' | 'none']
"""
self._marker.set_fillstyle(fs)
self.stale = True
def set_markevery(self, every):
"""Set the markevery property to subsample the plot when using markers.
e.g., if `every=5`, every 5-th marker will be plotted.
ACCEPTS: [None | int | length-2 tuple of int | slice |
list/array of int | float | length-2 tuple of float]
Parameters
----------
every: None | int | length-2 tuple of int | slice | list/array of int \
| float | length-2 tuple of float
Which markers to plot.
- every=None, every point will be plotted.
- every=N, every N-th marker will be plotted starting with
marker 0.
- every=(start, N), every N-th marker, starting at point
start, will be plotted.
- every=slice(start, end, N), every N-th marker, starting at
          point start, up to but not including point end, will be plotted.
- every=[i, j, m, n], only markers at points i, j, m, and n
will be plotted.
- every=0.1, (i.e. a float) then markers will be spaced at
approximately equal distances along the line; the distance
along the line between markers is determined by multiplying the
display-coordinate distance of the axes bounding-box diagonal
by the value of every.
- every=(0.5, 0.1) (i.e. a length-2 tuple of float), the
same functionality as every=0.1 is exhibited but the first
marker will be 0.5 multiplied by the
          display-coordinate-diagonal-distance along the line.
Notes
-----
Setting the markevery property will only show markers at actual data
points. When using float arguments to set the markevery property
on irregularly spaced data, the markers will likely not appear evenly
spaced because the actual data points do not coincide with the
theoretical spacing between markers.
When using a start offset to specify the first marker, the offset will
        be from the first data point, which may be different from the first
        visible data point if the plot is zoomed in.
If zooming in on a plot when using float arguments then the actual
data points that have markers will change because the distance between
markers is always determined from the display-coordinates
axes-bounding-box-diagonal regardless of the actual axes data limits.
"""
if self._markevery != every:
self.stale = True
self._markevery = every
def get_markevery(self):
"""return the markevery setting"""
return self._markevery
def set_picker(self, p):
"""Sets the event picker details for the line.
ACCEPTS: float distance in points or callable pick function
``fn(artist, event)``
"""
if callable(p):
self._contains = p
else:
self.pickradius = p
self._picker = p
def get_window_extent(self, renderer):
bbox = Bbox([[0, 0], [0, 0]])
trans_data_to_xy = self.get_transform().transform
bbox.update_from_data_xy(trans_data_to_xy(self.get_xydata()),
ignore=True)
# correct for marker size, if any
if self._marker:
ms = (self._markersize / 72.0 * self.figure.dpi) * 0.5
bbox = bbox.padded(ms)
return bbox
@Artist.axes.setter
def axes(self, ax):
# call the set method from the base-class property
Artist.axes.fset(self, ax)
if ax is not None:
# connect unit-related callbacks
if ax.xaxis is not None:
self._xcid = ax.xaxis.callbacks.connect('units',
self.recache_always)
if ax.yaxis is not None:
self._ycid = ax.yaxis.callbacks.connect('units',
self.recache_always)
def set_data(self, *args):
"""
Set the x and y data
ACCEPTS: 2D array (rows are x, y) or two 1D arrays
"""
if len(args) == 1:
x, y = args[0]
else:
x, y = args
self.set_xdata(x)
self.set_ydata(y)
def recache_always(self):
self.recache(always=True)
def recache(self, always=False):
if always or self._invalidx:
xconv = self.convert_xunits(self._xorig)
x = _to_unmasked_float_array(xconv).ravel()
else:
x = self._x
if always or self._invalidy:
yconv = self.convert_yunits(self._yorig)
y = _to_unmasked_float_array(yconv).ravel()
else:
y = self._y
self._xy = np.column_stack(np.broadcast_arrays(x, y)).astype(float)
self._x, self._y = self._xy.T # views
self._subslice = False
if (self.axes and len(x) > 1000 and self._is_sorted(x) and
self.axes.name == 'rectilinear' and
self.axes.get_xscale() == 'linear' and
self._markevery is None and
self.get_clip_on() is True):
self._subslice = True
nanmask = np.isnan(x)
if nanmask.any():
self._x_filled = self._x.copy()
indices = np.arange(len(x))
self._x_filled[nanmask] = np.interp(indices[nanmask],
indices[~nanmask], self._x[~nanmask])
else:
self._x_filled = self._x
if self._path is not None:
interpolation_steps = self._path._interpolation_steps
else:
interpolation_steps = 1
xy = STEP_LOOKUP_MAP[self._drawstyle](*self._xy.T)
self._path = Path(np.asarray(xy).T,
_interpolation_steps=interpolation_steps)
self._transformed_path = None
self._invalidx = False
self._invalidy = False
def _transform_path(self, subslice=None):
"""
Puts a TransformedPath instance at self._transformed_path;
all invalidation of the transform is then handled by the
TransformedPath instance.
"""
# Masked arrays are now handled by the Path class itself
if subslice is not None:
xy = STEP_LOOKUP_MAP[self._drawstyle](*self._xy[subslice, :].T)
_path = Path(np.asarray(xy).T,
_interpolation_steps=self._path._interpolation_steps)
else:
_path = self._path
self._transformed_path = TransformedPath(_path, self.get_transform())
def _get_transformed_path(self):
"""
Return the :class:`~matplotlib.transforms.TransformedPath` instance
of this line.
"""
if self._transformed_path is None:
self._transform_path()
return self._transformed_path
def set_transform(self, t):
"""
set the Transformation instance used by this artist
ACCEPTS: a :class:`matplotlib.transforms.Transform` instance
"""
Artist.set_transform(self, t)
self._invalidx = True
self._invalidy = True
self.stale = True
def _is_sorted(self, x):
"""return True if x is sorted in ascending order"""
# We don't handle the monotonically decreasing case.
return _path.is_sorted(x)
@allow_rasterization
def draw(self, renderer):
"""draw the Line with `renderer` unless visibility is False"""
if not self.get_visible():
return
if self._invalidy or self._invalidx:
self.recache()
self.ind_offset = 0 # Needed for contains() method.
if self._subslice and self.axes:
x0, x1 = self.axes.get_xbound()
i0, = self._x_filled.searchsorted([x0], 'left')
i1, = self._x_filled.searchsorted([x1], 'right')
subslice = slice(max(i0 - 1, 0), i1 + 1)
self.ind_offset = subslice.start
self._transform_path(subslice)
transf_path = self._get_transformed_path()
if self.get_path_effects():
from matplotlib.patheffects import PathEffectRenderer
renderer = PathEffectRenderer(self.get_path_effects(), renderer)
renderer.open_group('line2d', self.get_gid())
if self._lineStyles[self._linestyle] != '_draw_nothing':
tpath, affine = transf_path.get_transformed_path_and_affine()
if len(tpath.vertices):
gc = renderer.new_gc()
self._set_gc_clip(gc)
ln_color_rgba = self._get_rgba_ln_color()
gc.set_foreground(ln_color_rgba, isRGBA=True)
gc.set_alpha(ln_color_rgba[3])
gc.set_antialiased(self._antialiased)
gc.set_linewidth(self._linewidth)
if self.is_dashed():
cap = self._dashcapstyle
join = self._dashjoinstyle
else:
cap = self._solidcapstyle
join = self._solidjoinstyle
gc.set_joinstyle(join)
gc.set_capstyle(cap)
gc.set_snap(self.get_snap())
if self.get_sketch_params() is not None:
gc.set_sketch_params(*self.get_sketch_params())
gc.set_dashes(self._dashOffset, self._dashSeq)
renderer.draw_path(gc, tpath, affine.frozen())
gc.restore()
if self._marker and self._markersize > 0:
gc = renderer.new_gc()
self._set_gc_clip(gc)
rgbaFace = self._get_rgba_face()
rgbaFaceAlt = self._get_rgba_face(alt=True)
edgecolor = self.get_markeredgecolor()
if cbook._str_lower_equal(edgecolor, "none"):
gc.set_linewidth(0)
gc.set_foreground(rgbaFace, isRGBA=True)
else:
gc.set_foreground(edgecolor)
gc.set_linewidth(self._markeredgewidth)
mec = self._markeredgecolor
if (cbook._str_equal(mec, "auto")
and not cbook._str_lower_equal(
self.get_markerfacecolor(), "none")):
gc.set_alpha(rgbaFace[3])
else:
gc.set_alpha(self.get_alpha())
marker = self._marker
tpath, affine = transf_path.get_transformed_points_and_affine()
if len(tpath.vertices):
# subsample the markers if markevery is not None
markevery = self.get_markevery()
if markevery is not None:
subsampled = _mark_every_path(markevery, tpath,
affine, self.axes.transAxes)
else:
subsampled = tpath
snap = marker.get_snap_threshold()
if type(snap) == float:
snap = renderer.points_to_pixels(self._markersize) >= snap
gc.set_snap(snap)
gc.set_joinstyle(marker.get_joinstyle())
gc.set_capstyle(marker.get_capstyle())
marker_path = marker.get_path()
marker_trans = marker.get_transform()
w = renderer.points_to_pixels(self._markersize)
if cbook._str_equal(marker.get_marker(), ","):
gc.set_linewidth(0)
else:
# Don't scale for pixels, and don't stroke them
marker_trans = marker_trans.scale(w)
renderer.draw_markers(gc, marker_path, marker_trans,
subsampled, affine.frozen(),
rgbaFace)
alt_marker_path = marker.get_alt_path()
if alt_marker_path:
alt_marker_trans = marker.get_alt_transform()
alt_marker_trans = alt_marker_trans.scale(w)
if (cbook._str_equal(mec, "auto")
and not cbook._str_lower_equal(
self.get_markerfacecoloralt(), "none")):
gc.set_alpha(rgbaFaceAlt[3])
else:
gc.set_alpha(self.get_alpha())
renderer.draw_markers(
gc, alt_marker_path, alt_marker_trans, subsampled,
affine.frozen(), rgbaFaceAlt)
gc.restore()
renderer.close_group('line2d')
self.stale = False
def get_antialiased(self):
return self._antialiased
def get_color(self):
return self._color
def get_drawstyle(self):
return self._drawstyle
def get_linestyle(self):
return self._linestyle
def get_linewidth(self):
return self._linewidth
def get_marker(self):
return self._marker.get_marker()
def get_markeredgecolor(self):
mec = self._markeredgecolor
if isinstance(mec, six.string_types) and mec == 'auto':
if rcParams['_internal.classic_mode']:
if self._marker.get_marker() in ('.', ','):
return self._color
if self._marker.is_filled() and self.get_fillstyle() != 'none':
return 'k' # Bad hard-wired default...
return self._color
else:
return mec
def get_markeredgewidth(self):
return self._markeredgewidth
def _get_markerfacecolor(self, alt=False):
if alt:
fc = self._markerfacecoloralt
else:
fc = self._markerfacecolor
if (isinstance(fc, six.string_types) and fc.lower() == 'auto'):
if self.get_fillstyle() == 'none':
return 'none'
else:
return self._color
else:
return fc
def get_markerfacecolor(self):
return self._get_markerfacecolor(alt=False)
def get_markerfacecoloralt(self):
return self._get_markerfacecolor(alt=True)
def get_markersize(self):
return self._markersize
def get_data(self, orig=True):
"""
Return the xdata, ydata.
If *orig* is *True*, return the original data.
"""
return self.get_xdata(orig=orig), self.get_ydata(orig=orig)
def get_xdata(self, orig=True):
"""
Return the xdata.
If *orig* is *True*, return the original data, else the
processed data.
"""
if orig:
return self._xorig
if self._invalidx:
self.recache()
return self._x
def get_ydata(self, orig=True):
"""
Return the ydata.
If *orig* is *True*, return the original data, else the
processed data.
"""
if orig:
return self._yorig
if self._invalidy:
self.recache()
return self._y
def get_path(self):
"""
Return the :class:`~matplotlib.path.Path` object associated
with this line.
"""
if self._invalidy or self._invalidx:
self.recache()
return self._path
def get_xydata(self):
"""
Return the *xy* data as a Nx2 numpy array.
"""
if self._invalidy or self._invalidx:
self.recache()
return self._xy
def set_antialiased(self, b):
"""
Set whether to use antialiased rendering.
Parameters
----------
b : bool
.. ACCEPTS: bool
"""
if self._antialiased != b:
self.stale = True
self._antialiased = b
def set_color(self, color):
"""
Set the color of the line
ACCEPTS: any matplotlib color
"""
self._color = color
self.stale = True
def set_drawstyle(self, drawstyle):
"""
Set the drawstyle of the plot
'default' connects the points with lines. The steps variants
produce step-plots. 'steps' is equivalent to 'steps-pre' and
is maintained for backward-compatibility.
ACCEPTS: ['default' | 'steps' | 'steps-pre' | 'steps-mid' |
'steps-post']
"""
if drawstyle is None:
drawstyle = 'default'
if drawstyle not in self.drawStyles:
raise ValueError('Unrecognized drawstyle {!r}'.format(drawstyle))
if self._drawstyle != drawstyle:
self.stale = True
# invalidate to trigger a recache of the path
self._invalidx = True
self._drawstyle = drawstyle
def set_linewidth(self, w):
"""
Set the line width in points
ACCEPTS: float value in points
"""
w = float(w)
if self._linewidth != w:
self.stale = True
self._linewidth = w
# rescale the dashes + offset
self._dashOffset, self._dashSeq = _scale_dashes(
self._us_dashOffset, self._us_dashSeq, self._linewidth)
def _split_drawstyle_linestyle(self, ls):
'''Split drawstyle from linestyle string
        If `ls` is only a drawstyle, default to returning a linestyle
of '-'.
Parameters
----------
ls : str
The linestyle to be processed
Returns
-------
ret_ds : str or None
If the linestyle string does not contain a drawstyle prefix
return None, otherwise return it.
ls : str
The linestyle with the drawstyle (if any) stripped.
'''
ret_ds = None
for ds in self.drawStyleKeys: # long names are first in the list
if ls.startswith(ds):
ret_ds = ds
if len(ls) > len(ds):
ls = ls[len(ds):]
else:
ls = '-'
break
return ret_ds, ls
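    # For example (illustration only): 'steps-mid--' splits into
    # ('steps-mid', '--'), 'steps' alone splits into ('steps', '-'), and a
    # plain '--' yields (None, '--').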
def set_linestyle(self, ls):
"""
Set the linestyle of the line (also accepts drawstyles,
e.g., ``'steps--'``)
=========================== =================
linestyle description
=========================== =================
``'-'`` or ``'solid'`` solid line
``'--'`` or ``'dashed'`` dashed line
``'-.'`` or ``'dashdot'`` dash-dotted line
``':'`` or ``'dotted'`` dotted line
``'None'`` draw nothing
``' '`` draw nothing
``''`` draw nothing
=========================== =================
'steps' is equivalent to 'steps-pre' and is maintained for
backward-compatibility.
Alternatively a dash tuple of the following form can be provided::
(offset, onoffseq),
where ``onoffseq`` is an even length tuple of on and off ink
in points.
        ACCEPTS: ['solid' | 'dashed' | 'dashdot' | 'dotted' |
(offset, on-off-dash-seq) |
``'-'`` | ``'--'`` | ``'-.'`` | ``':'`` | ``'None'`` |
``' '`` | ``''``]
.. seealso::
:meth:`set_drawstyle`
To set the drawing style (stepping) of the plot.
Parameters
----------
ls : { ``'-'``, ``'--'``, ``'-.'``, ``':'``} and more see description
The line style.
"""
if isinstance(ls, six.string_types):
ds, ls = self._split_drawstyle_linestyle(ls)
if ds is not None:
self.set_drawstyle(ds)
if ls in [' ', '', 'none']:
ls = 'None'
if ls not in self._lineStyles:
try:
ls = ls_mapper_r[ls]
except KeyError:
raise ValueError(("You passed in an invalid linestyle, "
"`{0}`. See "
"docs of Line2D.set_linestyle for "
"valid values.").format(ls))
self._linestyle = ls
else:
            # a dash tuple of the form (offset, onoffseq) was passed in;
            # report the style as dashed
            self._linestyle = '--'
# get the unscaled dashes
self._us_dashOffset, self._us_dashSeq = _get_dash_pattern(ls)
# compute the linewidth scaled dashes
self._dashOffset, self._dashSeq = _scale_dashes(
self._us_dashOffset, self._us_dashSeq, self._linewidth)
@docstring.dedent_interpd
def set_marker(self, marker):
"""
Set the line marker
ACCEPTS: :mod:`A valid marker style <matplotlib.markers>`
Parameters
----------
marker: marker style
See `~matplotlib.markers` for full description of possible
argument
"""
self._marker.set_marker(marker)
self.stale = True
def set_markeredgecolor(self, ec):
"""
Set the marker edge color
ACCEPTS: any matplotlib color
"""
if ec is None:
ec = 'auto'
if self._markeredgecolor is None or \
np.any(self._markeredgecolor != ec):
self.stale = True
self._markeredgecolor = ec
def set_markeredgewidth(self, ew):
"""
Set the marker edge width in points
ACCEPTS: float value in points
"""
if ew is None:
ew = rcParams['lines.markeredgewidth']
if self._markeredgewidth != ew:
self.stale = True
self._markeredgewidth = ew
def set_markerfacecolor(self, fc):
"""
Set the marker face color.
ACCEPTS: any matplotlib color
"""
if fc is None:
fc = 'auto'
if np.any(self._markerfacecolor != fc):
self.stale = True
self._markerfacecolor = fc
def set_markerfacecoloralt(self, fc):
"""
Set the alternate marker face color.
ACCEPTS: any matplotlib color
"""
if fc is None:
fc = 'auto'
if np.any(self._markerfacecoloralt != fc):
self.stale = True
self._markerfacecoloralt = fc
def set_markersize(self, sz):
"""
Set the marker size in points
ACCEPTS: float
"""
sz = float(sz)
if self._markersize != sz:
self.stale = True
self._markersize = sz
def set_xdata(self, x):
"""
Set the data np.array for x
ACCEPTS: 1D array
"""
self._xorig = x
self._invalidx = True
self.stale = True
def set_ydata(self, y):
"""
Set the data np.array for y
ACCEPTS: 1D array
"""
self._yorig = y
self._invalidy = True
self.stale = True
def set_dashes(self, seq):
"""
        Set the dash sequence, a sequence of alternating on/off ink lengths
        in points. If *seq* is empty or equal to ``(None, None)``, the
        linestyle will be set to solid.
ACCEPTS: sequence of on/off ink in points
"""
if seq == (None, None) or len(seq) == 0:
self.set_linestyle('-')
else:
self.set_linestyle((0, seq))
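    # Example (illustration only): ``line.set_dashes([4, 2, 1, 2])`` draws a
    # 4pt dash, 2pt gap, 1pt dash, 2pt gap pattern, while
    # ``line.set_dashes([])`` restores a solid line.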
def update_from(self, other):
"""copy properties from other to self"""
Artist.update_from(self, other)
self._linestyle = other._linestyle
self._linewidth = other._linewidth
self._color = other._color
self._markersize = other._markersize
self._markerfacecolor = other._markerfacecolor
self._markerfacecoloralt = other._markerfacecoloralt
self._markeredgecolor = other._markeredgecolor
self._markeredgewidth = other._markeredgewidth
self._dashSeq = other._dashSeq
self._us_dashSeq = other._us_dashSeq
self._dashOffset = other._dashOffset
self._us_dashOffset = other._us_dashOffset
self._dashcapstyle = other._dashcapstyle
self._dashjoinstyle = other._dashjoinstyle
self._solidcapstyle = other._solidcapstyle
self._solidjoinstyle = other._solidjoinstyle
self._linestyle = other._linestyle
self._marker = MarkerStyle(other._marker.get_marker(),
other._marker.get_fillstyle())
self._drawstyle = other._drawstyle
def _get_rgba_face(self, alt=False):
return mcolors.to_rgba(self._get_markerfacecolor(alt=alt), self._alpha)
def _get_rgba_ln_color(self, alt=False):
return mcolors.to_rgba(self._color, self._alpha)
# some aliases....
def set_aa(self, val):
'alias for set_antialiased'
self.set_antialiased(val)
def set_c(self, val):
'alias for set_color'
self.set_color(val)
def set_ls(self, val):
"""alias for set_linestyle"""
self.set_linestyle(val)
def set_lw(self, val):
"""alias for set_linewidth"""
self.set_linewidth(val)
def set_mec(self, val):
"""alias for set_markeredgecolor"""
self.set_markeredgecolor(val)
def set_mew(self, val):
"""alias for set_markeredgewidth"""
self.set_markeredgewidth(val)
def set_mfc(self, val):
"""alias for set_markerfacecolor"""
self.set_markerfacecolor(val)
def set_mfcalt(self, val):
"""alias for set_markerfacecoloralt"""
self.set_markerfacecoloralt(val)
def set_ms(self, val):
"""alias for set_markersize"""
self.set_markersize(val)
def get_aa(self):
"""alias for get_antialiased"""
return self.get_antialiased()
def get_c(self):
"""alias for get_color"""
return self.get_color()
def get_ls(self):
"""alias for get_linestyle"""
return self.get_linestyle()
def get_lw(self):
"""alias for get_linewidth"""
return self.get_linewidth()
def get_mec(self):
"""alias for get_markeredgecolor"""
return self.get_markeredgecolor()
def get_mew(self):
"""alias for get_markeredgewidth"""
return self.get_markeredgewidth()
def get_mfc(self):
"""alias for get_markerfacecolor"""
return self.get_markerfacecolor()
def get_mfcalt(self, alt=False):
"""alias for get_markerfacecoloralt"""
return self.get_markerfacecoloralt()
def get_ms(self):
"""alias for get_markersize"""
return self.get_markersize()
def set_dash_joinstyle(self, s):
"""
Set the join style for dashed linestyles
ACCEPTS: ['miter' | 'round' | 'bevel']
"""
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_dash_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
if self._dashjoinstyle != s:
self.stale = True
self._dashjoinstyle = s
def set_solid_joinstyle(self, s):
"""
Set the join style for solid linestyles
ACCEPTS: ['miter' | 'round' | 'bevel']
"""
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_solid_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
if self._solidjoinstyle != s:
self.stale = True
self._solidjoinstyle = s
def get_dash_joinstyle(self):
"""
Get the join style for dashed linestyles
"""
return self._dashjoinstyle
def get_solid_joinstyle(self):
"""
Get the join style for solid linestyles
"""
return self._solidjoinstyle
def set_dash_capstyle(self, s):
"""
Set the cap style for dashed linestyles
ACCEPTS: ['butt' | 'round' | 'projecting']
"""
s = s.lower()
if s not in self.validCap:
raise ValueError('set_dash_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
if self._dashcapstyle != s:
self.stale = True
self._dashcapstyle = s
def set_solid_capstyle(self, s):
"""
Set the cap style for solid linestyles
ACCEPTS: ['butt' | 'round' | 'projecting']
"""
s = s.lower()
if s not in self.validCap:
raise ValueError('set_solid_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
if self._solidcapstyle != s:
self.stale = True
self._solidcapstyle = s
def get_dash_capstyle(self):
"""
Get the cap style for dashed linestyles
"""
return self._dashcapstyle
def get_solid_capstyle(self):
"""
Get the cap style for solid linestyles
"""
return self._solidcapstyle
def is_dashed(self):
        'return True if the line has a dashed linestyle'
return self._linestyle in ('--', '-.', ':')
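# Construction sketch (illustration only, assumes a working pyplot backend):
# Line2D instances are normally created by `Axes.plot`, but they can also be
# built directly and added to an Axes; the keyword arguments are the
# properties documented above.
def _example_line2d_direct():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    line = Line2D([0, 1, 2], [0, 1, 0],
                  linestyle='--', marker='o', color='green', markersize=8)
    ax.add_line(line)
    ax.set_xlim(-0.5, 2.5)
    ax.set_ylim(-0.5, 1.5)
    return fig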
class VertexSelector(object):
"""
Manage the callbacks to maintain a list of selected vertices for
:class:`matplotlib.lines.Line2D`. Derived classes should override
:meth:`~matplotlib.lines.VertexSelector.process_selected` to do
something with the picks.
Here is an example which highlights the selected verts with red
circles::
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.lines as lines
class HighlightSelected(lines.VertexSelector):
def __init__(self, line, fmt='ro', **kwargs):
lines.VertexSelector.__init__(self, line)
self.markers, = self.axes.plot([], [], fmt, **kwargs)
def process_selected(self, ind, xs, ys):
self.markers.set_data(xs, ys)
self.canvas.draw()
fig = plt.figure()
ax = fig.add_subplot(111)
x, y = np.random.rand(2, 30)
line, = ax.plot(x, y, 'bs-', picker=5)
selector = HighlightSelected(line)
plt.show()
"""
def __init__(self, line):
"""
Initialize the class with a :class:`matplotlib.lines.Line2D`
instance. The line should already be added to some
:class:`matplotlib.axes.Axes` instance and should have the
picker property set.
"""
if line.axes is None:
raise RuntimeError('You must first add the line to the Axes')
if line.get_picker() is None:
raise RuntimeError('You must first set the picker property '
'of the line')
self.axes = line.axes
self.line = line
self.canvas = self.axes.figure.canvas
self.cid = self.canvas.mpl_connect('pick_event', self.onpick)
self.ind = set()
def process_selected(self, ind, xs, ys):
"""
Default "do nothing" implementation of the
:meth:`process_selected` method.
*ind* are the indices of the selected vertices. *xs* and *ys*
are the coordinates of the selected vertices.
"""
pass
def onpick(self, event):
"""When the line is picked, update the set of selected indices."""
if event.artist is not self.line:
return
self.ind ^= set(event.ind)
ind = sorted(self.ind)
xdata, ydata = self.line.get_data()
self.process_selected(ind, xdata[ind], ydata[ind])
lineStyles = Line2D._lineStyles
lineMarkers = MarkerStyle.markers
drawStyles = Line2D.drawStyles
fillStyles = MarkerStyle.fillstyles
docstring.interpd.update(Line2D=artist.kwdoc(Line2D))
# You can not set the docstring of an instancemethod,
# but you can on the underlying function. Go figure.
docstring.dedent_interpd(Line2D.__init__)
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/markers.py |
"""
This module contains functions to handle markers. Used by both the
marker functionality of `~matplotlib.axes.Axes.plot` and
`~matplotlib.axes.Axes.scatter`.
All possible markers are defined here:
============================== ===============================================
marker description
============================== ===============================================
`"."` point
`","` pixel
`"o"` circle
`"v"` triangle_down
`"^"` triangle_up
`"<"` triangle_left
`">"` triangle_right
`"1"` tri_down
`"2"` tri_up
`"3"` tri_left
`"4"` tri_right
`"8"` octagon
`"s"` square
`"p"` pentagon
`"P"` plus (filled)
`"*"` star
`"h"` hexagon1
`"H"` hexagon2
`"+"` plus
`"x"` x
`"X"` x (filled)
`"D"` diamond
`"d"` thin_diamond
`"|"` vline
`"_"` hline
TICKLEFT tickleft
TICKRIGHT tickright
TICKUP tickup
TICKDOWN tickdown
CARETLEFT caretleft (centered at tip)
CARETRIGHT caretright (centered at tip)
CARETUP caretup (centered at tip)
CARETDOWN caretdown (centered at tip)
CARETLEFTBASE caretleft (centered at base)
CARETRIGHTBASE caretright (centered at base)
CARETUPBASE caretup (centered at base)
`"None"`, `" "` or `""` nothing
``'$...$'`` render the string using mathtext.
`verts` a list of (x, y) pairs used for Path vertices.
The center of the marker is located at (0,0) and
the size is normalized.
path a `~matplotlib.path.Path` instance.
(`numsides`, `style`, `angle`) The marker can also be a tuple (`numsides`,
`style`, `angle`), which will create a custom,
regular symbol.
`numsides`:
the number of sides
`style`:
the style of the regular symbol:
0
a regular polygon
1
a star-like symbol
2
an asterisk
3
                                       a circle (`numsides` and `angle` are
                                       ignored)
`angle`:
the angle of rotation of the symbol
============================== ===============================================
For backward compatibility, the form (`verts`, 0) is also accepted,
but it is equivalent to just `verts` for giving a raw set of vertices
that define the shape.
`None` is the default, which means 'nothing'. However, this table is
referred to from other docs for the valid marker inputs, and in those
cases `None` still means 'default'.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
from collections import Sized
import numpy as np
from . import rcParams
from .cbook import is_math_text, is_numlike
from .path import Path
from .transforms import IdentityTransform, Affine2D
# special-purpose marker identifiers:
(TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN,
CARETLEFT, CARETRIGHT, CARETUP, CARETDOWN,
CARETLEFTBASE, CARETRIGHTBASE, CARETUPBASE, CARETDOWNBASE) = xrange(12)
_empty_path = Path(np.empty((0, 2)))
class MarkerStyle(object):
markers = {
'.': 'point',
',': 'pixel',
'o': 'circle',
'v': 'triangle_down',
'^': 'triangle_up',
'<': 'triangle_left',
'>': 'triangle_right',
'1': 'tri_down',
'2': 'tri_up',
'3': 'tri_left',
'4': 'tri_right',
'8': 'octagon',
's': 'square',
'p': 'pentagon',
'*': 'star',
'h': 'hexagon1',
'H': 'hexagon2',
'+': 'plus',
'x': 'x',
'D': 'diamond',
'd': 'thin_diamond',
'|': 'vline',
'_': 'hline',
'P': 'plus_filled',
'X': 'x_filled',
TICKLEFT: 'tickleft',
TICKRIGHT: 'tickright',
TICKUP: 'tickup',
TICKDOWN: 'tickdown',
CARETLEFT: 'caretleft',
CARETRIGHT: 'caretright',
CARETUP: 'caretup',
CARETDOWN: 'caretdown',
CARETLEFTBASE: 'caretleftbase',
CARETRIGHTBASE: 'caretrightbase',
CARETUPBASE: 'caretupbase',
CARETDOWNBASE: 'caretdownbase',
"None": 'nothing',
None: 'nothing',
' ': 'nothing',
'': 'nothing'
}
# Just used for informational purposes. is_filled()
# is calculated in the _set_* functions.
filled_markers = (
'o', 'v', '^', '<', '>', '8', 's', 'p', '*', 'h', 'H', 'D', 'd',
'P', 'X')
fillstyles = ('full', 'left', 'right', 'bottom', 'top', 'none')
_half_fillstyles = ('left', 'right', 'bottom', 'top')
# TODO: Is this ever used as a non-constant?
_point_size_reduction = 0.5
def __init__(self, marker=None, fillstyle=None):
"""
MarkerStyle
Attributes
----------
        markers : list of known marker styles
fillstyles : list of known fillstyles
filled_markers : list of known filled markers.
Parameters
----------
marker : string or array_like, optional, default: None
See the descriptions of possible markers in the module docstring.
fillstyle : string, optional, default: 'full'
'full', 'left", 'right', 'bottom', 'top', 'none'
"""
self._marker_function = None
self.set_fillstyle(fillstyle)
self.set_marker(marker)
def __getstate__(self):
d = self.__dict__.copy()
d.pop('_marker_function')
return d
def __setstate__(self, statedict):
self.__dict__ = statedict
self.set_marker(self._marker)
def _recache(self):
if self._marker_function is None:
return
self._path = _empty_path
self._transform = IdentityTransform()
self._alt_path = None
self._alt_transform = None
self._snap_threshold = None
self._joinstyle = 'round'
self._capstyle = 'butt'
self._filled = True
self._marker_function()
if six.PY3:
def __bool__(self):
return bool(len(self._path.vertices))
else:
def __nonzero__(self):
return bool(len(self._path.vertices))
def is_filled(self):
return self._filled
def get_fillstyle(self):
return self._fillstyle
def set_fillstyle(self, fillstyle):
"""
Sets fillstyle
Parameters
----------
fillstyle : string amongst known fillstyles
"""
if fillstyle is None:
fillstyle = rcParams['markers.fillstyle']
if fillstyle not in self.fillstyles:
raise ValueError("Unrecognized fillstyle %s"
% ' '.join(self.fillstyles))
self._fillstyle = fillstyle
self._recache()
def get_joinstyle(self):
return self._joinstyle
def get_capstyle(self):
return self._capstyle
def get_marker(self):
return self._marker
def set_marker(self, marker):
if (isinstance(marker, np.ndarray) and marker.ndim == 2 and
marker.shape[1] == 2):
self._marker_function = self._set_vertices
elif (isinstance(marker, Sized) and len(marker) in (2, 3) and
marker[1] in (0, 1, 2, 3)):
self._marker_function = self._set_tuple_marker
elif (not isinstance(marker, (np.ndarray, list)) and
marker in self.markers):
self._marker_function = getattr(
self, '_set_' + self.markers[marker])
elif isinstance(marker, six.string_types) and is_math_text(marker):
self._marker_function = self._set_mathtext_path
elif isinstance(marker, Path):
self._marker_function = self._set_path_marker
else:
try:
Path(marker)
self._marker_function = self._set_vertices
except ValueError:
raise ValueError('Unrecognized marker style'
' {0}'.format(marker))
self._marker = marker
self._recache()
def get_path(self):
return self._path
def get_transform(self):
return self._transform.frozen()
def get_alt_path(self):
return self._alt_path
def get_alt_transform(self):
return self._alt_transform.frozen()
def get_snap_threshold(self):
return self._snap_threshold
def _set_nothing(self):
self._filled = False
def _set_custom_marker(self, path):
verts = path.vertices
rescale = max(np.max(np.abs(verts[:, 0])),
np.max(np.abs(verts[:, 1])))
self._transform = Affine2D().scale(0.5 / rescale)
self._path = path
def _set_path_marker(self):
self._set_custom_marker(self._marker)
def _set_vertices(self):
verts = self._marker
marker = Path(verts)
self._set_custom_marker(marker)
def _set_tuple_marker(self):
marker = self._marker
if is_numlike(marker[0]):
if len(marker) == 2:
numsides, rotation = marker[0], 0.0
elif len(marker) == 3:
numsides, rotation = marker[0], marker[2]
symstyle = marker[1]
if symstyle == 0:
self._path = Path.unit_regular_polygon(numsides)
self._joinstyle = 'miter'
elif symstyle == 1:
self._path = Path.unit_regular_star(numsides)
self._joinstyle = 'bevel'
elif symstyle == 2:
self._path = Path.unit_regular_asterisk(numsides)
self._filled = False
self._joinstyle = 'bevel'
elif symstyle == 3:
self._path = Path.unit_circle()
self._transform = Affine2D().scale(0.5).rotate_deg(rotation)
else:
verts = np.asarray(marker[0])
path = Path(verts)
self._set_custom_marker(path)
def _set_mathtext_path(self):
"""
Draws mathtext markers '$...$' using TextPath object.
Submitted by tcb
"""
from matplotlib.text import TextPath
from matplotlib.font_manager import FontProperties
# again, the properties could be initialised just once outside
# this function
# Font size is irrelevant here, it will be rescaled based on
# the drawn size later
props = FontProperties(size=1.0)
text = TextPath(xy=(0, 0), s=self.get_marker(), fontproperties=props,
usetex=rcParams['text.usetex'])
if len(text.vertices) == 0:
return
xmin, ymin = text.vertices.min(axis=0)
xmax, ymax = text.vertices.max(axis=0)
width = xmax - xmin
height = ymax - ymin
max_dim = max(width, height)
self._transform = Affine2D() \
.translate(-xmin + 0.5 * -width, -ymin + 0.5 * -height) \
.scale(1.0 / max_dim)
self._path = text
self._snap = False
def _half_fill(self):
fs = self.get_fillstyle()
result = fs in self._half_fillstyles
return result
def _set_circle(self, reduction=1.0):
self._transform = Affine2D().scale(0.5 * reduction)
self._snap_threshold = np.inf
fs = self.get_fillstyle()
if not self._half_fill():
self._path = Path.unit_circle()
else:
# build a right-half circle
if fs == 'bottom':
rotate = 270.
elif fs == 'top':
rotate = 90.
elif fs == 'left':
rotate = 180.
else:
rotate = 0.
self._path = self._alt_path = Path.unit_circle_righthalf()
self._transform.rotate_deg(rotate)
self._alt_transform = self._transform.frozen().rotate_deg(180.)
def _set_pixel(self):
self._path = Path.unit_rectangle()
# Ideally, you'd want -0.5, -0.5 here, but then the snapping
# algorithm in the Agg backend will round this to a 2x2
# rectangle from (-1, -1) to (1, 1). By offsetting it
# slightly, we can force it to be (0, 0) to (1, 1), which both
# makes it only be a single pixel and places it correctly
# aligned to 1-width stroking (i.e. the ticks). This hack is
# the best of a number of bad alternatives, mainly because the
# backends are not aware of what marker is actually being used
# beyond just its path data.
self._transform = Affine2D().translate(-0.49999, -0.49999)
self._snap_threshold = None
def _set_point(self):
self._set_circle(reduction=self._point_size_reduction)
_triangle_path = Path(
[[0.0, 1.0], [-1.0, -1.0], [1.0, -1.0], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
    # Going down halfway looks too small. Golden ratio is too far.
_triangle_path_u = Path(
[[0.0, 1.0], [-3 / 5., -1 / 5.], [3 / 5., -1 / 5.], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
_triangle_path_d = Path(
[[-3 / 5., -1 / 5.], [3 / 5., -1 / 5.], [1.0, -1.0], [-1.0, -1.0],
[-3 / 5., -1 / 5.]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
_triangle_path_l = Path(
[[0.0, 1.0], [0.0, -1.0], [-1.0, -1.0], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
_triangle_path_r = Path(
[[0.0, 1.0], [0.0, -1.0], [1.0, -1.0], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
def _set_triangle(self, rot, skip):
self._transform = Affine2D().scale(0.5, 0.5).rotate_deg(rot)
self._snap_threshold = 5.0
fs = self.get_fillstyle()
if not self._half_fill():
self._path = self._triangle_path
else:
mpaths = [self._triangle_path_u,
self._triangle_path_l,
self._triangle_path_d,
self._triangle_path_r]
if fs == 'top':
self._path = mpaths[(0 + skip) % 4]
self._alt_path = mpaths[(2 + skip) % 4]
elif fs == 'bottom':
self._path = mpaths[(2 + skip) % 4]
self._alt_path = mpaths[(0 + skip) % 4]
elif fs == 'left':
self._path = mpaths[(1 + skip) % 4]
self._alt_path = mpaths[(3 + skip) % 4]
else:
self._path = mpaths[(3 + skip) % 4]
self._alt_path = mpaths[(1 + skip) % 4]
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_triangle_up(self):
return self._set_triangle(0.0, 0)
def _set_triangle_down(self):
return self._set_triangle(180.0, 2)
def _set_triangle_left(self):
return self._set_triangle(90.0, 3)
def _set_triangle_right(self):
return self._set_triangle(270.0, 1)
def _set_square(self):
self._transform = Affine2D().translate(-0.5, -0.5)
self._snap_threshold = 2.0
fs = self.get_fillstyle()
if not self._half_fill():
self._path = Path.unit_rectangle()
else:
# build a bottom filled square out of two rectangles, one
# filled. Use the rotation to support left, right, bottom
# or top
if fs == 'bottom':
rotate = 0.
elif fs == 'top':
rotate = 180.
elif fs == 'left':
rotate = 270.
else:
rotate = 90.
self._path = Path([[0.0, 0.0], [1.0, 0.0], [1.0, 0.5],
[0.0, 0.5], [0.0, 0.0]])
self._alt_path = Path([[0.0, 0.5], [1.0, 0.5], [1.0, 1.0],
[0.0, 1.0], [0.0, 0.5]])
self._transform.rotate_deg(rotate)
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_diamond(self):
self._transform = Affine2D().translate(-0.5, -0.5).rotate_deg(45)
self._snap_threshold = 5.0
fs = self.get_fillstyle()
if not self._half_fill():
self._path = Path.unit_rectangle()
else:
self._path = Path([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 0.0]])
self._alt_path = Path([[0.0, 0.0], [0.0, 1.0],
[1.0, 1.0], [0.0, 0.0]])
if fs == 'bottom':
rotate = 270.
elif fs == 'top':
rotate = 90.
elif fs == 'left':
rotate = 180.
else:
rotate = 0.
self._transform.rotate_deg(rotate)
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_thin_diamond(self):
self._set_diamond()
self._transform.scale(0.6, 1.0)
def _set_pentagon(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 5.0
polypath = Path.unit_regular_polygon(5)
fs = self.get_fillstyle()
if not self._half_fill():
self._path = polypath
else:
verts = polypath.vertices
y = (1 + np.sqrt(5)) / 4.
top = Path([verts[0], verts[1], verts[4], verts[0]])
bottom = Path([verts[1], verts[2], verts[3], verts[4], verts[1]])
left = Path([verts[0], verts[1], verts[2], [0, -y], verts[0]])
right = Path([verts[0], verts[4], verts[3], [0, -y], verts[0]])
if fs == 'top':
mpath, mpath_alt = top, bottom
elif fs == 'bottom':
mpath, mpath_alt = bottom, top
elif fs == 'left':
mpath, mpath_alt = left, right
else:
mpath, mpath_alt = right, left
self._path = mpath
self._alt_path = mpath_alt
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_star(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 5.0
fs = self.get_fillstyle()
polypath = Path.unit_regular_star(5, innerCircle=0.381966)
if not self._half_fill():
self._path = polypath
else:
verts = polypath.vertices
top = Path(np.vstack((verts[0:4, :], verts[7:10, :], verts[0])))
bottom = Path(np.vstack((verts[3:8, :], verts[3])))
left = Path(np.vstack((verts[0:6, :], verts[0])))
right = Path(np.vstack((verts[0], verts[5:10, :], verts[0])))
if fs == 'top':
mpath, mpath_alt = top, bottom
elif fs == 'bottom':
mpath, mpath_alt = bottom, top
elif fs == 'left':
mpath, mpath_alt = left, right
else:
mpath, mpath_alt = right, left
self._path = mpath
self._alt_path = mpath_alt
self._alt_transform = self._transform
self._joinstyle = 'bevel'
def _set_hexagon1(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = None
fs = self.get_fillstyle()
polypath = Path.unit_regular_polygon(6)
if not self._half_fill():
self._path = polypath
else:
verts = polypath.vertices
# not drawing inside lines
x = np.abs(np.cos(5 * np.pi / 6.))
top = Path(np.vstack(([-x, 0], verts[(1, 0, 5), :], [x, 0])))
bottom = Path(np.vstack(([-x, 0], verts[2:5, :], [x, 0])))
left = Path(verts[(0, 1, 2, 3), :])
right = Path(verts[(0, 5, 4, 3), :])
if fs == 'top':
mpath, mpath_alt = top, bottom
elif fs == 'bottom':
mpath, mpath_alt = bottom, top
elif fs == 'left':
mpath, mpath_alt = left, right
else:
mpath, mpath_alt = right, left
self._path = mpath
self._alt_path = mpath_alt
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_hexagon2(self):
self._transform = Affine2D().scale(0.5).rotate_deg(30)
self._snap_threshold = None
fs = self.get_fillstyle()
polypath = Path.unit_regular_polygon(6)
if not self._half_fill():
self._path = polypath
else:
verts = polypath.vertices
# not drawing inside lines
x, y = np.sqrt(3) / 4, 3 / 4.
top = Path(verts[(1, 0, 5, 4, 1), :])
bottom = Path(verts[(1, 2, 3, 4), :])
left = Path(np.vstack(([x, y], verts[(0, 1, 2), :],
[-x, -y], [x, y])))
right = Path(np.vstack(([x, y], verts[(5, 4, 3), :], [-x, -y])))
if fs == 'top':
mpath, mpath_alt = top, bottom
elif fs == 'bottom':
mpath, mpath_alt = bottom, top
elif fs == 'left':
mpath, mpath_alt = left, right
else:
mpath, mpath_alt = right, left
self._path = mpath
self._alt_path = mpath_alt
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_octagon(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 5.0
fs = self.get_fillstyle()
polypath = Path.unit_regular_polygon(8)
if not self._half_fill():
self._transform.rotate_deg(22.5)
self._path = polypath
else:
x = np.sqrt(2.) / 4.
half = Path([[0, -1], [0, 1], [-x, 1], [-1, x],
[-1, -x], [-x, -1], [0, -1]])
if fs == 'bottom':
rotate = 90.
elif fs == 'top':
rotate = 270.
elif fs == 'right':
rotate = 180.
else:
rotate = 0.
self._transform.rotate_deg(rotate)
self._path = self._alt_path = half
self._alt_transform = self._transform.frozen().rotate_deg(180.0)
self._joinstyle = 'miter'
_line_marker_path = Path([[0.0, -1.0], [0.0, 1.0]])
def _set_vline(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 1.0
self._filled = False
self._path = self._line_marker_path
def _set_hline(self):
self._set_vline()
self._transform = self._transform.rotate_deg(90)
_tickhoriz_path = Path([[0.0, 0.0], [1.0, 0.0]])
def _set_tickleft(self):
self._transform = Affine2D().scale(-1.0, 1.0)
self._snap_threshold = 1.0
self._filled = False
self._path = self._tickhoriz_path
def _set_tickright(self):
self._transform = Affine2D().scale(1.0, 1.0)
self._snap_threshold = 1.0
self._filled = False
self._path = self._tickhoriz_path
_tickvert_path = Path([[-0.0, 0.0], [-0.0, 1.0]])
def _set_tickup(self):
self._transform = Affine2D().scale(1.0, 1.0)
self._snap_threshold = 1.0
self._filled = False
self._path = self._tickvert_path
def _set_tickdown(self):
self._transform = Affine2D().scale(1.0, -1.0)
self._snap_threshold = 1.0
self._filled = False
self._path = self._tickvert_path
_tri_path = Path([[0.0, 0.0], [0.0, -1.0],
[0.0, 0.0], [0.8, 0.5],
[0.0, 0.0], [-0.8, 0.5]],
[Path.MOVETO, Path.LINETO,
Path.MOVETO, Path.LINETO,
Path.MOVETO, Path.LINETO])
def _set_tri_down(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 5.0
self._filled = False
self._path = self._tri_path
def _set_tri_up(self):
self._set_tri_down()
self._transform = self._transform.rotate_deg(180)
def _set_tri_left(self):
self._set_tri_down()
self._transform = self._transform.rotate_deg(270)
def _set_tri_right(self):
self._set_tri_down()
self._transform = self._transform.rotate_deg(90)
_caret_path = Path([[-1.0, 1.5], [0.0, 0.0], [1.0, 1.5]])
def _set_caretdown(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 3.0
self._filled = False
self._path = self._caret_path
self._joinstyle = 'miter'
def _set_caretup(self):
self._set_caretdown()
self._transform = self._transform.rotate_deg(180)
def _set_caretleft(self):
self._set_caretdown()
self._transform = self._transform.rotate_deg(270)
def _set_caretright(self):
self._set_caretdown()
self._transform = self._transform.rotate_deg(90)
_caret_path_base = Path([[-1.0, 0.0], [0.0, -1.5], [1.0, 0]])
def _set_caretdownbase(self):
self._set_caretdown()
self._path = self._caret_path_base
def _set_caretupbase(self):
self._set_caretdownbase()
self._transform = self._transform.rotate_deg(180)
def _set_caretleftbase(self):
self._set_caretdownbase()
self._transform = self._transform.rotate_deg(270)
def _set_caretrightbase(self):
self._set_caretdownbase()
self._transform = self._transform.rotate_deg(90)
_plus_path = Path([[-1.0, 0.0], [1.0, 0.0],
[0.0, -1.0], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO,
Path.MOVETO, Path.LINETO])
def _set_plus(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 1.0
self._filled = False
self._path = self._plus_path
_x_path = Path([[-1.0, -1.0], [1.0, 1.0],
[-1.0, 1.0], [1.0, -1.0]],
[Path.MOVETO, Path.LINETO,
Path.MOVETO, Path.LINETO])
def _set_x(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 3.0
self._filled = False
self._path = self._x_path
_plus_filled_path = Path([(1/3, 0), (2/3, 0), (2/3, 1/3),
(1, 1/3), (1, 2/3), (2/3, 2/3),
(2/3, 1), (1/3, 1), (1/3, 2/3),
(0, 2/3), (0, 1/3), (1/3, 1/3),
(1/3, 0)],
[Path.MOVETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.CLOSEPOLY])
_plus_filled_path_t = Path([(1, 1/2), (1, 2/3), (2/3, 2/3),
(2/3, 1), (1/3, 1), (1/3, 2/3),
(0, 2/3), (0, 1/2), (1, 1/2)],
[Path.MOVETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO,
Path.CLOSEPOLY])
def _set_plus_filled(self):
self._transform = Affine2D().translate(-0.5, -0.5)
self._snap_threshold = 5.0
self._joinstyle = 'miter'
fs = self.get_fillstyle()
if not self._half_fill():
self._path = self._plus_filled_path
else:
# Rotate top half path to support all partitions
if fs == 'top':
rotate, rotate_alt = 0, 180
elif fs == 'bottom':
rotate, rotate_alt = 180, 0
elif fs == 'left':
rotate, rotate_alt = 90, 270
else:
rotate, rotate_alt = 270, 90
self._path = self._plus_filled_path_t
self._alt_path = self._plus_filled_path_t
self._alt_transform = Affine2D().translate(-0.5, -0.5)
self._transform.rotate_deg(rotate)
self._alt_transform.rotate_deg(rotate_alt)
_x_filled_path = Path([(0.25, 0), (0.5, 0.25), (0.75, 0), (1, 0.25),
(0.75, 0.5), (1, 0.75), (0.75, 1), (0.5, 0.75),
(0.25, 1), (0, 0.75), (0.25, 0.5), (0, 0.25),
(0.25, 0)],
[Path.MOVETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.CLOSEPOLY])
_x_filled_path_t = Path([(0.75, 0.5), (1, 0.75), (0.75, 1),
(0.5, 0.75), (0.25, 1), (0, 0.75),
(0.25, 0.5), (0.75, 0.5)],
[Path.MOVETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.CLOSEPOLY])
def _set_x_filled(self):
self._transform = Affine2D().translate(-0.5, -0.5)
self._snap_threshold = 5.0
self._joinstyle = 'miter'
fs = self.get_fillstyle()
if not self._half_fill():
self._path = self._x_filled_path
else:
# Rotate top half path to support all partitions
if fs == 'top':
rotate, rotate_alt = 0, 180
elif fs == 'bottom':
rotate, rotate_alt = 180, 0
elif fs == 'left':
rotate, rotate_alt = 90, 270
else:
rotate, rotate_alt = 270, 90
self._path = self._x_filled_path_t
self._alt_path = self._x_filled_path_t
self._alt_transform = Affine2D().translate(-0.5, -0.5)
self._transform.rotate_deg(rotate)
self._alt_transform.rotate_deg(rotate_alt)
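# Query sketch (hypothetical helper, not part of matplotlib): a MarkerStyle
# can be inspected directly for the Path and fill information the renderer
# will use when drawing the marker.
def _example_markerstyle_query():
    m = MarkerStyle('o', fillstyle='left')
    # the primary half and the alternate half (used for markerfacecoloralt)
    return m.get_path(), m.get_alt_path(), m.is_filled()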
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_cm.py |
"""
Nothing here but dictionaries for generating LinearSegmentedColormaps,
and a dictionary of these dictionaries.
Documentation for each is in pyplot.colormaps(). Please update this
with the purpose and type of your colormap if you add data for one here.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
_binary_data = {
'red': ((0., 1., 1.), (1., 0., 0.)),
'green': ((0., 1., 1.), (1., 0., 0.)),
'blue': ((0., 1., 1.), (1., 0., 0.))
}
_autumn_data = {'red': ((0., 1.0, 1.0), (1.0, 1.0, 1.0)),
'green': ((0., 0., 0.), (1.0, 1.0, 1.0)),
'blue': ((0., 0., 0.), (1.0, 0., 0.))}
_bone_data = {'red': ((0., 0., 0.),
(0.746032, 0.652778, 0.652778),
(1.0, 1.0, 1.0)),
'green': ((0., 0., 0.),
(0.365079, 0.319444, 0.319444),
(0.746032, 0.777778, 0.777778),
(1.0, 1.0, 1.0)),
'blue': ((0., 0., 0.),
(0.365079, 0.444444, 0.444444),
(1.0, 1.0, 1.0))}
_cool_data = {'red': ((0., 0., 0.), (1.0, 1.0, 1.0)),
'green': ((0., 1., 1.), (1.0, 0., 0.)),
'blue': ((0., 1., 1.), (1.0, 1., 1.))}
_copper_data = {'red': ((0., 0., 0.),
(0.809524, 1.000000, 1.000000),
(1.0, 1.0, 1.0)),
'green': ((0., 0., 0.),
(1.0, 0.7812, 0.7812)),
'blue': ((0., 0., 0.),
(1.0, 0.4975, 0.4975))}
_flag_data = {
'red': lambda x: 0.75 * np.sin((x * 31.5 + 0.25) * np.pi) + 0.5,
'green': lambda x: np.sin(x * 31.5 * np.pi),
'blue': lambda x: 0.75 * np.sin((x * 31.5 - 0.25) * np.pi) + 0.5,
}
_prism_data = {
'red': lambda x: 0.75 * np.sin((x * 20.9 + 0.25) * np.pi) + 0.67,
'green': lambda x: 0.75 * np.sin((x * 20.9 - 0.25) * np.pi) + 0.33,
'blue': lambda x: -1.1 * np.sin((x * 20.9) * np.pi),
}
def cubehelix(gamma=1.0, s=0.5, r=-1.5, h=1.0):
"""Return custom data dictionary of (r,g,b) conversion functions, which
can be used with :func:`register_cmap`, for the cubehelix color scheme.
Unlike most other color schemes cubehelix was designed by D.A. Green to
be monotonically increasing in terms of perceived brightness.
Also, when printed on a black and white postscript printer, the scheme
results in a greyscale with monotonically increasing brightness.
This color scheme is named cubehelix because the r,g,b values produced
can be visualised as a squashed helix around the diagonal in the
r,g,b color cube.
For a unit color cube (i.e. 3-D coordinates for r,g,b each in the
range 0 to 1) the color scheme starts at (r,g,b) = (0,0,0), i.e. black,
and finishes at (r,g,b) = (1,1,1), i.e. white. For some fraction *x*,
between 0 and 1, the color is the corresponding grey value at that
fraction along the black to white diagonal (x,x,x) plus a color
element. This color element is calculated in a plane of constant
perceived intensity and controlled by the following parameters.
Optional keyword arguments:
========= =======================================================
Keyword Description
========= =======================================================
gamma gamma factor to emphasise either low intensity values
(gamma < 1), or high intensity values (gamma > 1);
defaults to 1.0.
s the start color; defaults to 0.5 (i.e. purple).
r the number of r,g,b rotations in color that are made
from the start to the end of the color scheme; defaults
to -1.5 (i.e. -> B -> G -> R -> B).
h the hue parameter which controls how saturated the
colors are. If this parameter is zero then the color
scheme is purely a greyscale; defaults to 1.0.
========= =======================================================
"""
def get_color_function(p0, p1):
def color(x):
# Apply gamma factor to emphasise low or high intensity values
xg = x ** gamma
# Calculate amplitude and angle of deviation from the black
# to white diagonal in the plane of constant
# perceived intensity.
a = h * xg * (1 - xg) / 2
phi = 2 * np.pi * (s / 3 + r * x)
return xg + a * (p0 * np.cos(phi) + p1 * np.sin(phi))
return color
return {
'red': get_color_function(-0.14861, 1.78277),
'green': get_color_function(-0.29227, -0.90649),
'blue': get_color_function(1.97294, 0.0),
}
_cubehelix_data = cubehelix()
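# A minimal sketch of registering a custom cubehelix variant built from the
# function above ('my_cubehelix' is an illustrative name, not part of
# matplotlib):
#
# >>> import matplotlib.cm as cm
# >>> import matplotlib.colors as colors
# >>> data = cubehelix(gamma=0.8, s=2.0, r=1.0, h=1.5)
# >>> cmap = colors.LinearSegmentedColormap('my_cubehelix', data)
# >>> cm.register_cmap(cmap=cmap)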
_bwr_data = ((0.0, 0.0, 1.0), (1.0, 1.0, 1.0), (1.0, 0.0, 0.0))
_brg_data = ((0.0, 0.0, 1.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0))
# Gnuplot palette functions
gfunc = {
0: lambda x: 0,
1: lambda x: 0.5,
2: lambda x: 1,
3: lambda x: x,
4: lambda x: x ** 2,
5: lambda x: x ** 3,
6: lambda x: x ** 4,
7: lambda x: np.sqrt(x),
8: lambda x: np.sqrt(np.sqrt(x)),
9: lambda x: np.sin(x * np.pi / 2),
10: lambda x: np.cos(x * np.pi / 2),
11: lambda x: np.abs(x - 0.5),
12: lambda x: (2 * x - 1) ** 2,
13: lambda x: np.sin(x * np.pi),
14: lambda x: np.abs(np.cos(x * np.pi)),
15: lambda x: np.sin(x * 2 * np.pi),
16: lambda x: np.cos(x * 2 * np.pi),
17: lambda x: np.abs(np.sin(x * 2 * np.pi)),
18: lambda x: np.abs(np.cos(x * 2 * np.pi)),
19: lambda x: np.abs(np.sin(x * 4 * np.pi)),
20: lambda x: np.abs(np.cos(x * 4 * np.pi)),
21: lambda x: 3 * x,
22: lambda x: 3 * x - 1,
23: lambda x: 3 * x - 2,
24: lambda x: np.abs(3 * x - 1),
25: lambda x: np.abs(3 * x - 2),
26: lambda x: (3 * x - 1) / 2,
27: lambda x: (3 * x - 2) / 2,
28: lambda x: np.abs((3 * x - 1) / 2),
29: lambda x: np.abs((3 * x - 2) / 2),
30: lambda x: x / 0.32 - 0.78125,
31: lambda x: 2 * x - 0.84,
32: lambda x: gfunc32(x),
33: lambda x: np.abs(2 * x - 0.5),
34: lambda x: 2 * x,
35: lambda x: 2 * x - 0.5,
36: lambda x: 2 * x - 1.
}
def gfunc32(x):
ret = np.zeros(len(x))
m = (x < 0.25)
ret[m] = 4 * x[m]
m = (x >= 0.25) & (x < 0.92)
ret[m] = -2 * x[m] + 1.84
m = (x >= 0.92)
ret[m] = x[m] / 0.08 - 11.5
return ret
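# Each ``gfunc`` entry maps a gnuplot "rgbformulae" index to the corresponding
# ramp function of x in [0, 1]; the palettes below simply pick one formula per
# channel. A small illustration of evaluating the 'gnuplot' combination at a
# few sample points (array input assumed, as used by the colormap machinery):
#
# >>> import numpy as np
# >>> x = np.linspace(0, 1, 5)
# >>> r, g, b = gfunc[7](x), gfunc[5](x), gfunc[15](x)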
_gnuplot_data = {
'red': gfunc[7],
'green': gfunc[5],
'blue': gfunc[15],
}
_gnuplot2_data = {
'red': gfunc[30],
'green': gfunc[31],
'blue': gfunc[32],
}
_ocean_data = {
'red': gfunc[23],
'green': gfunc[28],
'blue': gfunc[3],
}
_afmhot_data = {
'red': gfunc[34],
'green': gfunc[35],
'blue': gfunc[36],
}
_rainbow_data = {
'red': gfunc[33],
'green': gfunc[13],
'blue': gfunc[10],
}
_seismic_data = (
(0.0, 0.0, 0.3), (0.0, 0.0, 1.0),
(1.0, 1.0, 1.0), (1.0, 0.0, 0.0),
(0.5, 0.0, 0.0))
_terrain_data = (
(0.00, (0.2, 0.2, 0.6)),
(0.15, (0.0, 0.6, 1.0)),
(0.25, (0.0, 0.8, 0.4)),
(0.50, (1.0, 1.0, 0.6)),
(0.75, (0.5, 0.36, 0.33)),
(1.00, (1.0, 1.0, 1.0)))
_gray_data = {'red': ((0., 0, 0), (1., 1, 1)),
'green': ((0., 0, 0), (1., 1, 1)),
'blue': ((0., 0, 0), (1., 1, 1))}
_hot_data = {'red': ((0., 0.0416, 0.0416),
(0.365079, 1.000000, 1.000000),
(1.0, 1.0, 1.0)),
'green': ((0., 0., 0.),
(0.365079, 0.000000, 0.000000),
(0.746032, 1.000000, 1.000000),
(1.0, 1.0, 1.0)),
'blue': ((0., 0., 0.),
(0.746032, 0.000000, 0.000000),
(1.0, 1.0, 1.0))}
_hsv_data = {'red': ((0., 1., 1.),
(0.158730, 1.000000, 1.000000),
(0.174603, 0.968750, 0.968750),
(0.333333, 0.031250, 0.031250),
(0.349206, 0.000000, 0.000000),
(0.666667, 0.000000, 0.000000),
(0.682540, 0.031250, 0.031250),
(0.841270, 0.968750, 0.968750),
(0.857143, 1.000000, 1.000000),
(1.0, 1.0, 1.0)),
'green': ((0., 0., 0.),
(0.158730, 0.937500, 0.937500),
(0.174603, 1.000000, 1.000000),
(0.507937, 1.000000, 1.000000),
(0.666667, 0.062500, 0.062500),
(0.682540, 0.000000, 0.000000),
(1.0, 0., 0.)),
'blue': ((0., 0., 0.),
(0.333333, 0.000000, 0.000000),
(0.349206, 0.062500, 0.062500),
(0.507937, 1.000000, 1.000000),
(0.841270, 1.000000, 1.000000),
(0.857143, 0.937500, 0.937500),
(1.0, 0.09375, 0.09375))}
_jet_data = {'red': ((0., 0, 0), (0.35, 0, 0), (0.66, 1, 1), (0.89, 1, 1),
(1, 0.5, 0.5)),
'green': ((0., 0, 0), (0.125, 0, 0), (0.375, 1, 1), (0.64, 1, 1),
(0.91, 0, 0), (1, 0, 0)),
'blue': ((0., 0.5, 0.5), (0.11, 1, 1), (0.34, 1, 1),
(0.65, 0, 0), (1, 0, 0))}
_pink_data = {'red': ((0., 0.1178, 0.1178), (0.015873, 0.195857, 0.195857),
(0.031746, 0.250661, 0.250661),
(0.047619, 0.295468, 0.295468),
(0.063492, 0.334324, 0.334324),
(0.079365, 0.369112, 0.369112),
(0.095238, 0.400892, 0.400892),
(0.111111, 0.430331, 0.430331),
(0.126984, 0.457882, 0.457882),
(0.142857, 0.483867, 0.483867),
(0.158730, 0.508525, 0.508525),
(0.174603, 0.532042, 0.532042),
(0.190476, 0.554563, 0.554563),
(0.206349, 0.576204, 0.576204),
(0.222222, 0.597061, 0.597061),
(0.238095, 0.617213, 0.617213),
(0.253968, 0.636729, 0.636729),
(0.269841, 0.655663, 0.655663),
(0.285714, 0.674066, 0.674066),
(0.301587, 0.691980, 0.691980),
(0.317460, 0.709441, 0.709441),
(0.333333, 0.726483, 0.726483),
(0.349206, 0.743134, 0.743134),
(0.365079, 0.759421, 0.759421),
(0.380952, 0.766356, 0.766356),
(0.396825, 0.773229, 0.773229),
(0.412698, 0.780042, 0.780042),
(0.428571, 0.786796, 0.786796),
(0.444444, 0.793492, 0.793492),
(0.460317, 0.800132, 0.800132),
(0.476190, 0.806718, 0.806718),
(0.492063, 0.813250, 0.813250),
(0.507937, 0.819730, 0.819730),
(0.523810, 0.826160, 0.826160),
(0.539683, 0.832539, 0.832539),
(0.555556, 0.838870, 0.838870),
(0.571429, 0.845154, 0.845154),
(0.587302, 0.851392, 0.851392),
(0.603175, 0.857584, 0.857584),
(0.619048, 0.863731, 0.863731),
(0.634921, 0.869835, 0.869835),
(0.650794, 0.875897, 0.875897),
(0.666667, 0.881917, 0.881917),
(0.682540, 0.887896, 0.887896),
(0.698413, 0.893835, 0.893835),
(0.714286, 0.899735, 0.899735),
(0.730159, 0.905597, 0.905597),
(0.746032, 0.911421, 0.911421),
(0.761905, 0.917208, 0.917208),
(0.777778, 0.922958, 0.922958),
(0.793651, 0.928673, 0.928673),
(0.809524, 0.934353, 0.934353),
(0.825397, 0.939999, 0.939999),
(0.841270, 0.945611, 0.945611),
(0.857143, 0.951190, 0.951190),
(0.873016, 0.956736, 0.956736),
(0.888889, 0.962250, 0.962250),
(0.904762, 0.967733, 0.967733),
(0.920635, 0.973185, 0.973185),
(0.936508, 0.978607, 0.978607),
(0.952381, 0.983999, 0.983999),
(0.968254, 0.989361, 0.989361),
(0.984127, 0.994695, 0.994695), (1.0, 1.0, 1.0)),
'green': ((0., 0., 0.), (0.015873, 0.102869, 0.102869),
(0.031746, 0.145479, 0.145479),
(0.047619, 0.178174, 0.178174),
(0.063492, 0.205738, 0.205738),
(0.079365, 0.230022, 0.230022),
(0.095238, 0.251976, 0.251976),
(0.111111, 0.272166, 0.272166),
(0.126984, 0.290957, 0.290957),
(0.142857, 0.308607, 0.308607),
(0.158730, 0.325300, 0.325300),
(0.174603, 0.341178, 0.341178),
(0.190476, 0.356348, 0.356348),
(0.206349, 0.370899, 0.370899),
(0.222222, 0.384900, 0.384900),
(0.238095, 0.398410, 0.398410),
(0.253968, 0.411476, 0.411476),
(0.269841, 0.424139, 0.424139),
(0.285714, 0.436436, 0.436436),
(0.301587, 0.448395, 0.448395),
(0.317460, 0.460044, 0.460044),
(0.333333, 0.471405, 0.471405),
(0.349206, 0.482498, 0.482498),
(0.365079, 0.493342, 0.493342),
(0.380952, 0.517549, 0.517549),
(0.396825, 0.540674, 0.540674),
(0.412698, 0.562849, 0.562849),
(0.428571, 0.584183, 0.584183),
(0.444444, 0.604765, 0.604765),
(0.460317, 0.624669, 0.624669),
(0.476190, 0.643958, 0.643958),
(0.492063, 0.662687, 0.662687),
(0.507937, 0.680900, 0.680900),
(0.523810, 0.698638, 0.698638),
(0.539683, 0.715937, 0.715937),
(0.555556, 0.732828, 0.732828),
(0.571429, 0.749338, 0.749338),
(0.587302, 0.765493, 0.765493),
(0.603175, 0.781313, 0.781313),
(0.619048, 0.796819, 0.796819),
(0.634921, 0.812029, 0.812029),
(0.650794, 0.826960, 0.826960),
(0.666667, 0.841625, 0.841625),
(0.682540, 0.856040, 0.856040),
(0.698413, 0.870216, 0.870216),
(0.714286, 0.884164, 0.884164),
(0.730159, 0.897896, 0.897896),
(0.746032, 0.911421, 0.911421),
(0.761905, 0.917208, 0.917208),
(0.777778, 0.922958, 0.922958),
(0.793651, 0.928673, 0.928673),
(0.809524, 0.934353, 0.934353),
(0.825397, 0.939999, 0.939999),
(0.841270, 0.945611, 0.945611),
(0.857143, 0.951190, 0.951190),
(0.873016, 0.956736, 0.956736),
(0.888889, 0.962250, 0.962250),
(0.904762, 0.967733, 0.967733),
(0.920635, 0.973185, 0.973185),
(0.936508, 0.978607, 0.978607),
(0.952381, 0.983999, 0.983999),
(0.968254, 0.989361, 0.989361),
(0.984127, 0.994695, 0.994695), (1.0, 1.0, 1.0)),
'blue': ((0., 0., 0.), (0.015873, 0.102869, 0.102869),
(0.031746, 0.145479, 0.145479),
(0.047619, 0.178174, 0.178174),
(0.063492, 0.205738, 0.205738),
(0.079365, 0.230022, 0.230022),
(0.095238, 0.251976, 0.251976),
(0.111111, 0.272166, 0.272166),
(0.126984, 0.290957, 0.290957),
(0.142857, 0.308607, 0.308607),
(0.158730, 0.325300, 0.325300),
(0.174603, 0.341178, 0.341178),
(0.190476, 0.356348, 0.356348),
(0.206349, 0.370899, 0.370899),
(0.222222, 0.384900, 0.384900),
(0.238095, 0.398410, 0.398410),
(0.253968, 0.411476, 0.411476),
(0.269841, 0.424139, 0.424139),
(0.285714, 0.436436, 0.436436),
(0.301587, 0.448395, 0.448395),
(0.317460, 0.460044, 0.460044),
(0.333333, 0.471405, 0.471405),
(0.349206, 0.482498, 0.482498),
(0.365079, 0.493342, 0.493342),
(0.380952, 0.503953, 0.503953),
(0.396825, 0.514344, 0.514344),
(0.412698, 0.524531, 0.524531),
(0.428571, 0.534522, 0.534522),
(0.444444, 0.544331, 0.544331),
(0.460317, 0.553966, 0.553966),
(0.476190, 0.563436, 0.563436),
(0.492063, 0.572750, 0.572750),
(0.507937, 0.581914, 0.581914),
(0.523810, 0.590937, 0.590937),
(0.539683, 0.599824, 0.599824),
(0.555556, 0.608581, 0.608581),
(0.571429, 0.617213, 0.617213),
(0.587302, 0.625727, 0.625727),
(0.603175, 0.634126, 0.634126),
(0.619048, 0.642416, 0.642416),
(0.634921, 0.650600, 0.650600),
(0.650794, 0.658682, 0.658682),
(0.666667, 0.666667, 0.666667),
(0.682540, 0.674556, 0.674556),
(0.698413, 0.682355, 0.682355),
(0.714286, 0.690066, 0.690066),
(0.730159, 0.697691, 0.697691),
(0.746032, 0.705234, 0.705234),
(0.761905, 0.727166, 0.727166),
(0.777778, 0.748455, 0.748455),
(0.793651, 0.769156, 0.769156),
(0.809524, 0.789314, 0.789314),
(0.825397, 0.808969, 0.808969),
(0.841270, 0.828159, 0.828159),
(0.857143, 0.846913, 0.846913),
(0.873016, 0.865261, 0.865261),
(0.888889, 0.883229, 0.883229),
(0.904762, 0.900837, 0.900837),
(0.920635, 0.918109, 0.918109),
(0.936508, 0.935061, 0.935061),
(0.952381, 0.951711, 0.951711),
(0.968254, 0.968075, 0.968075),
(0.984127, 0.984167, 0.984167), (1.0, 1.0, 1.0))}
_spring_data = {'red': ((0., 1., 1.), (1.0, 1.0, 1.0)),
'green': ((0., 0., 0.), (1.0, 1.0, 1.0)),
'blue': ((0., 1., 1.), (1.0, 0.0, 0.0))}
_summer_data = {'red': ((0., 0., 0.), (1.0, 1.0, 1.0)),
'green': ((0., 0.5, 0.5), (1.0, 1.0, 1.0)),
'blue': ((0., 0.4, 0.4), (1.0, 0.4, 0.4))}
_winter_data = {'red': ((0., 0., 0.), (1.0, 0.0, 0.0)),
'green': ((0., 0., 0.), (1.0, 1.0, 1.0)),
'blue': ((0., 1., 1.), (1.0, 0.5, 0.5))}
_nipy_spectral_data = {
'red': [(0.0, 0.0, 0.0), (0.05, 0.4667, 0.4667),
(0.10, 0.5333, 0.5333), (0.15, 0.0, 0.0),
(0.20, 0.0, 0.0), (0.25, 0.0, 0.0),
(0.30, 0.0, 0.0), (0.35, 0.0, 0.0),
(0.40, 0.0, 0.0), (0.45, 0.0, 0.0),
(0.50, 0.0, 0.0), (0.55, 0.0, 0.0),
(0.60, 0.0, 0.0), (0.65, 0.7333, 0.7333),
(0.70, 0.9333, 0.9333), (0.75, 1.0, 1.0),
(0.80, 1.0, 1.0), (0.85, 1.0, 1.0),
(0.90, 0.8667, 0.8667), (0.95, 0.80, 0.80),
(1.0, 0.80, 0.80)],
'green': [(0.0, 0.0, 0.0), (0.05, 0.0, 0.0),
(0.10, 0.0, 0.0), (0.15, 0.0, 0.0),
(0.20, 0.0, 0.0), (0.25, 0.4667, 0.4667),
(0.30, 0.6000, 0.6000), (0.35, 0.6667, 0.6667),
(0.40, 0.6667, 0.6667), (0.45, 0.6000, 0.6000),
(0.50, 0.7333, 0.7333), (0.55, 0.8667, 0.8667),
(0.60, 1.0, 1.0), (0.65, 1.0, 1.0),
(0.70, 0.9333, 0.9333), (0.75, 0.8000, 0.8000),
(0.80, 0.6000, 0.6000), (0.85, 0.0, 0.0),
(0.90, 0.0, 0.0), (0.95, 0.0, 0.0),
(1.0, 0.80, 0.80)],
'blue': [(0.0, 0.0, 0.0), (0.05, 0.5333, 0.5333),
(0.10, 0.6000, 0.6000), (0.15, 0.6667, 0.6667),
(0.20, 0.8667, 0.8667), (0.25, 0.8667, 0.8667),
(0.30, 0.8667, 0.8667), (0.35, 0.6667, 0.6667),
(0.40, 0.5333, 0.5333), (0.45, 0.0, 0.0),
(0.5, 0.0, 0.0), (0.55, 0.0, 0.0),
(0.60, 0.0, 0.0), (0.65, 0.0, 0.0),
(0.70, 0.0, 0.0), (0.75, 0.0, 0.0),
(0.80, 0.0, 0.0), (0.85, 0.0, 0.0),
(0.90, 0.0, 0.0), (0.95, 0.0, 0.0),
(1.0, 0.80, 0.80)],
}
# 34 colormaps based on color specifications and designs
# developed by Cynthia Brewer (http://colorbrewer.org).
# The ColorBrewer palettes have been included under the terms
# of an Apache-style license (for details, see the file
# LICENSE_COLORBREWER in the license directory of the matplotlib
# source distribution).
# RGB values taken from Brewer's Excel sheet, divided by 255
_Blues_data = (
(0.96862745098039216, 0.98431372549019602, 1.0 ),
(0.87058823529411766, 0.92156862745098034, 0.96862745098039216),
(0.77647058823529413, 0.85882352941176465, 0.93725490196078431),
(0.61960784313725492, 0.792156862745098 , 0.88235294117647056),
(0.41960784313725491, 0.68235294117647061, 0.83921568627450982),
(0.25882352941176473, 0.5725490196078431 , 0.77647058823529413),
(0.12941176470588237, 0.44313725490196076, 0.70980392156862748),
(0.03137254901960784, 0.31764705882352939, 0.61176470588235299),
(0.03137254901960784, 0.18823529411764706, 0.41960784313725491)
)
_BrBG_data = (
(0.32941176470588235, 0.18823529411764706, 0.0196078431372549 ),
(0.5490196078431373 , 0.31764705882352939, 0.0392156862745098 ),
(0.74901960784313726, 0.50588235294117645, 0.17647058823529413),
(0.87450980392156863, 0.76078431372549016, 0.49019607843137253),
(0.96470588235294119, 0.90980392156862744, 0.76470588235294112),
(0.96078431372549022, 0.96078431372549022, 0.96078431372549022),
(0.7803921568627451 , 0.91764705882352937, 0.89803921568627454),
(0.50196078431372548, 0.80392156862745101, 0.75686274509803919),
(0.20784313725490197, 0.59215686274509804, 0.5607843137254902 ),
(0.00392156862745098, 0.4 , 0.36862745098039218),
(0.0 , 0.23529411764705882, 0.18823529411764706)
)
_BuGn_data = (
(0.96862745098039216, 0.9882352941176471 , 0.99215686274509807),
(0.89803921568627454, 0.96078431372549022, 0.97647058823529409),
(0.8 , 0.92549019607843142, 0.90196078431372551),
(0.6 , 0.84705882352941175, 0.78823529411764703),
(0.4 , 0.76078431372549016, 0.64313725490196083),
(0.25490196078431371, 0.68235294117647061, 0.46274509803921571),
(0.13725490196078433, 0.54509803921568623, 0.27058823529411763),
(0.0 , 0.42745098039215684, 0.17254901960784313),
(0.0 , 0.26666666666666666, 0.10588235294117647)
)
_BuPu_data = (
(0.96862745098039216, 0.9882352941176471 , 0.99215686274509807),
(0.8784313725490196 , 0.92549019607843142, 0.95686274509803926),
(0.74901960784313726, 0.82745098039215681, 0.90196078431372551),
(0.61960784313725492, 0.73725490196078436, 0.85490196078431369),
(0.5490196078431373 , 0.58823529411764708, 0.77647058823529413),
(0.5490196078431373 , 0.41960784313725491, 0.69411764705882351),
(0.53333333333333333, 0.25490196078431371, 0.61568627450980395),
(0.50588235294117645, 0.05882352941176471, 0.48627450980392156),
(0.30196078431372547, 0.0 , 0.29411764705882354)
)
_GnBu_data = (
(0.96862745098039216, 0.9882352941176471 , 0.94117647058823528),
(0.8784313725490196 , 0.95294117647058818, 0.85882352941176465),
(0.8 , 0.92156862745098034, 0.77254901960784317),
(0.6588235294117647 , 0.8666666666666667 , 0.70980392156862748),
(0.4823529411764706 , 0.8 , 0.7686274509803922 ),
(0.30588235294117649, 0.70196078431372544, 0.82745098039215681),
(0.16862745098039217, 0.5490196078431373 , 0.74509803921568629),
(0.03137254901960784, 0.40784313725490196, 0.67450980392156867),
(0.03137254901960784, 0.25098039215686274, 0.50588235294117645)
)
_Greens_data = (
(0.96862745098039216, 0.9882352941176471 , 0.96078431372549022),
(0.89803921568627454, 0.96078431372549022, 0.8784313725490196 ),
(0.7803921568627451 , 0.9137254901960784 , 0.75294117647058822),
(0.63137254901960782, 0.85098039215686272, 0.60784313725490191),
(0.45490196078431372, 0.7686274509803922 , 0.46274509803921571),
(0.25490196078431371, 0.6705882352941176 , 0.36470588235294116),
(0.13725490196078433, 0.54509803921568623, 0.27058823529411763),
(0.0 , 0.42745098039215684, 0.17254901960784313),
(0.0 , 0.26666666666666666, 0.10588235294117647)
)
_Greys_data = (
(1.0 , 1.0 , 1.0 ),
(0.94117647058823528, 0.94117647058823528, 0.94117647058823528),
(0.85098039215686272, 0.85098039215686272, 0.85098039215686272),
(0.74117647058823533, 0.74117647058823533, 0.74117647058823533),
(0.58823529411764708, 0.58823529411764708, 0.58823529411764708),
(0.45098039215686275, 0.45098039215686275, 0.45098039215686275),
(0.32156862745098042, 0.32156862745098042, 0.32156862745098042),
(0.14509803921568629, 0.14509803921568629, 0.14509803921568629),
(0.0 , 0.0 , 0.0 )
)
_Oranges_data = (
(1.0 , 0.96078431372549022, 0.92156862745098034),
(0.99607843137254903, 0.90196078431372551, 0.80784313725490198),
(0.99215686274509807, 0.81568627450980391, 0.63529411764705879),
(0.99215686274509807, 0.68235294117647061, 0.41960784313725491),
(0.99215686274509807, 0.55294117647058827, 0.23529411764705882),
(0.94509803921568625, 0.41176470588235292, 0.07450980392156863),
(0.85098039215686272, 0.28235294117647058, 0.00392156862745098),
(0.65098039215686276, 0.21176470588235294, 0.01176470588235294),
(0.49803921568627452, 0.15294117647058825, 0.01568627450980392)
)
_OrRd_data = (
(1.0 , 0.96862745098039216, 0.92549019607843142),
(0.99607843137254903, 0.90980392156862744, 0.78431372549019607),
(0.99215686274509807, 0.83137254901960789, 0.61960784313725492),
(0.99215686274509807, 0.73333333333333328, 0.51764705882352946),
(0.9882352941176471 , 0.55294117647058827, 0.34901960784313724),
(0.93725490196078431, 0.396078431372549 , 0.28235294117647058),
(0.84313725490196079, 0.18823529411764706, 0.12156862745098039),
(0.70196078431372544, 0.0 , 0.0 ),
(0.49803921568627452, 0.0 , 0.0 )
)
_PiYG_data = (
(0.55686274509803924, 0.00392156862745098, 0.32156862745098042),
(0.77254901960784317, 0.10588235294117647, 0.49019607843137253),
(0.87058823529411766, 0.46666666666666667, 0.68235294117647061),
(0.94509803921568625, 0.71372549019607845, 0.85490196078431369),
(0.99215686274509807, 0.8784313725490196 , 0.93725490196078431),
(0.96862745098039216, 0.96862745098039216, 0.96862745098039216),
(0.90196078431372551, 0.96078431372549022, 0.81568627450980391),
(0.72156862745098038, 0.88235294117647056, 0.52549019607843139),
(0.49803921568627452, 0.73725490196078436, 0.25490196078431371),
(0.30196078431372547, 0.5725490196078431 , 0.12941176470588237),
(0.15294117647058825, 0.39215686274509803, 0.09803921568627451)
)
_PRGn_data = (
(0.25098039215686274, 0.0 , 0.29411764705882354),
(0.46274509803921571, 0.16470588235294117, 0.51372549019607838),
(0.6 , 0.4392156862745098 , 0.6705882352941176 ),
(0.76078431372549016, 0.6470588235294118 , 0.81176470588235294),
(0.90588235294117647, 0.83137254901960789, 0.90980392156862744),
(0.96862745098039216, 0.96862745098039216, 0.96862745098039216),
(0.85098039215686272, 0.94117647058823528, 0.82745098039215681),
(0.65098039215686276, 0.85882352941176465, 0.62745098039215685),
(0.35294117647058826, 0.68235294117647061, 0.38039215686274508),
(0.10588235294117647, 0.47058823529411764, 0.21568627450980393),
(0.0 , 0.26666666666666666, 0.10588235294117647)
)
_PuBu_data = (
(1.0 , 0.96862745098039216, 0.98431372549019602),
(0.92549019607843142, 0.90588235294117647, 0.94901960784313721),
(0.81568627450980391, 0.81960784313725488, 0.90196078431372551),
(0.65098039215686276, 0.74117647058823533, 0.85882352941176465),
(0.45490196078431372, 0.66274509803921566, 0.81176470588235294),
(0.21176470588235294, 0.56470588235294117, 0.75294117647058822),
(0.0196078431372549 , 0.4392156862745098 , 0.69019607843137254),
(0.01568627450980392, 0.35294117647058826, 0.55294117647058827),
(0.00784313725490196, 0.2196078431372549 , 0.34509803921568627)
)
_PuBuGn_data = (
(1.0 , 0.96862745098039216, 0.98431372549019602),
(0.92549019607843142, 0.88627450980392153, 0.94117647058823528),
(0.81568627450980391, 0.81960784313725488, 0.90196078431372551),
(0.65098039215686276, 0.74117647058823533, 0.85882352941176465),
(0.40392156862745099, 0.66274509803921566, 0.81176470588235294),
(0.21176470588235294, 0.56470588235294117, 0.75294117647058822),
(0.00784313725490196, 0.50588235294117645, 0.54117647058823526),
(0.00392156862745098, 0.42352941176470588, 0.34901960784313724),
(0.00392156862745098, 0.27450980392156865, 0.21176470588235294)
)
_PuOr_data = (
(0.49803921568627452, 0.23137254901960785, 0.03137254901960784),
(0.70196078431372544, 0.34509803921568627, 0.02352941176470588),
(0.8784313725490196 , 0.50980392156862742, 0.07843137254901961),
(0.99215686274509807, 0.72156862745098038, 0.38823529411764707),
(0.99607843137254903, 0.8784313725490196 , 0.71372549019607845),
(0.96862745098039216, 0.96862745098039216, 0.96862745098039216),
(0.84705882352941175, 0.85490196078431369, 0.92156862745098034),
(0.69803921568627447, 0.6705882352941176 , 0.82352941176470584),
(0.50196078431372548, 0.45098039215686275, 0.67450980392156867),
(0.32941176470588235, 0.15294117647058825, 0.53333333333333333),
(0.17647058823529413, 0.0 , 0.29411764705882354)
)
_PuRd_data = (
(0.96862745098039216, 0.95686274509803926, 0.97647058823529409),
(0.90588235294117647, 0.88235294117647056, 0.93725490196078431),
(0.83137254901960789, 0.72549019607843135, 0.85490196078431369),
(0.78823529411764703, 0.58039215686274515, 0.7803921568627451 ),
(0.87450980392156863, 0.396078431372549 , 0.69019607843137254),
(0.90588235294117647, 0.16078431372549021, 0.54117647058823526),
(0.80784313725490198, 0.07058823529411765, 0.33725490196078434),
(0.59607843137254901, 0.0 , 0.2627450980392157 ),
(0.40392156862745099, 0.0 , 0.12156862745098039)
)
_Purples_data = (
(0.9882352941176471 , 0.98431372549019602, 0.99215686274509807),
(0.93725490196078431, 0.92941176470588238, 0.96078431372549022),
(0.85490196078431369, 0.85490196078431369, 0.92156862745098034),
(0.73725490196078436, 0.74117647058823533, 0.86274509803921573),
(0.61960784313725492, 0.60392156862745094, 0.78431372549019607),
(0.50196078431372548, 0.49019607843137253, 0.72941176470588232),
(0.41568627450980394, 0.31764705882352939, 0.63921568627450975),
(0.32941176470588235, 0.15294117647058825, 0.5607843137254902 ),
(0.24705882352941178, 0.0 , 0.49019607843137253)
)
_RdBu_data = (
(0.40392156862745099, 0.0 , 0.12156862745098039),
(0.69803921568627447, 0.09411764705882353, 0.16862745098039217),
(0.83921568627450982, 0.37647058823529411, 0.30196078431372547),
(0.95686274509803926, 0.6470588235294118 , 0.50980392156862742),
(0.99215686274509807, 0.85882352941176465, 0.7803921568627451 ),
(0.96862745098039216, 0.96862745098039216, 0.96862745098039216),
(0.81960784313725488, 0.89803921568627454, 0.94117647058823528),
(0.5725490196078431 , 0.77254901960784317, 0.87058823529411766),
(0.2627450980392157 , 0.57647058823529407, 0.76470588235294112),
(0.12941176470588237, 0.4 , 0.67450980392156867),
(0.0196078431372549 , 0.18823529411764706, 0.38039215686274508)
)
_RdGy_data = (
(0.40392156862745099, 0.0 , 0.12156862745098039),
(0.69803921568627447, 0.09411764705882353, 0.16862745098039217),
(0.83921568627450982, 0.37647058823529411, 0.30196078431372547),
(0.95686274509803926, 0.6470588235294118 , 0.50980392156862742),
(0.99215686274509807, 0.85882352941176465, 0.7803921568627451 ),
(1.0 , 1.0 , 1.0 ),
(0.8784313725490196 , 0.8784313725490196 , 0.8784313725490196 ),
(0.72941176470588232, 0.72941176470588232, 0.72941176470588232),
(0.52941176470588236, 0.52941176470588236, 0.52941176470588236),
(0.30196078431372547, 0.30196078431372547, 0.30196078431372547),
(0.10196078431372549, 0.10196078431372549, 0.10196078431372549)
)
_RdPu_data = (
(1.0 , 0.96862745098039216, 0.95294117647058818),
(0.99215686274509807, 0.8784313725490196 , 0.86666666666666667),
(0.9882352941176471 , 0.77254901960784317, 0.75294117647058822),
(0.98039215686274506, 0.62352941176470589, 0.70980392156862748),
(0.96862745098039216, 0.40784313725490196, 0.63137254901960782),
(0.86666666666666667, 0.20392156862745098, 0.59215686274509804),
(0.68235294117647061, 0.00392156862745098, 0.49411764705882355),
(0.47843137254901963, 0.00392156862745098, 0.46666666666666667),
(0.28627450980392155, 0.0 , 0.41568627450980394)
)
_RdYlBu_data = (
(0.6470588235294118 , 0.0 , 0.14901960784313725),
(0.84313725490196079, 0.18823529411764706 , 0.15294117647058825),
(0.95686274509803926, 0.42745098039215684 , 0.2627450980392157 ),
(0.99215686274509807, 0.68235294117647061 , 0.38039215686274508),
(0.99607843137254903, 0.8784313725490196 , 0.56470588235294117),
(1.0 , 1.0 , 0.74901960784313726),
(0.8784313725490196 , 0.95294117647058818 , 0.97254901960784312),
(0.6705882352941176 , 0.85098039215686272 , 0.9137254901960784 ),
(0.45490196078431372, 0.67843137254901964 , 0.81960784313725488),
(0.27058823529411763, 0.45882352941176469 , 0.70588235294117652),
(0.19215686274509805, 0.21176470588235294 , 0.58431372549019611)
)
_RdYlGn_data = (
(0.6470588235294118 , 0.0 , 0.14901960784313725),
(0.84313725490196079, 0.18823529411764706 , 0.15294117647058825),
(0.95686274509803926, 0.42745098039215684 , 0.2627450980392157 ),
(0.99215686274509807, 0.68235294117647061 , 0.38039215686274508),
(0.99607843137254903, 0.8784313725490196 , 0.54509803921568623),
(1.0 , 1.0 , 0.74901960784313726),
(0.85098039215686272, 0.93725490196078431 , 0.54509803921568623),
(0.65098039215686276, 0.85098039215686272 , 0.41568627450980394),
(0.4 , 0.74117647058823533 , 0.38823529411764707),
(0.10196078431372549, 0.59607843137254901 , 0.31372549019607843),
(0.0 , 0.40784313725490196 , 0.21568627450980393)
)
_Reds_data = (
(1.0 , 0.96078431372549022 , 0.94117647058823528),
(0.99607843137254903, 0.8784313725490196 , 0.82352941176470584),
(0.9882352941176471 , 0.73333333333333328 , 0.63137254901960782),
(0.9882352941176471 , 0.5725490196078431 , 0.44705882352941179),
(0.98431372549019602, 0.41568627450980394 , 0.29019607843137257),
(0.93725490196078431, 0.23137254901960785 , 0.17254901960784313),
(0.79607843137254897, 0.094117647058823528, 0.11372549019607843),
(0.6470588235294118 , 0.058823529411764705, 0.08235294117647058),
(0.40392156862745099, 0.0 , 0.05098039215686274)
)
_Spectral_data = (
(0.61960784313725492, 0.003921568627450980, 0.25882352941176473),
(0.83529411764705885, 0.24313725490196078 , 0.30980392156862746),
(0.95686274509803926, 0.42745098039215684 , 0.2627450980392157 ),
(0.99215686274509807, 0.68235294117647061 , 0.38039215686274508),
(0.99607843137254903, 0.8784313725490196 , 0.54509803921568623),
(1.0 , 1.0 , 0.74901960784313726),
(0.90196078431372551, 0.96078431372549022 , 0.59607843137254901),
(0.6705882352941176 , 0.8666666666666667 , 0.64313725490196083),
(0.4 , 0.76078431372549016 , 0.6470588235294118 ),
(0.19607843137254902, 0.53333333333333333 , 0.74117647058823533),
(0.36862745098039218, 0.30980392156862746 , 0.63529411764705879)
)
_YlGn_data = (
(1.0 , 1.0 , 0.89803921568627454),
(0.96862745098039216, 0.9882352941176471 , 0.72549019607843135),
(0.85098039215686272, 0.94117647058823528 , 0.63921568627450975),
(0.67843137254901964, 0.8666666666666667 , 0.55686274509803924),
(0.47058823529411764, 0.77647058823529413 , 0.47450980392156861),
(0.25490196078431371, 0.6705882352941176 , 0.36470588235294116),
(0.13725490196078433, 0.51764705882352946 , 0.2627450980392157 ),
(0.0 , 0.40784313725490196 , 0.21568627450980393),
(0.0 , 0.27058823529411763 , 0.16078431372549021)
)
_YlGnBu_data = (
(1.0 , 1.0 , 0.85098039215686272),
(0.92941176470588238, 0.97254901960784312 , 0.69411764705882351),
(0.7803921568627451 , 0.9137254901960784 , 0.70588235294117652),
(0.49803921568627452, 0.80392156862745101 , 0.73333333333333328),
(0.25490196078431371, 0.71372549019607845 , 0.7686274509803922 ),
(0.11372549019607843, 0.56862745098039214 , 0.75294117647058822),
(0.13333333333333333, 0.36862745098039218 , 0.6588235294117647 ),
(0.14509803921568629, 0.20392156862745098 , 0.58039215686274515),
(0.03137254901960784, 0.11372549019607843 , 0.34509803921568627)
)
_YlOrBr_data = (
(1.0 , 1.0 , 0.89803921568627454),
(1.0 , 0.96862745098039216 , 0.73725490196078436),
(0.99607843137254903, 0.8901960784313725 , 0.56862745098039214),
(0.99607843137254903, 0.7686274509803922 , 0.30980392156862746),
(0.99607843137254903, 0.6 , 0.16078431372549021),
(0.92549019607843142, 0.4392156862745098 , 0.07843137254901961),
(0.8 , 0.29803921568627451 , 0.00784313725490196),
(0.6 , 0.20392156862745098 , 0.01568627450980392),
(0.4 , 0.14509803921568629 , 0.02352941176470588)
)
_YlOrRd_data = (
(1.0 , 1.0 , 0.8 ),
(1.0 , 0.92941176470588238 , 0.62745098039215685),
(0.99607843137254903, 0.85098039215686272 , 0.46274509803921571),
(0.99607843137254903, 0.69803921568627447 , 0.29803921568627451),
(0.99215686274509807, 0.55294117647058827 , 0.23529411764705882),
(0.9882352941176471 , 0.30588235294117649 , 0.16470588235294117),
(0.8901960784313725 , 0.10196078431372549 , 0.10980392156862745),
(0.74117647058823533, 0.0 , 0.14901960784313725),
(0.50196078431372548, 0.0 , 0.14901960784313725)
)
# ColorBrewer's qualitative maps, implemented using ListedColormap
# for use with mpl.colors.NoNorm
_Accent_data = (
(0.49803921568627452, 0.78823529411764703, 0.49803921568627452),
(0.74509803921568629, 0.68235294117647061, 0.83137254901960789),
(0.99215686274509807, 0.75294117647058822, 0.52549019607843139),
(1.0, 1.0, 0.6 ),
(0.2196078431372549, 0.42352941176470588, 0.69019607843137254),
(0.94117647058823528, 0.00784313725490196, 0.49803921568627452),
(0.74901960784313726, 0.35686274509803922, 0.09019607843137254),
(0.4, 0.4, 0.4 ),
)
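# A minimal sketch of wrapping a qualitative palette such as ``_Accent_data``
# by hand (the built-in 'Accent' colormap is normally created from the
# ``datad`` entry at the bottom of this module; 'accent_demo' is just an
# illustrative name):
#
# >>> from matplotlib.colors import ListedColormap, NoNorm
# >>> accent = ListedColormap(_Accent_data, name='accent_demo')
# >>> # with NoNorm, integer values 0..7 pick the listed colors directly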
_Dark2_data = (
(0.10588235294117647, 0.61960784313725492, 0.46666666666666667),
(0.85098039215686272, 0.37254901960784315, 0.00784313725490196),
(0.45882352941176469, 0.4392156862745098, 0.70196078431372544),
(0.90588235294117647, 0.16078431372549021, 0.54117647058823526),
(0.4, 0.65098039215686276, 0.11764705882352941),
(0.90196078431372551, 0.6705882352941176, 0.00784313725490196),
(0.65098039215686276, 0.46274509803921571, 0.11372549019607843),
(0.4, 0.4, 0.4 ),
)
_Paired_data = (
(0.65098039215686276, 0.80784313725490198, 0.8901960784313725 ),
(0.12156862745098039, 0.47058823529411764, 0.70588235294117652),
(0.69803921568627447, 0.87450980392156863, 0.54117647058823526),
(0.2, 0.62745098039215685, 0.17254901960784313),
(0.98431372549019602, 0.60392156862745094, 0.6 ),
(0.8901960784313725, 0.10196078431372549, 0.10980392156862745),
(0.99215686274509807, 0.74901960784313726, 0.43529411764705883),
(1.0, 0.49803921568627452, 0.0 ),
(0.792156862745098, 0.69803921568627447, 0.83921568627450982),
(0.41568627450980394, 0.23921568627450981, 0.60392156862745094),
(1.0, 1.0, 0.6 ),
(0.69411764705882351, 0.34901960784313724, 0.15686274509803921),
)
_Pastel1_data = (
(0.98431372549019602, 0.70588235294117652, 0.68235294117647061),
(0.70196078431372544, 0.80392156862745101, 0.8901960784313725 ),
(0.8, 0.92156862745098034, 0.77254901960784317),
(0.87058823529411766, 0.79607843137254897, 0.89411764705882357),
(0.99607843137254903, 0.85098039215686272, 0.65098039215686276),
(1.0, 1.0, 0.8 ),
(0.89803921568627454, 0.84705882352941175, 0.74117647058823533),
(0.99215686274509807, 0.85490196078431369, 0.92549019607843142),
(0.94901960784313721, 0.94901960784313721, 0.94901960784313721),
)
_Pastel2_data = (
(0.70196078431372544, 0.88627450980392153, 0.80392156862745101),
(0.99215686274509807, 0.80392156862745101, 0.67450980392156867),
(0.79607843137254897, 0.83529411764705885, 0.90980392156862744),
(0.95686274509803926, 0.792156862745098, 0.89411764705882357),
(0.90196078431372551, 0.96078431372549022, 0.78823529411764703),
(1.0, 0.94901960784313721, 0.68235294117647061),
(0.94509803921568625, 0.88627450980392153, 0.8 ),
(0.8, 0.8, 0.8 ),
)
_Set1_data = (
(0.89411764705882357, 0.10196078431372549, 0.10980392156862745),
(0.21568627450980393, 0.49411764705882355, 0.72156862745098038),
(0.30196078431372547, 0.68627450980392157, 0.29019607843137257),
(0.59607843137254901, 0.30588235294117649, 0.63921568627450975),
(1.0, 0.49803921568627452, 0.0 ),
(1.0, 1.0, 0.2 ),
(0.65098039215686276, 0.33725490196078434, 0.15686274509803921),
(0.96862745098039216, 0.50588235294117645, 0.74901960784313726),
(0.6, 0.6, 0.6),
)
_Set2_data = (
(0.4, 0.76078431372549016, 0.6470588235294118 ),
(0.9882352941176471, 0.55294117647058827, 0.3843137254901961 ),
(0.55294117647058827, 0.62745098039215685, 0.79607843137254897),
(0.90588235294117647, 0.54117647058823526, 0.76470588235294112),
(0.65098039215686276, 0.84705882352941175, 0.32941176470588235),
(1.0, 0.85098039215686272, 0.18431372549019609),
(0.89803921568627454, 0.7686274509803922, 0.58039215686274515),
(0.70196078431372544, 0.70196078431372544, 0.70196078431372544),
)
_Set3_data = (
(0.55294117647058827, 0.82745098039215681, 0.7803921568627451 ),
(1.0, 1.0, 0.70196078431372544),
(0.74509803921568629, 0.72941176470588232, 0.85490196078431369),
(0.98431372549019602, 0.50196078431372548, 0.44705882352941179),
(0.50196078431372548, 0.69411764705882351, 0.82745098039215681),
(0.99215686274509807, 0.70588235294117652, 0.3843137254901961 ),
(0.70196078431372544, 0.87058823529411766, 0.41176470588235292),
(0.9882352941176471, 0.80392156862745101, 0.89803921568627454),
(0.85098039215686272, 0.85098039215686272, 0.85098039215686272),
(0.73725490196078436, 0.50196078431372548, 0.74117647058823533),
(0.8, 0.92156862745098034, 0.77254901960784317),
(1.0, 0.92941176470588238, 0.43529411764705883),
)
# The next 7 palettes are from the Yorick scientific visualisation package,
# an evolution of the GIST package, both by David H. Munro.
# They are released under a BSD-like license (see LICENSE_YORICK in
# the license directory of the matplotlib source distribution).
#
# Most palette functions have been reduced to simple function descriptions
# by Reinier Heeres, since the rgb components were mostly straight lines.
# gist_earth_data and gist_ncar_data were simplified by a script and some
# manual effort.
_gist_earth_data = \
{'red': (
(0.0, 0.0, 0.0000),
(0.2824, 0.1882, 0.1882),
(0.4588, 0.2714, 0.2714),
(0.5490, 0.4719, 0.4719),
(0.6980, 0.7176, 0.7176),
(0.7882, 0.7553, 0.7553),
(1.0000, 0.9922, 0.9922),
), 'green': (
(0.0, 0.0, 0.0000),
(0.0275, 0.0000, 0.0000),
(0.1098, 0.1893, 0.1893),
(0.1647, 0.3035, 0.3035),
(0.2078, 0.3841, 0.3841),
(0.2824, 0.5020, 0.5020),
(0.5216, 0.6397, 0.6397),
(0.6980, 0.7171, 0.7171),
(0.7882, 0.6392, 0.6392),
(0.7922, 0.6413, 0.6413),
(0.8000, 0.6447, 0.6447),
(0.8078, 0.6481, 0.6481),
(0.8157, 0.6549, 0.6549),
(0.8667, 0.6991, 0.6991),
(0.8745, 0.7103, 0.7103),
(0.8824, 0.7216, 0.7216),
(0.8902, 0.7323, 0.7323),
(0.8980, 0.7430, 0.7430),
(0.9412, 0.8275, 0.8275),
(0.9569, 0.8635, 0.8635),
(0.9647, 0.8816, 0.8816),
(0.9961, 0.9733, 0.9733),
(1.0000, 0.9843, 0.9843),
), 'blue': (
(0.0, 0.0, 0.0000),
(0.0039, 0.1684, 0.1684),
(0.0078, 0.2212, 0.2212),
(0.0275, 0.4329, 0.4329),
(0.0314, 0.4549, 0.4549),
(0.2824, 0.5004, 0.5004),
(0.4667, 0.2748, 0.2748),
(0.5451, 0.3205, 0.3205),
(0.7843, 0.3961, 0.3961),
(0.8941, 0.6651, 0.6651),
(1.0000, 0.9843, 0.9843),
)}
_gist_gray_data = {
'red': gfunc[3],
'green': gfunc[3],
'blue': gfunc[3],
}
_gist_heat_data = {
'red': lambda x: 1.5 * x,
'green': lambda x: 2 * x - 1,
'blue': lambda x: 4 * x - 3,
}
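# Note: the gist_heat ramps above intentionally run outside [0, 1]; the
# colormap machinery is expected to clip each channel into [0, 1] when the
# lookup table is built, giving the black -> red -> yellow -> white ramp.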
_gist_ncar_data = \
{'red': (
(0.0, 0.0, 0.0000),
(0.3098, 0.0000, 0.0000),
(0.3725, 0.3993, 0.3993),
(0.4235, 0.5003, 0.5003),
(0.5333, 1.0000, 1.0000),
(0.7922, 1.0000, 1.0000),
(0.8471, 0.6218, 0.6218),
(0.8980, 0.9235, 0.9235),
(1.0000, 0.9961, 0.9961),
), 'green': (
(0.0, 0.0, 0.0000),
(0.0510, 0.3722, 0.3722),
(0.1059, 0.0000, 0.0000),
(0.1569, 0.7202, 0.7202),
(0.1608, 0.7537, 0.7537),
(0.1647, 0.7752, 0.7752),
(0.2157, 1.0000, 1.0000),
(0.2588, 0.9804, 0.9804),
(0.2706, 0.9804, 0.9804),
(0.3176, 1.0000, 1.0000),
(0.3686, 0.8081, 0.8081),
(0.4275, 1.0000, 1.0000),
(0.5216, 1.0000, 1.0000),
(0.6314, 0.7292, 0.7292),
(0.6863, 0.2796, 0.2796),
(0.7451, 0.0000, 0.0000),
(0.7922, 0.0000, 0.0000),
(0.8431, 0.1753, 0.1753),
(0.8980, 0.5000, 0.5000),
(1.0000, 0.9725, 0.9725),
), 'blue': (
(0.0, 0.5020, 0.5020),
(0.0510, 0.0222, 0.0222),
(0.1098, 1.0000, 1.0000),
(0.2039, 1.0000, 1.0000),
(0.2627, 0.6145, 0.6145),
(0.3216, 0.0000, 0.0000),
(0.4157, 0.0000, 0.0000),
(0.4745, 0.2342, 0.2342),
(0.5333, 0.0000, 0.0000),
(0.5804, 0.0000, 0.0000),
(0.6314, 0.0549, 0.0549),
(0.6902, 0.0000, 0.0000),
(0.7373, 0.0000, 0.0000),
(0.7922, 0.9738, 0.9738),
(0.8000, 1.0000, 1.0000),
(0.8431, 1.0000, 1.0000),
(0.8980, 0.9341, 0.9341),
(1.0000, 0.9961, 0.9961),
)}
_gist_rainbow_data = (
(0.000, (1.00, 0.00, 0.16)),
(0.030, (1.00, 0.00, 0.00)),
(0.215, (1.00, 1.00, 0.00)),
(0.400, (0.00, 1.00, 0.00)),
(0.586, (0.00, 1.00, 1.00)),
(0.770, (0.00, 0.00, 1.00)),
(0.954, (1.00, 0.00, 1.00)),
(1.000, (1.00, 0.00, 0.75))
)
_gist_stern_data = {
'red': (
(0.000, 0.000, 0.000), (0.0547, 1.000, 1.000),
(0.250, 0.027, 0.250), # (0.2500, 0.250, 0.250),
(1.000, 1.000, 1.000)),
'green': ((0, 0, 0), (1, 1, 1)),
'blue': (
(0.000, 0.000, 0.000), (0.500, 1.000, 1.000),
(0.735, 0.000, 0.000), (1.000, 1.000, 1.000))
}
_gist_yarg_data = {
'red': lambda x: 1 - x,
'green': lambda x: 1 - x,
'blue': lambda x: 1 - x,
}
# This bipolar color map was generated from CoolWarmFloat33.csv of
# "Diverging Color Maps for Scientific Visualization" by Kenneth Moreland.
# <http://www.kennethmoreland.com/color-maps/>
_coolwarm_data = {
'red': [
(0.0, 0.2298057, 0.2298057),
(0.03125, 0.26623388, 0.26623388),
(0.0625, 0.30386891, 0.30386891),
(0.09375, 0.342804478, 0.342804478),
(0.125, 0.38301334, 0.38301334),
(0.15625, 0.424369608, 0.424369608),
(0.1875, 0.46666708, 0.46666708),
(0.21875, 0.509635204, 0.509635204),
(0.25, 0.552953156, 0.552953156),
(0.28125, 0.596262162, 0.596262162),
(0.3125, 0.639176211, 0.639176211),
(0.34375, 0.681291281, 0.681291281),
(0.375, 0.722193294, 0.722193294),
(0.40625, 0.761464949, 0.761464949),
(0.4375, 0.798691636, 0.798691636),
(0.46875, 0.833466556, 0.833466556),
(0.5, 0.865395197, 0.865395197),
(0.53125, 0.897787179, 0.897787179),
(0.5625, 0.924127593, 0.924127593),
(0.59375, 0.944468518, 0.944468518),
(0.625, 0.958852946, 0.958852946),
(0.65625, 0.96732803, 0.96732803),
(0.6875, 0.969954137, 0.969954137),
(0.71875, 0.966811177, 0.966811177),
(0.75, 0.958003065, 0.958003065),
(0.78125, 0.943660866, 0.943660866),
(0.8125, 0.923944917, 0.923944917),
(0.84375, 0.89904617, 0.89904617),
(0.875, 0.869186849, 0.869186849),
(0.90625, 0.834620542, 0.834620542),
(0.9375, 0.795631745, 0.795631745),
(0.96875, 0.752534934, 0.752534934),
(1.0, 0.705673158, 0.705673158)],
'green': [
(0.0, 0.298717966, 0.298717966),
(0.03125, 0.353094838, 0.353094838),
(0.0625, 0.406535296, 0.406535296),
(0.09375, 0.458757618, 0.458757618),
(0.125, 0.50941904, 0.50941904),
(0.15625, 0.558148092, 0.558148092),
(0.1875, 0.604562568, 0.604562568),
(0.21875, 0.648280772, 0.648280772),
(0.25, 0.688929332, 0.688929332),
(0.28125, 0.726149107, 0.726149107),
(0.3125, 0.759599947, 0.759599947),
(0.34375, 0.788964712, 0.788964712),
(0.375, 0.813952739, 0.813952739),
(0.40625, 0.834302879, 0.834302879),
(0.4375, 0.849786142, 0.849786142),
(0.46875, 0.860207984, 0.860207984),
(0.5, 0.86541021, 0.86541021),
(0.53125, 0.848937047, 0.848937047),
(0.5625, 0.827384882, 0.827384882),
(0.59375, 0.800927443, 0.800927443),
(0.625, 0.769767752, 0.769767752),
(0.65625, 0.734132809, 0.734132809),
(0.6875, 0.694266682, 0.694266682),
(0.71875, 0.650421156, 0.650421156),
(0.75, 0.602842431, 0.602842431),
(0.78125, 0.551750968, 0.551750968),
(0.8125, 0.49730856, 0.49730856),
(0.84375, 0.439559467, 0.439559467),
(0.875, 0.378313092, 0.378313092),
(0.90625, 0.312874446, 0.312874446),
(0.9375, 0.24128379, 0.24128379),
(0.96875, 0.157246067, 0.157246067),
(1.0, 0.01555616, 0.01555616)],
'blue': [
(0.0, 0.753683153, 0.753683153),
(0.03125, 0.801466763, 0.801466763),
(0.0625, 0.84495867, 0.84495867),
(0.09375, 0.883725899, 0.883725899),
(0.125, 0.917387822, 0.917387822),
(0.15625, 0.945619588, 0.945619588),
(0.1875, 0.968154911, 0.968154911),
(0.21875, 0.98478814, 0.98478814),
(0.25, 0.995375608, 0.995375608),
(0.28125, 0.999836203, 0.999836203),
(0.3125, 0.998151185, 0.998151185),
(0.34375, 0.990363227, 0.990363227),
(0.375, 0.976574709, 0.976574709),
(0.40625, 0.956945269, 0.956945269),
(0.4375, 0.931688648, 0.931688648),
(0.46875, 0.901068838, 0.901068838),
(0.5, 0.865395561, 0.865395561),
(0.53125, 0.820880546, 0.820880546),
(0.5625, 0.774508472, 0.774508472),
(0.59375, 0.726736146, 0.726736146),
(0.625, 0.678007945, 0.678007945),
(0.65625, 0.628751763, 0.628751763),
(0.6875, 0.579375448, 0.579375448),
(0.71875, 0.530263762, 0.530263762),
(0.75, 0.481775914, 0.481775914),
(0.78125, 0.434243684, 0.434243684),
(0.8125, 0.387970225, 0.387970225),
(0.84375, 0.343229596, 0.343229596),
(0.875, 0.300267182, 0.300267182),
(0.90625, 0.259301199, 0.259301199),
(0.9375, 0.220525627, 0.220525627),
(0.96875, 0.184115123, 0.184115123),
(1.0, 0.150232812, 0.150232812)]
}
# Implementation of Carey Rappaport's CMRmap.
# See `A Color Map for Effective Black-and-White Rendering of Color-Scale
# Images' by Carey Rappaport
# http://www.mathworks.com/matlabcentral/fileexchange/2662-cmrmap-m
_CMRmap_data = {'red': ((0.000, 0.00, 0.00),
(0.125, 0.15, 0.15),
(0.250, 0.30, 0.30),
(0.375, 0.60, 0.60),
(0.500, 1.00, 1.00),
(0.625, 0.90, 0.90),
(0.750, 0.90, 0.90),
(0.875, 0.90, 0.90),
(1.000, 1.00, 1.00)),
'green': ((0.000, 0.00, 0.00),
(0.125, 0.15, 0.15),
(0.250, 0.15, 0.15),
(0.375, 0.20, 0.20),
(0.500, 0.25, 0.25),
(0.625, 0.50, 0.50),
(0.750, 0.75, 0.75),
(0.875, 0.90, 0.90),
(1.000, 1.00, 1.00)),
'blue': ((0.000, 0.00, 0.00),
(0.125, 0.50, 0.50),
(0.250, 0.75, 0.75),
(0.375, 0.50, 0.50),
(0.500, 0.15, 0.15),
(0.625, 0.00, 0.00),
(0.750, 0.10, 0.10),
(0.875, 0.50, 0.50),
(1.000, 1.00, 1.00))}
# An MIT licensed, colorblind-friendly heatmap from Wistia:
# https://github.com/wistia/heatmap-palette
# http://wistia.com/blog/heatmaps-for-colorblindness
#
# >>> import matplotlib.colors as c
# >>> colors = ["#e4ff7a", "#ffe81a", "#ffbd00", "#ffa000", "#fc7f00"]
# >>> cm = c.LinearSegmentedColormap.from_list('wistia', colors)
# >>> _wistia_data = cm._segmentdata
# >>> del _wistia_data['alpha']
#
_wistia_data = {
'red': [(0.0, 0.8941176470588236, 0.8941176470588236),
(0.25, 1.0, 1.0),
(0.5, 1.0, 1.0),
(0.75, 1.0, 1.0),
(1.0, 0.9882352941176471, 0.9882352941176471)],
'green': [(0.0, 1.0, 1.0),
(0.25, 0.9098039215686274, 0.9098039215686274),
(0.5, 0.7411764705882353, 0.7411764705882353),
(0.75, 0.6274509803921569, 0.6274509803921569),
(1.0, 0.4980392156862745, 0.4980392156862745)],
'blue': [(0.0, 0.47843137254901963, 0.47843137254901963),
(0.25, 0.10196078431372549, 0.10196078431372549),
(0.5, 0.0, 0.0),
(0.75, 0.0, 0.0),
(1.0, 0.0, 0.0)],
}
# Categorical palettes from Vega:
# https://github.com/vega/vega/wiki/Scales
# (divided by 255)
#
_tab10_data = (
(0.12156862745098039, 0.4666666666666667, 0.7058823529411765 ), # 1f77b4
(1.0, 0.4980392156862745, 0.054901960784313725), # ff7f0e
(0.17254901960784313, 0.6274509803921569, 0.17254901960784313 ), # 2ca02c
(0.8392156862745098, 0.15294117647058825, 0.1568627450980392 ), # d62728
(0.5803921568627451, 0.403921568627451, 0.7411764705882353 ), # 9467bd
(0.5490196078431373, 0.33725490196078434, 0.29411764705882354 ), # 8c564b
(0.8901960784313725, 0.4666666666666667, 0.7607843137254902 ), # e377c2
(0.4980392156862745, 0.4980392156862745, 0.4980392156862745 ), # 7f7f7f
(0.7372549019607844, 0.7411764705882353, 0.13333333333333333 ), # bcbd22
(0.09019607843137255, 0.7450980392156863, 0.8117647058823529), # 17becf
)
_tab20_data = (
(0.12156862745098039, 0.4666666666666667, 0.7058823529411765 ), # 1f77b4
(0.6823529411764706, 0.7803921568627451, 0.9098039215686274 ), # aec7e8
(1.0, 0.4980392156862745, 0.054901960784313725), # ff7f0e
(1.0, 0.7333333333333333, 0.47058823529411764 ), # ffbb78
(0.17254901960784313, 0.6274509803921569, 0.17254901960784313 ), # 2ca02c
(0.596078431372549, 0.8745098039215686, 0.5411764705882353 ), # 98df8a
(0.8392156862745098, 0.15294117647058825, 0.1568627450980392 ), # d62728
(1.0, 0.596078431372549, 0.5882352941176471 ), # ff9896
(0.5803921568627451, 0.403921568627451, 0.7411764705882353 ), # 9467bd
(0.7725490196078432, 0.6901960784313725, 0.8352941176470589 ), # c5b0d5
(0.5490196078431373, 0.33725490196078434, 0.29411764705882354 ), # 8c564b
(0.7686274509803922, 0.611764705882353, 0.5803921568627451 ), # c49c94
(0.8901960784313725, 0.4666666666666667, 0.7607843137254902 ), # e377c2
(0.9686274509803922, 0.7137254901960784, 0.8235294117647058 ), # f7b6d2
(0.4980392156862745, 0.4980392156862745, 0.4980392156862745 ), # 7f7f7f
(0.7803921568627451, 0.7803921568627451, 0.7803921568627451 ), # c7c7c7
(0.7372549019607844, 0.7411764705882353, 0.13333333333333333 ), # bcbd22
(0.8588235294117647, 0.8588235294117647, 0.5529411764705883 ), # dbdb8d
(0.09019607843137255, 0.7450980392156863, 0.8117647058823529 ), # 17becf
(0.6196078431372549, 0.8549019607843137, 0.8980392156862745), # 9edae5
)
_tab20b_data = (
(0.2235294117647059, 0.23137254901960785, 0.4745098039215686 ), # 393b79
(0.3215686274509804, 0.32941176470588235, 0.6392156862745098 ), # 5254a3
(0.4196078431372549, 0.43137254901960786, 0.8117647058823529 ), # 6b6ecf
(0.611764705882353, 0.6196078431372549, 0.8705882352941177 ), # 9c9ede
(0.38823529411764707, 0.4745098039215686, 0.2235294117647059 ), # 637939
(0.5490196078431373, 0.6352941176470588, 0.3215686274509804 ), # 8ca252
(0.7098039215686275, 0.8117647058823529, 0.4196078431372549 ), # b5cf6b
(0.807843137254902, 0.8588235294117647, 0.611764705882353 ), # cedb9c
(0.5490196078431373, 0.42745098039215684, 0.19215686274509805), # 8c6d31
(0.7411764705882353, 0.6196078431372549, 0.2235294117647059 ), # bd9e39
(0.9058823529411765, 0.7294117647058823, 0.3215686274509804 ), # e7ba52
(0.9058823529411765, 0.796078431372549, 0.5803921568627451 ), # e7cb94
(0.5176470588235295, 0.23529411764705882, 0.2235294117647059 ), # 843c39
(0.6784313725490196, 0.28627450980392155, 0.2901960784313726 ), # ad494a
(0.8392156862745098, 0.3803921568627451, 0.4196078431372549 ), # d6616b
(0.9058823529411765, 0.5882352941176471, 0.611764705882353 ), # e7969c
(0.4823529411764706, 0.2549019607843137, 0.45098039215686275), # 7b4173
(0.6470588235294118, 0.3176470588235294, 0.5803921568627451 ), # a55194
(0.807843137254902, 0.42745098039215684, 0.7411764705882353 ), # ce6dbd
(0.8705882352941177, 0.6196078431372549, 0.8392156862745098 ), # de9ed6
)
_tab20c_data = (
(0.19215686274509805, 0.5098039215686274, 0.7411764705882353 ), # 3182bd
(0.4196078431372549, 0.6823529411764706, 0.8392156862745098 ), # 6baed6
(0.6196078431372549, 0.792156862745098, 0.8823529411764706 ), # 9ecae1
(0.7764705882352941, 0.8588235294117647, 0.9372549019607843 ), # c6dbef
(0.9019607843137255, 0.3333333333333333, 0.050980392156862744), # e6550d
(0.9921568627450981, 0.5529411764705883, 0.23529411764705882 ), # fd8d3c
(0.9921568627450981, 0.6823529411764706, 0.4196078431372549 ), # fdae6b
(0.9921568627450981, 0.8156862745098039, 0.6352941176470588 ), # fdd0a2
(0.19215686274509805, 0.6392156862745098, 0.32941176470588235 ), # 31a354
(0.4549019607843137, 0.7686274509803922, 0.4627450980392157 ), # 74c476
(0.6313725490196078, 0.8509803921568627, 0.6078431372549019 ), # a1d99b
(0.7803921568627451, 0.9137254901960784, 0.7529411764705882 ), # c7e9c0
(0.4588235294117647, 0.4196078431372549, 0.6941176470588235 ), # 756bb1
(0.6196078431372549, 0.6039215686274509, 0.7843137254901961 ), # 9e9ac8
(0.7372549019607844, 0.7411764705882353, 0.8627450980392157 ), # bcbddc
(0.8549019607843137, 0.8549019607843137, 0.9215686274509803 ), # dadaeb
(0.38823529411764707, 0.38823529411764707, 0.38823529411764707 ), # 636363
(0.5882352941176471, 0.5882352941176471, 0.5882352941176471 ), # 969696
(0.7411764705882353, 0.7411764705882353, 0.7411764705882353 ), # bdbdbd
(0.8509803921568627, 0.8509803921568627, 0.8509803921568627 ), # d9d9d9
)
datad = {
'Blues': _Blues_data,
'BrBG': _BrBG_data,
'BuGn': _BuGn_data,
'BuPu': _BuPu_data,
'CMRmap': _CMRmap_data,
'GnBu': _GnBu_data,
'Greens': _Greens_data,
'Greys': _Greys_data,
'OrRd': _OrRd_data,
'Oranges': _Oranges_data,
'PRGn': _PRGn_data,
'PiYG': _PiYG_data,
'PuBu': _PuBu_data,
'PuBuGn': _PuBuGn_data,
'PuOr': _PuOr_data,
'PuRd': _PuRd_data,
'Purples': _Purples_data,
'RdBu': _RdBu_data,
'RdGy': _RdGy_data,
'RdPu': _RdPu_data,
'RdYlBu': _RdYlBu_data,
'RdYlGn': _RdYlGn_data,
'Reds': _Reds_data,
'Spectral': _Spectral_data,
'Wistia': _wistia_data,
'YlGn': _YlGn_data,
'YlGnBu': _YlGnBu_data,
'YlOrBr': _YlOrBr_data,
'YlOrRd': _YlOrRd_data,
'afmhot': _afmhot_data,
'autumn': _autumn_data,
'binary': _binary_data,
'bone': _bone_data,
'brg': _brg_data,
'bwr': _bwr_data,
'cool': _cool_data,
'coolwarm': _coolwarm_data,
'copper': _copper_data,
'cubehelix': _cubehelix_data,
'flag': _flag_data,
'gist_earth': _gist_earth_data,
'gist_gray': _gist_gray_data,
'gist_heat': _gist_heat_data,
'gist_ncar': _gist_ncar_data,
'gist_rainbow': _gist_rainbow_data,
'gist_stern': _gist_stern_data,
'gist_yarg': _gist_yarg_data,
'gnuplot': _gnuplot_data,
'gnuplot2': _gnuplot2_data,
'gray': _gray_data,
'hot': _hot_data,
'hsv': _hsv_data,
'jet': _jet_data,
'nipy_spectral': _nipy_spectral_data,
'ocean': _ocean_data,
'pink': _pink_data,
'prism': _prism_data,
'rainbow': _rainbow_data,
'seismic': _seismic_data,
'spring': _spring_data,
'summer': _summer_data,
'terrain': _terrain_data,
'winter': _winter_data,
# Qualitative
'Accent': {'listed': _Accent_data},
'Dark2': {'listed': _Dark2_data},
'Paired': {'listed': _Paired_data},
'Pastel1': {'listed': _Pastel1_data},
'Pastel2': {'listed': _Pastel2_data},
'Set1': {'listed': _Set1_data},
'Set2': {'listed': _Set2_data},
'Set3': {'listed': _Set3_data},
'tab10': {'listed': _tab10_data},
'tab20': {'listed': _tab20_data},
'tab20b': {'listed': _tab20b_data},
'tab20c': {'listed': _tab20c_data},
}
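# The loader that consumes ``datad`` (in matplotlib.cm) presumably handles
# three kinds of entry: segmentdata dicts (including dicts of callables),
# plain color sequences, and {'listed': ...} dicts for qualitative maps. A
# hedged sketch of building colormaps from two entries by hand ('Blues_demo'
# and 'Accent_demo' are illustrative names):
#
# >>> from matplotlib.colors import LinearSegmentedColormap, ListedColormap
# >>> blues = LinearSegmentedColormap.from_list('Blues_demo', _Blues_data)
# >>> accent = ListedColormap(datad['Accent']['listed'], name='Accent_demo')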
| 66,603 | 45.060858 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/text.py |
"""
Classes for including text in a figure.
"""
from __future__ import absolute_import, division, print_function
import six
from six.moves import zip
import contextlib
import logging
import math
import warnings
import weakref
import numpy as np
from . import artist, cbook, docstring, rcParams
from .artist import Artist
from .font_manager import FontProperties
from .lines import Line2D
from .patches import FancyArrowPatch, FancyBboxPatch, Rectangle
from .textpath import TextPath # Unused, but imported by others.
from .transforms import (
Affine2D, Bbox, BboxBase, BboxTransformTo, IdentityTransform, Transform)
_log = logging.getLogger(__name__)
def _process_text_args(override, fontdict=None, **kwargs):
"Return an override dict. See :func:`~pyplot.text' docstring for info"
if fontdict is not None:
override.update(fontdict)
override.update(kwargs)
return override
@contextlib.contextmanager
def _wrap_text(textobj):
"""Temporarily inserts newlines to the text if the wrap option is enabled.
"""
if textobj.get_wrap():
old_text = textobj.get_text()
try:
textobj.set_text(textobj._get_wrapped_text())
yield textobj
finally:
textobj.set_text(old_text)
else:
yield textobj
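# A minimal illustration of the wrap option this helper supports (wrapping is
# applied at draw time, against the parent figure's width):
#
# >>> import matplotlib.pyplot as plt
# >>> fig = plt.figure()
# >>> fig.text(0.6, 0.7, "a fairly long sentence that would overflow the "
# ...          "figure without wrapping", wrap=True)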
# Extracted from Text's method to serve as a function
def get_rotation(rotation):
"""
    Return the text angle as a float in degrees. The returned
    angle is between 0 and 360 deg.
    *rotation* may be 'horizontal', 'vertical', a numeric value in degrees,
    or None (treated as 0).
"""
try:
angle = float(rotation)
except (ValueError, TypeError):
isString = isinstance(rotation, six.string_types)
if ((isString and rotation == 'horizontal') or rotation is None):
angle = 0.
elif (isString and rotation == 'vertical'):
angle = 90.
else:
            raise ValueError("rotation is {0!r}; expected 'horizontal', "
                             "'vertical', a numeric value, or "
                             "None".format(rotation))
return angle % 360
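# Doctest-style examples of the normalisation performed by get_rotation:
#
# >>> get_rotation('horizontal'), get_rotation('vertical')
# (0.0, 90.0)
# >>> get_rotation(-90), get_rotation(450)
# (270.0, 90.0)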
def _get_textbox(text, renderer):
"""
    Calculate the bounding box of the text. Unlike
    :meth:`matplotlib.text.Text.get_window_extent`, the bbox size is
    computed for the text before rotation is applied.
"""
    # TODO: This function may move into the Text class as a method. In fact,
    #       the information from the _get_textbox function should be
    #       available during the Text._get_layout() call, which is called
    #       within _get_textbox. So it would be better to move this function
    #       into a method, with some refactoring of the _get_layout method.
projected_xs = []
projected_ys = []
theta = np.deg2rad(text.get_rotation())
tr = Affine2D().rotate(-theta)
_, parts, d = text._get_layout(renderer)
for t, wh, x, y in parts:
w, h = wh
xt1, yt1 = tr.transform_point((x, y))
yt1 -= d
xt2, yt2 = xt1 + w, yt1 + h
projected_xs.extend([xt1, xt2])
projected_ys.extend([yt1, yt2])
xt_box, yt_box = min(projected_xs), min(projected_ys)
w_box, h_box = max(projected_xs) - xt_box, max(projected_ys) - yt_box
tr = Affine2D().rotate(theta)
x_box, y_box = tr.transform_point((xt_box, yt_box))
return x_box, y_box, w_box, h_box
class Text(Artist):
"""
Handle storing and drawing of text in window or data coordinates.
"""
zorder = 3
_cached = cbook.maxdict(50)
def __repr__(self):
return "Text(%g,%g,%s)" % (self._x, self._y, repr(self._text))
def __init__(self,
x=0, y=0, text='',
color=None, # defaults to rc params
verticalalignment='baseline',
horizontalalignment='left',
multialignment=None,
fontproperties=None, # defaults to FontProperties()
rotation=None,
linespacing=None,
rotation_mode=None,
usetex=None, # defaults to rcParams['text.usetex']
wrap=False,
**kwargs
):
"""
Create a :class:`~matplotlib.text.Text` instance at *x*, *y*
with string *text*.
Valid kwargs are
%(Text)s
"""
Artist.__init__(self)
self._x, self._y = x, y
if color is None:
color = rcParams['text.color']
if fontproperties is None:
fontproperties = FontProperties()
elif isinstance(fontproperties, six.string_types):
fontproperties = FontProperties(fontproperties)
self.set_text(text)
self.set_color(color)
self.set_usetex(usetex)
self.set_wrap(wrap)
self._verticalalignment = verticalalignment
self._horizontalalignment = horizontalalignment
self._multialignment = multialignment
self._rotation = rotation
self._fontproperties = fontproperties
self._bbox_patch = None # a FancyBboxPatch instance
self._renderer = None
if linespacing is None:
linespacing = 1.2 # Maybe use rcParam later.
self._linespacing = linespacing
self.set_rotation_mode(rotation_mode)
self.update(kwargs)
def update(self, kwargs):
"""
Update properties from a dictionary.
"""
# Update bbox last, as it depends on font properties.
sentinel = object() # bbox can be None, so use another sentinel.
bbox = kwargs.pop("bbox", sentinel)
super(Text, self).update(kwargs)
if bbox is not sentinel:
self.set_bbox(bbox)
def __getstate__(self):
d = super(Text, self).__getstate__()
# remove the cached _renderer (if it exists)
d['_renderer'] = None
return d
def contains(self, mouseevent):
"""Test whether the mouse event occurred in the patch.
In the case of text, a hit is true anywhere in the
axis-aligned bounding-box containing the text.
Returns True or False.
"""
if callable(self._contains):
return self._contains(self, mouseevent)
if not self.get_visible() or self._renderer is None:
return False, {}
l, b, w, h = self.get_window_extent().bounds
r, t = l + w, b + h
x, y = mouseevent.x, mouseevent.y
inside = (l <= x <= r and b <= y <= t)
cattr = {}
# if the text has a surrounding patch, also check containment for it,
# and merge the results with the results for the text.
if self._bbox_patch:
patch_inside, patch_cattr = self._bbox_patch.contains(mouseevent)
inside = inside or patch_inside
cattr["bbox_patch"] = patch_cattr
return inside, cattr
def _get_xy_display(self):
'get the (possibly unit converted) transformed x, y in display coords'
x, y = self.get_unitless_position()
return self.get_transform().transform_point((x, y))
def _get_multialignment(self):
if self._multialignment is not None:
return self._multialignment
else:
return self._horizontalalignment
def get_rotation(self):
'return the text angle as float in degrees'
return get_rotation(self._rotation) # string_or_number -> number
def set_rotation_mode(self, m):
"""
Set text rotation mode.
.. ACCEPTS: [ None | "default" | "anchor" ]
Parameters
----------
m : ``None`` or ``"default"`` or ``"anchor"``
            If ``None`` or ``"default"``, the text will be rotated first, then
            aligned according to its horizontal and vertical alignments. If
            ``"anchor"``, then alignment occurs before rotation.
"""
if m is None or m in ["anchor", "default"]:
self._rotation_mode = m
else:
raise ValueError("Unknown rotation_mode : %s" % repr(m))
self.stale = True
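    # Usage sketch (an assumption, not taken from the original source): with
    # the default mode the already-rotated bbox is aligned, whereas "anchor"
    # aligns the unrotated text first and then rotates about that anchor,
    # e.g. for tick-label-like placement:
    #
    #     import matplotlib.pyplot as plt
    #     fig, ax = plt.subplots()
    #     ax.text(0.5, 0.5, "default", rotation=45, ha="left", va="bottom")
    #     ax.text(0.5, 0.4, "anchored", rotation=45, ha="left", va="bottom",
    #             rotation_mode="anchor")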
def get_rotation_mode(self):
"get text rotation mode"
return self._rotation_mode
def update_from(self, other):
'Copy properties from other to self'
Artist.update_from(self, other)
self._color = other._color
self._multialignment = other._multialignment
self._verticalalignment = other._verticalalignment
self._horizontalalignment = other._horizontalalignment
self._fontproperties = other._fontproperties.copy()
self._rotation = other._rotation
self._picker = other._picker
self._linespacing = other._linespacing
self.stale = True
def _get_layout(self, renderer):
"""
return the extent (bbox) of the text together with
multiple-alignment information. Note that it returns an extent
of a rotated text when necessary.
"""
key = self.get_prop_tup(renderer=renderer)
if key in self._cached:
return self._cached[key]
horizLayout = []
thisx, thisy = 0.0, 0.0
xmin, ymin = 0.0, 0.0
width, height = 0.0, 0.0
lines = self.get_text().split('\n')
whs = np.zeros((len(lines), 2))
horizLayout = np.zeros((len(lines), 4))
# Find full vertical extent of font,
# including ascenders and descenders:
tmp, lp_h, lp_bl = renderer.get_text_width_height_descent('lp',
self._fontproperties,
ismath=False)
offsety = (lp_h - lp_bl) * self._linespacing
baseline = 0
for i, line in enumerate(lines):
clean_line, ismath = self.is_math_text(line, self.get_usetex())
if clean_line:
w, h, d = renderer.get_text_width_height_descent(clean_line,
self._fontproperties,
ismath=ismath)
else:
w, h, d = 0, 0, 0
            # For multiline text, increase the line spacing when the
            # text net-height (excluding baseline) is larger than that
            # of an "l" (e.g., use of superscripts), which seems to be
            # what TeX does.
h = max(h, lp_h)
d = max(d, lp_bl)
whs[i] = w, h
baseline = (h - d) - thisy
thisy -= max(offsety, (h - d) * self._linespacing)
horizLayout[i] = thisx, thisy, w, h
thisy -= d
width = max(width, w)
descent = d
ymin = horizLayout[-1][1]
ymax = horizLayout[0][1] + horizLayout[0][3]
height = ymax - ymin
xmax = xmin + width
# get the rotation matrix
M = Affine2D().rotate_deg(self.get_rotation())
offsetLayout = np.zeros((len(lines), 2))
offsetLayout[:] = horizLayout[:, 0:2]
# now offset the individual text lines within the box
        if len(lines) > 1:  # do the multiline alignment
malign = self._get_multialignment()
if malign == 'center':
offsetLayout[:, 0] += width / 2.0 - horizLayout[:, 2] / 2.0
elif malign == 'right':
offsetLayout[:, 0] += width - horizLayout[:, 2]
# the corners of the unrotated bounding box
cornersHoriz = np.array(
[(xmin, ymin), (xmin, ymax), (xmax, ymax), (xmax, ymin)], float)
cornersHoriz[:, 1] -= descent
# now rotate the bbox
cornersRotated = M.transform(cornersHoriz)
txs = cornersRotated[:, 0]
tys = cornersRotated[:, 1]
# compute the bounds of the rotated box
xmin, xmax = txs.min(), txs.max()
ymin, ymax = tys.min(), tys.max()
width = xmax - xmin
height = ymax - ymin
# Now move the box to the target position offset the display
# bbox by alignment
halign = self._horizontalalignment
valign = self._verticalalignment
rotation_mode = self.get_rotation_mode()
if rotation_mode != "anchor":
# compute the text location in display coords and the offsets
# necessary to align the bbox with that location
if halign == 'center':
offsetx = (xmin + width / 2.0)
elif halign == 'right':
offsetx = (xmin + width)
else:
offsetx = xmin
if valign == 'center':
offsety = (ymin + height / 2.0)
elif valign == 'top':
offsety = (ymin + height)
elif valign == 'baseline':
offsety = (ymin + height) - baseline
elif valign == 'center_baseline':
offsety = ymin + height - baseline / 2.0
else:
offsety = ymin
else:
xmin1, ymin1 = cornersHoriz[0]
xmax1, ymax1 = cornersHoriz[2]
if halign == 'center':
offsetx = (xmin1 + xmax1) / 2.0
elif halign == 'right':
offsetx = xmax1
else:
offsetx = xmin1
if valign == 'center':
offsety = (ymin1 + ymax1) / 2.0
elif valign == 'top':
offsety = ymax1
elif valign == 'baseline':
offsety = ymax1 - baseline
elif valign == 'center_baseline':
offsety = (ymin1 + ymax1 - baseline) / 2.0
else:
offsety = ymin1
offsetx, offsety = M.transform_point((offsetx, offsety))
xmin -= offsetx
ymin -= offsety
bbox = Bbox.from_bounds(xmin, ymin, width, height)
# now rotate the positions around the first x,y position
xys = M.transform(offsetLayout)
xys -= (offsetx, offsety)
xs, ys = xys[:, 0], xys[:, 1]
ret = bbox, list(zip(lines, whs, xs, ys)), descent
self._cached[key] = ret
return ret
def set_bbox(self, rectprops):
"""
Draw a bounding box around self. rectprops are any settable
properties for a FancyBboxPatch, e.g., facecolor='red', alpha=0.5.
t.set_bbox(dict(facecolor='red', alpha=0.5))
The default boxstyle is 'square'. The mutation
scale of the FancyBboxPatch is set to the fontsize.
ACCEPTS: FancyBboxPatch prop dict
"""
if rectprops is not None:
props = rectprops.copy()
boxstyle = props.pop("boxstyle", None)
pad = props.pop("pad", None)
if boxstyle is None:
boxstyle = "square"
if pad is None:
pad = 4 # points
pad /= self.get_size() # to fraction of font size
else:
if pad is None:
pad = 0.3
# boxstyle could be a callable or a string
if (isinstance(boxstyle, six.string_types)
and "pad" not in boxstyle):
boxstyle += ",pad=%0.2f" % pad
bbox_transmuter = props.pop("bbox_transmuter", None)
self._bbox_patch = FancyBboxPatch(
(0., 0.),
1., 1.,
boxstyle=boxstyle,
bbox_transmuter=bbox_transmuter,
transform=IdentityTransform(),
**props)
else:
self._bbox_patch = None
self._update_clip_properties()
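    # Usage sketch (an assumption; it mirrors the docstring above rather than
    # the library's test suite): the dict is forwarded to FancyBboxPatch,
    # with "boxstyle" and "pad" split off first.
    #
    #     import matplotlib.pyplot as plt
    #     fig, ax = plt.subplots()
    #     t = ax.text(0.5, 0.5, "boxed",
    #                 bbox=dict(boxstyle="round,pad=0.4",
    #                           facecolor="lightyellow", edgecolor="0.5"))
    #     t.set_bbox(dict(facecolor="red", alpha=0.5))  # restyle later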
def get_bbox_patch(self):
"""
Return the bbox Patch object. Returns None if the
FancyBboxPatch is not made.
"""
return self._bbox_patch
def update_bbox_position_size(self, renderer):
"""
        Update the location and the size of the bbox. This method
        should be used when the position and size of the bbox need to
        be updated before actually drawing the bbox.
"""
if self._bbox_patch:
trans = self.get_transform()
# don't use self.get_unitless_position here, which refers to text
# position in Text, and dash position in TextWithDash:
posx = float(self.convert_xunits(self._x))
posy = float(self.convert_yunits(self._y))
posx, posy = trans.transform_point((posx, posy))
x_box, y_box, w_box, h_box = _get_textbox(self, renderer)
self._bbox_patch.set_bounds(0., 0., w_box, h_box)
theta = np.deg2rad(self.get_rotation())
tr = Affine2D().rotate(theta)
tr = tr.translate(posx + x_box, posy + y_box)
self._bbox_patch.set_transform(tr)
fontsize_in_pixel = renderer.points_to_pixels(self.get_size())
self._bbox_patch.set_mutation_scale(fontsize_in_pixel)
def _draw_bbox(self, renderer, posx, posy):
""" Update the location and the size of the bbox
(FancyBboxPatch), and draw
"""
x_box, y_box, w_box, h_box = _get_textbox(self, renderer)
self._bbox_patch.set_bounds(0., 0., w_box, h_box)
theta = np.deg2rad(self.get_rotation())
tr = Affine2D().rotate(theta)
tr = tr.translate(posx + x_box, posy + y_box)
self._bbox_patch.set_transform(tr)
fontsize_in_pixel = renderer.points_to_pixels(self.get_size())
self._bbox_patch.set_mutation_scale(fontsize_in_pixel)
self._bbox_patch.draw(renderer)
def _update_clip_properties(self):
clipprops = dict(clip_box=self.clipbox,
clip_path=self._clippath,
clip_on=self._clipon)
if self._bbox_patch:
            self._bbox_patch.update(clipprops)
def set_clip_box(self, clipbox):
"""
Set the artist's clip :class:`~matplotlib.transforms.Bbox`.
ACCEPTS: a :class:`matplotlib.transforms.Bbox` instance
"""
super(Text, self).set_clip_box(clipbox)
self._update_clip_properties()
def set_clip_path(self, path, transform=None):
"""
Set the artist's clip path, which may be:
* a :class:`~matplotlib.patches.Patch` (or subclass) instance
* a :class:`~matplotlib.path.Path` instance, in which case
an optional :class:`~matplotlib.transforms.Transform`
instance may be provided, which will be applied to the
path before using it for clipping.
* *None*, to remove the clipping path
For efficiency, if the path happens to be an axis-aligned
rectangle, this method will set the clipping box to the
corresponding rectangle and set the clipping path to *None*.
ACCEPTS: [ (:class:`~matplotlib.path.Path`,
:class:`~matplotlib.transforms.Transform`) |
:class:`~matplotlib.patches.Patch` | None ]
"""
super(Text, self).set_clip_path(path, transform)
self._update_clip_properties()
def set_clip_on(self, b):
"""
Set whether artist uses clipping.
When False, artists will be visible outside of the axes, which can lead
to unexpected results.
Parameters
----------
b : bool
.. ACCEPTS: bool
"""
super(Text, self).set_clip_on(b)
self._update_clip_properties()
def get_wrap(self):
"""Returns the wrapping state for the text."""
return self._wrap
def set_wrap(self, wrap):
"""Sets the wrapping state for the text.
Parameters
----------
wrap : bool
.. ACCEPTS: bool
"""
self._wrap = wrap
def _get_wrap_line_width(self):
"""
Returns the maximum line width for wrapping text based on the
current orientation.
"""
x0, y0 = self.get_transform().transform(self.get_position())
figure_box = self.get_figure().get_window_extent()
# Calculate available width based on text alignment
alignment = self.get_horizontalalignment()
self.set_rotation_mode('anchor')
rotation = self.get_rotation()
left = self._get_dist_to_box(rotation, x0, y0, figure_box)
right = self._get_dist_to_box(
(180 + rotation) % 360,
x0,
y0,
figure_box)
if alignment == 'left':
line_width = left
elif alignment == 'right':
line_width = right
else:
line_width = 2 * min(left, right)
return line_width
def _get_dist_to_box(self, rotation, x0, y0, figure_box):
"""
        Return the distance, in pixels, from the point (*x0*, *y0*) to the
        edge of *figure_box*, measured along the direction given by *rotation*.
"""
if rotation > 270:
quad = rotation - 270
h1 = y0 / math.cos(math.radians(quad))
h2 = (figure_box.x1 - x0) / math.cos(math.radians(90 - quad))
elif rotation > 180:
quad = rotation - 180
h1 = x0 / math.cos(math.radians(quad))
h2 = y0 / math.cos(math.radians(90 - quad))
elif rotation > 90:
quad = rotation - 90
h1 = (figure_box.y1 - y0) / math.cos(math.radians(quad))
h2 = x0 / math.cos(math.radians(90 - quad))
else:
h1 = (figure_box.x1 - x0) / math.cos(math.radians(rotation))
h2 = (figure_box.y1 - y0) / math.cos(math.radians(90 - rotation))
return min(h1, h2)
def _get_rendered_text_width(self, text):
"""
Returns the width of a given text string, in pixels.
"""
w, h, d = self._renderer.get_text_width_height_descent(
text,
self.get_fontproperties(),
False)
return math.ceil(w)
def _get_wrapped_text(self):
"""
Return a copy of the text with new lines added, so that
the text is wrapped relative to the parent figure.
"""
# Not fit to handle breaking up latex syntax correctly, so
# ignore latex for now.
if self.get_usetex():
return self.get_text()
# Build the line incrementally, for a more accurate measure of length
line_width = self._get_wrap_line_width()
wrapped_str = ""
line = ""
for word in self.get_text().split(' '):
            # New lines in the user's text need to force a split, so that it's
            # not using the longest current line width in the line being built
sub_words = word.split('\n')
for i in range(len(sub_words)):
current_width = self._get_rendered_text_width(
line + ' ' + sub_words[i])
# Split long lines, and each newline found in the current word
if current_width > line_width or i > 0:
wrapped_str += line + '\n'
line = ""
if line == "":
line = sub_words[i]
else:
line += ' ' + sub_words[i]
return wrapped_str + line
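    # Usage sketch (an assumption): wrapping is opt-in per Text instance and
    # the wrap width is derived from the parent figure, so long strings are
    # only broken once a renderer is available (i.e. at draw time).
    #
    #     import matplotlib.pyplot as plt
    #     fig = plt.figure()
    #     fig.text(0.02, 0.5, "a fairly long sentence " * 10, wrap=True)
    #     fig.savefig("wrapped.png")  # hypothetical output path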
@artist.allow_rasterization
def draw(self, renderer):
"""
Draws the :class:`Text` object to the given *renderer*.
"""
if renderer is not None:
self._renderer = renderer
if not self.get_visible():
return
if self.get_text() == '':
return
renderer.open_group('text', self.get_gid())
with _wrap_text(self) as textobj:
bbox, info, descent = textobj._get_layout(renderer)
trans = textobj.get_transform()
# don't use textobj.get_position here, which refers to text
# position in Text, and dash position in TextWithDash:
posx = float(textobj.convert_xunits(textobj._x))
posy = float(textobj.convert_yunits(textobj._y))
posx, posy = trans.transform_point((posx, posy))
if not np.isfinite(posx) or not np.isfinite(posy):
_log.warning("posx and posy should be finite values")
return
canvasw, canvash = renderer.get_canvas_width_height()
# draw the FancyBboxPatch
if textobj._bbox_patch:
textobj._draw_bbox(renderer, posx, posy)
gc = renderer.new_gc()
gc.set_foreground(textobj.get_color())
gc.set_alpha(textobj.get_alpha())
gc.set_url(textobj._url)
textobj._set_gc_clip(gc)
angle = textobj.get_rotation()
for line, wh, x, y in info:
mtext = textobj if len(info) == 1 else None
x = x + posx
y = y + posy
if renderer.flipy():
y = canvash - y
clean_line, ismath = textobj.is_math_text(line,
self.get_usetex())
if textobj.get_path_effects():
from matplotlib.patheffects import PathEffectRenderer
textrenderer = PathEffectRenderer(
textobj.get_path_effects(), renderer)
else:
textrenderer = renderer
if textobj.get_usetex():
textrenderer.draw_tex(gc, x, y, clean_line,
textobj._fontproperties, angle,
mtext=mtext)
else:
textrenderer.draw_text(gc, x, y, clean_line,
textobj._fontproperties, angle,
ismath=ismath, mtext=mtext)
gc.restore()
renderer.close_group('text')
self.stale = False
def get_color(self):
"Return the color of the text"
return self._color
def get_fontproperties(self):
"Return the :class:`~font_manager.FontProperties` object"
return self._fontproperties
def get_font_properties(self):
'alias for get_fontproperties'
return self.get_fontproperties()
def get_family(self):
"Return the list of font families used for font lookup"
return self._fontproperties.get_family()
def get_fontfamily(self):
'alias for get_family'
return self.get_family()
def get_name(self):
"Return the font name as string"
return self._fontproperties.get_name()
def get_style(self):
"Return the font style as string"
return self._fontproperties.get_style()
def get_size(self):
"Return the font size as integer"
return self._fontproperties.get_size_in_points()
def get_variant(self):
"Return the font variant as a string"
return self._fontproperties.get_variant()
def get_fontvariant(self):
'alias for get_variant'
return self.get_variant()
def get_weight(self):
"Get the font weight as string or number"
return self._fontproperties.get_weight()
def get_fontname(self):
'alias for get_name'
return self.get_name()
def get_fontstyle(self):
'alias for get_style'
return self.get_style()
def get_fontsize(self):
'alias for get_size'
return self.get_size()
def get_fontweight(self):
'alias for get_weight'
return self.get_weight()
def get_stretch(self):
'Get the font stretch as a string or number'
return self._fontproperties.get_stretch()
def get_fontstretch(self):
'alias for get_stretch'
return self.get_stretch()
def get_ha(self):
'alias for get_horizontalalignment'
return self.get_horizontalalignment()
def get_horizontalalignment(self):
"""
Return the horizontal alignment as string. Will be one of
'left', 'center' or 'right'.
"""
return self._horizontalalignment
def get_unitless_position(self):
"Return the unitless position of the text as a tuple (*x*, *y*)"
# This will get the position with all unit information stripped away.
# This is here for convenience since it is done in several locations.
x = float(self.convert_xunits(self._x))
y = float(self.convert_yunits(self._y))
return x, y
def get_position(self):
"Return the position of the text as a tuple (*x*, *y*)"
        # This should return the same data (possibly unitized) as was
# specified with 'set_x' and 'set_y'.
return self._x, self._y
def get_prop_tup(self, renderer=None):
"""
Return a hashable tuple of properties.
Not intended to be human readable, but useful for backends who
want to cache derived information about text (e.g., layouts) and
need to know if the text has changed.
"""
x, y = self.get_unitless_position()
renderer = renderer or self._renderer
return (x, y, self.get_text(), self._color,
self._verticalalignment, self._horizontalalignment,
hash(self._fontproperties),
self._rotation, self._rotation_mode,
self.figure.dpi, weakref.ref(renderer),
self._linespacing
)
def get_text(self):
"Get the text as string"
return self._text
def get_va(self):
        'alias for :meth:`get_verticalalignment`'
return self.get_verticalalignment()
def get_verticalalignment(self):
"""
Return the vertical alignment as string. Will be one of
'top', 'center', 'bottom' or 'baseline'.
"""
return self._verticalalignment
def get_window_extent(self, renderer=None, dpi=None):
'''
Return a :class:`~matplotlib.transforms.Bbox` object bounding
the text, in display units.
In addition to being used internally, this is useful for
specifying clickable regions in a png file on a web page.
*renderer* defaults to the _renderer attribute of the text
object. This is not assigned until the first execution of
:meth:`draw`, so you must use this kwarg if you want
to call :meth:`get_window_extent` prior to the first
:meth:`draw`. For getting web page regions, it is
simpler to call the method after saving the figure.
*dpi* defaults to self.figure.dpi; the renderer dpi is
irrelevant. For the web application, if figure.dpi is not
the value used when saving the figure, then the value that
was used must be specified as the *dpi* argument.
'''
#return _unit_box
if not self.get_visible():
return Bbox.unit()
if dpi is not None:
dpi_orig = self.figure.dpi
self.figure.dpi = dpi
if self.get_text() == '':
tx, ty = self._get_xy_display()
return Bbox.from_bounds(tx, ty, 0, 0)
if renderer is not None:
self._renderer = renderer
if self._renderer is None:
raise RuntimeError('Cannot get window extent w/o renderer')
bbox, info, descent = self._get_layout(self._renderer)
x, y = self.get_unitless_position()
x, y = self.get_transform().transform_point((x, y))
bbox = bbox.translated(x, y)
if dpi is not None:
self.figure.dpi = dpi_orig
return bbox
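    # Usage sketch (an assumption): before the first draw a renderer must be
    # supplied explicitly, e.g. from an Agg-backed canvas; afterwards the
    # cached one is reused.
    #
    #     import matplotlib.pyplot as plt
    #     fig, ax = plt.subplots()
    #     t = ax.text(0.1, 0.1, "measure me")
    #     renderer = fig.canvas.get_renderer()  # Agg-backed canvases only
    #     bbox = t.get_window_extent(renderer=renderer)
    #     print(bbox.width, bbox.height)        # size in display (pixel) units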
def set_backgroundcolor(self, color):
"""
Set the background color of the text by updating the bbox.
.. seealso::
:meth:`set_bbox`
To change the position of the bounding box.
ACCEPTS: any matplotlib color
"""
if self._bbox_patch is None:
self.set_bbox(dict(facecolor=color, edgecolor=color))
else:
self._bbox_patch.update(dict(facecolor=color))
self._update_clip_properties()
self.stale = True
def set_color(self, color):
"""
Set the foreground color of the text
ACCEPTS: any matplotlib color
"""
# Make sure it is hashable, or get_prop_tup will fail.
try:
hash(color)
except TypeError:
color = tuple(color)
self._color = color
self.stale = True
def set_ha(self, align):
'alias for set_horizontalalignment'
self.set_horizontalalignment(align)
def set_horizontalalignment(self, align):
"""
Set the horizontal alignment to one of
ACCEPTS: [ 'center' | 'right' | 'left' ]
"""
legal = ('center', 'right', 'left')
if align not in legal:
raise ValueError('Horizontal alignment must be one of %s' %
str(legal))
self._horizontalalignment = align
self.stale = True
def set_ma(self, align):
'alias for set_multialignment'
self.set_multialignment(align)
def set_multialignment(self, align):
"""
Set the alignment for multiple lines layout. The layout of the
        bounding box of all the lines is determined by the horizontalalignment
        and verticalalignment properties, but the multiline text within that
        box can be aligned differently with this property.
ACCEPTS: ['left' | 'right' | 'center' ]
"""
legal = ('center', 'right', 'left')
if align not in legal:
            raise ValueError('Multi-alignment must be one of %s' %
                             str(legal))
self._multialignment = align
self.stale = True
def set_linespacing(self, spacing):
"""
Set the line spacing as a multiple of the font size.
Default is 1.2.
ACCEPTS: float (multiple of font size)
"""
self._linespacing = spacing
self.stale = True
def set_family(self, fontname):
"""
Set the font family. May be either a single string, or a list
of strings in decreasing priority. Each string may be either
a real font name or a generic font class name. If the latter,
the specific font names will be looked up in the
:file:`matplotlibrc` file.
ACCEPTS: [FONTNAME | 'serif' | 'sans-serif' | 'cursive' | 'fantasy' |
'monospace' ]
"""
self._fontproperties.set_family(fontname)
self.stale = True
def set_variant(self, variant):
"""
Set the font variant, either 'normal' or 'small-caps'.
ACCEPTS: [ 'normal' | 'small-caps' ]
"""
self._fontproperties.set_variant(variant)
self.stale = True
def set_fontvariant(self, variant):
'alias for set_variant'
return self.set_variant(variant)
def set_name(self, fontname):
"""alias for set_family"""
return self.set_family(fontname)
def set_fontname(self, fontname):
"""alias for set_family"""
self.set_family(fontname)
def set_style(self, fontstyle):
"""
Set the font style.
ACCEPTS: [ 'normal' | 'italic' | 'oblique']
"""
self._fontproperties.set_style(fontstyle)
self.stale = True
def set_fontstyle(self, fontstyle):
'alias for set_style'
return self.set_style(fontstyle)
def set_size(self, fontsize):
"""
Set the font size. May be either a size string, relative to
the default font size, or an absolute font size in points.
ACCEPTS: [size in points | 'xx-small' | 'x-small' | 'small' |
'medium' | 'large' | 'x-large' | 'xx-large' ]
"""
self._fontproperties.set_size(fontsize)
self.stale = True
def set_fontsize(self, fontsize):
'alias for set_size'
return self.set_size(fontsize)
def set_weight(self, weight):
"""
Set the font weight.
ACCEPTS: [a numeric value in range 0-1000 | 'ultralight' | 'light' |
'normal' | 'regular' | 'book' | 'medium' | 'roman' |
'semibold' | 'demibold' | 'demi' | 'bold' | 'heavy' |
'extra bold' | 'black' ]
"""
self._fontproperties.set_weight(weight)
self.stale = True
def set_fontweight(self, weight):
'alias for set_weight'
return self.set_weight(weight)
def set_stretch(self, stretch):
"""
Set the font stretch (horizontal condensation or expansion).
ACCEPTS: [a numeric value in range 0-1000 | 'ultra-condensed' |
'extra-condensed' | 'condensed' | 'semi-condensed' |
'normal' | 'semi-expanded' | 'expanded' | 'extra-expanded' |
'ultra-expanded' ]
"""
self._fontproperties.set_stretch(stretch)
self.stale = True
def set_fontstretch(self, stretch):
'alias for set_stretch'
return self.set_stretch(stretch)
def set_position(self, xy):
"""
Set the (*x*, *y*) position of the text
ACCEPTS: (x,y)
"""
self.set_x(xy[0])
self.set_y(xy[1])
def set_x(self, x):
"""
Set the *x* position of the text
ACCEPTS: float
"""
self._x = x
self.stale = True
def set_y(self, y):
"""
Set the *y* position of the text
ACCEPTS: float
"""
self._y = y
self.stale = True
def set_rotation(self, s):
"""
Set the rotation of the text
ACCEPTS: [ angle in degrees | 'vertical' | 'horizontal' ]
"""
self._rotation = s
self.stale = True
def set_va(self, align):
'alias for set_verticalalignment'
self.set_verticalalignment(align)
def set_verticalalignment(self, align):
"""
Set the vertical alignment
ACCEPTS: [ 'center' | 'top' | 'bottom' | 'baseline' ]
"""
legal = ('top', 'bottom', 'center', 'baseline')
if align not in legal:
raise ValueError('Vertical alignment must be one of %s' %
str(legal))
self._verticalalignment = align
self.stale = True
def set_text(self, s):
"""
Set the text string *s*
It may contain newlines (``\\n``) or math in LaTeX syntax.
ACCEPTS: string or anything printable with '%s' conversion.
"""
self._text = '%s' % (s,)
self.stale = True
@staticmethod
def is_math_text(s, usetex=None):
"""
Returns a cleaned string and a boolean flag.
The flag indicates if the given string *s* contains any mathtext,
determined by counting unescaped dollar signs. If no mathtext
is present, the cleaned string has its dollar signs unescaped.
If usetex is on, the flag always has the value "TeX".
"""
# Did we find an even number of non-escaped dollar signs?
        # If so, treat it as math text.
if usetex is None:
usetex = rcParams['text.usetex']
if usetex:
if s == ' ':
s = r'\ '
return s, 'TeX'
if cbook.is_math_text(s):
return s, True
else:
return s.replace(r'\$', '$'), False
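    # Behaviour sketch (an assumption, with usetex disabled in rcParams):
    #
    #     Text.is_math_text(r"plain $x^2$ math", usetex=False)
    #     # -> ("plain $x^2$ math", True)   an unescaped $...$ pair is found
    #     Text.is_math_text(r"costs \$5", usetex=False)
    #     # -> ("costs $5", False)          the escaped dollar sign is unescaped
    #     Text.is_math_text("anything", usetex=True)
    #     # -> ("anything", "TeX")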
def set_fontproperties(self, fp):
"""
Set the font properties that control the text. *fp* must be a
:class:`matplotlib.font_manager.FontProperties` object.
ACCEPTS: a :class:`matplotlib.font_manager.FontProperties` instance
"""
if isinstance(fp, six.string_types):
fp = FontProperties(fp)
self._fontproperties = fp.copy()
self.stale = True
def set_font_properties(self, fp):
'alias for set_fontproperties'
self.set_fontproperties(fp)
def set_usetex(self, usetex):
"""
Parameters
----------
usetex : bool or None
Whether to render using TeX, ``None`` means to use
:rc:`text.usetex`.
.. ACCEPTS: bool or None
"""
if usetex is None:
self._usetex = rcParams['text.usetex']
else:
self._usetex = bool(usetex)
self.stale = True
def get_usetex(self):
"""
Return whether this `Text` object uses TeX for rendering.
If the user has not manually set this value, it defaults to
:rc:`text.usetex`.
"""
if self._usetex is None:
return rcParams['text.usetex']
else:
return self._usetex
docstring.interpd.update(Text=artist.kwdoc(Text))
docstring.dedent_interpd(Text.__init__)
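# Illustrative sketch (not part of the original module): a minimal round trip
# through the public `Text` API defined above. The function name is
# hypothetical and the function is never called at import time.
def _example_text_usage():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    t = ax.text(0.5, 0.5, "hello", color="tab:blue",
                rotation=30, ha="center", va="center", wrap=True)
    t.set_fontsize(14)
    t.set_bbox(dict(boxstyle="round", facecolor="white", alpha=0.8))
    assert t.get_rotation() == 30.0
    return fig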
class TextWithDash(Text):
"""
This is basically a :class:`~matplotlib.text.Text` with a dash
(drawn with a :class:`~matplotlib.lines.Line2D`) before/after
it. It is intended to be a drop-in replacement for
:class:`~matplotlib.text.Text`, and should behave identically to
it when *dashlength* = 0.0.
The dash always comes between the point specified by
:meth:`~matplotlib.text.Text.set_position` and the text. When a
dash exists, the text alignment arguments (*horizontalalignment*,
*verticalalignment*) are ignored.
*dashlength* is the length of the dash in canvas units.
(default = 0.0).
*dashdirection* is one of 0 or 1, where 0 draws the dash after the
text and 1 before. (default = 0).
*dashrotation* specifies the rotation of the dash, and should
generally stay *None*. In this case
:meth:`~matplotlib.text.TextWithDash.get_dashrotation` returns
:meth:`~matplotlib.text.Text.get_rotation`. (i.e., the dash takes
its rotation from the text's rotation). Because the text center is
projected onto the dash, major deviations in the rotation cause
what may be considered visually unappealing results.
(default = *None*)
*dashpad* is a padding length to add (or subtract) space
between the text and the dash, in canvas units.
(default = 3)
*dashpush* "pushes" the dash and text away from the point
specified by :meth:`~matplotlib.text.Text.set_position` by the
amount in canvas units. (default = 0)
.. note::
The alignment of the two objects is based on the bounding box
of the :class:`~matplotlib.text.Text`, as obtained by
:meth:`~matplotlib.artist.Artist.get_window_extent`. This, in
turn, appears to depend on the font metrics as given by the
rendering backend. Hence the quality of the "centering" of the
label text with respect to the dash varies depending on the
backend used.
.. note::
I'm not sure that I got the
:meth:`~matplotlib.text.TextWithDash.get_window_extent` right,
or whether that's sufficient for providing the object bounding
box.
"""
__name__ = 'textwithdash'
def __str__(self):
return "TextWithDash(%g,%g,%s)" % (self._x, self._y, repr(self._text))
def __init__(self,
x=0, y=0, text='',
color=None, # defaults to rc params
verticalalignment='center',
horizontalalignment='center',
multialignment=None,
fontproperties=None, # defaults to FontProperties()
rotation=None,
linespacing=None,
dashlength=0.0,
dashdirection=0,
dashrotation=None,
dashpad=3,
dashpush=0,
):
Text.__init__(self, x=x, y=y, text=text, color=color,
verticalalignment=verticalalignment,
horizontalalignment=horizontalalignment,
multialignment=multialignment,
fontproperties=fontproperties,
rotation=rotation,
linespacing=linespacing)
# The position (x,y) values for text and dashline
# are bogus as given in the instantiation; they will
# be set correctly by update_coords() in draw()
self.dashline = Line2D(xdata=(x, x),
ydata=(y, y),
color='k',
linestyle='-')
self._dashx = float(x)
self._dashy = float(y)
self._dashlength = dashlength
self._dashdirection = dashdirection
self._dashrotation = dashrotation
self._dashpad = dashpad
self._dashpush = dashpush
#self.set_bbox(dict(pad=0))
def get_unitless_position(self):
"Return the unitless position of the text as a tuple (*x*, *y*)"
# This will get the position with all unit information stripped away.
# This is here for convenience since it is done in several locations.
x = float(self.convert_xunits(self._dashx))
y = float(self.convert_yunits(self._dashy))
return x, y
def get_position(self):
"Return the position of the text as a tuple (*x*, *y*)"
# This should return the same data (possibly unitized) as was
# specified with set_x and set_y
return self._dashx, self._dashy
def get_prop_tup(self, renderer=None):
"""
Return a hashable tuple of properties.
Not intended to be human readable, but useful for backends who
want to cache derived information about text (e.g., layouts) and
need to know if the text has changed.
"""
        props = list(Text.get_prop_tup(self, renderer=renderer))
props.extend([self._x, self._y, self._dashlength,
self._dashdirection, self._dashrotation, self._dashpad,
self._dashpush])
return tuple(props)
def draw(self, renderer):
"""
Draw the :class:`TextWithDash` object to the given *renderer*.
"""
self.update_coords(renderer)
Text.draw(self, renderer)
if self.get_dashlength() > 0.0:
self.dashline.draw(renderer)
self.stale = False
def update_coords(self, renderer):
"""
Computes the actual *x*, *y* coordinates for text based on the
input *x*, *y* and the *dashlength*. Since the rotation is
with respect to the actual canvas's coordinates we need to map
back and forth.
"""
dashx, dashy = self.get_unitless_position()
dashlength = self.get_dashlength()
# Shortcircuit this process if we don't have a dash
if dashlength == 0.0:
self._x, self._y = dashx, dashy
return
dashrotation = self.get_dashrotation()
dashdirection = self.get_dashdirection()
dashpad = self.get_dashpad()
dashpush = self.get_dashpush()
angle = get_rotation(dashrotation)
theta = np.pi * (angle / 180.0 + dashdirection - 1)
cos_theta, sin_theta = np.cos(theta), np.sin(theta)
transform = self.get_transform()
# Compute the dash end points
# The 'c' prefix is for canvas coordinates
cxy = transform.transform_point((dashx, dashy))
cd = np.array([cos_theta, sin_theta])
c1 = cxy + dashpush * cd
c2 = cxy + (dashpush + dashlength) * cd
inverse = transform.inverted()
(x1, y1) = inverse.transform_point(tuple(c1))
(x2, y2) = inverse.transform_point(tuple(c2))
self.dashline.set_data((x1, x2), (y1, y2))
# We now need to extend this vector out to
# the center of the text area.
# The basic problem here is that we're "rotating"
# two separate objects but want it to appear as
# if they're rotated together.
# This is made non-trivial because of the
# interaction between text rotation and alignment -
# text alignment is based on the bbox after rotation.
# We reset/force both alignments to 'center'
# so we can do something relatively reasonable.
# There's probably a better way to do this by
# embedding all this in the object's transformations,
# but I don't grok the transformation stuff
# well enough yet.
we = Text.get_window_extent(self, renderer=renderer)
w, h = we.width, we.height
# Watch for zeros
if sin_theta == 0.0:
dx = w
dy = 0.0
elif cos_theta == 0.0:
dx = 0.0
dy = h
else:
tan_theta = sin_theta / cos_theta
dx = w
dy = w * tan_theta
if dy > h or dy < -h:
dy = h
dx = h / tan_theta
cwd = np.array([dx, dy]) / 2
cwd *= 1 + dashpad / np.sqrt(np.dot(cwd, cwd))
cw = c2 + (dashdirection * 2 - 1) * cwd
newx, newy = inverse.transform_point(tuple(cw))
self._x, self._y = newx, newy
# Now set the window extent
# I'm not at all sure this is the right way to do this.
we = Text.get_window_extent(self, renderer=renderer)
self._twd_window_extent = we.frozen()
self._twd_window_extent.update_from_data_xy(np.array([c1]), False)
# Finally, make text align center
Text.set_horizontalalignment(self, 'center')
Text.set_verticalalignment(self, 'center')
def get_window_extent(self, renderer=None):
'''
Return a :class:`~matplotlib.transforms.Bbox` object bounding
the text, in display units.
In addition to being used internally, this is useful for
specifying clickable regions in a png file on a web page.
*renderer* defaults to the _renderer attribute of the text
object. This is not assigned until the first execution of
:meth:`draw`, so you must use this kwarg if you want
to call :meth:`get_window_extent` prior to the first
:meth:`draw`. For getting web page regions, it is
simpler to call the method after saving the figure.
'''
self.update_coords(renderer)
if self.get_dashlength() == 0.0:
return Text.get_window_extent(self, renderer=renderer)
else:
return self._twd_window_extent
def get_dashlength(self):
"""
Get the length of the dash.
"""
return self._dashlength
def set_dashlength(self, dl):
"""
Set the length of the dash.
ACCEPTS: float (canvas units)
"""
self._dashlength = dl
self.stale = True
def get_dashdirection(self):
"""
        Get the direction of the dash: 1 is before the text and 0 is after.
"""
return self._dashdirection
def set_dashdirection(self, dd):
"""
Set the direction of the dash following the text.
1 is before the text and 0 is after. The default
is 0, which is what you'd want for the typical
case of ticks below and on the left of the figure.
ACCEPTS: int (1 is before, 0 is after)
"""
self._dashdirection = dd
self.stale = True
def get_dashrotation(self):
"""
Get the rotation of the dash in degrees.
"""
if self._dashrotation is None:
return self.get_rotation()
else:
return self._dashrotation
def set_dashrotation(self, dr):
"""
Set the rotation of the dash, in degrees
ACCEPTS: float (degrees)
"""
self._dashrotation = dr
self.stale = True
def get_dashpad(self):
"""
Get the extra spacing between the dash and the text, in canvas units.
"""
return self._dashpad
def set_dashpad(self, dp):
"""
Set the "pad" of the TextWithDash, which is the extra spacing
between the dash and the text, in canvas units.
ACCEPTS: float (canvas units)
"""
self._dashpad = dp
self.stale = True
def get_dashpush(self):
"""
Get the extra spacing between the dash and the specified text
position, in canvas units.
"""
return self._dashpush
def set_dashpush(self, dp):
"""
Set the "push" of the TextWithDash, which
is the extra spacing between the beginning
of the dash and the specified position.
ACCEPTS: float (canvas units)
"""
self._dashpush = dp
self.stale = True
def set_position(self, xy):
"""
Set the (*x*, *y*) position of the :class:`TextWithDash`.
ACCEPTS: (x, y)
"""
self.set_x(xy[0])
self.set_y(xy[1])
def set_x(self, x):
"""
Set the *x* position of the :class:`TextWithDash`.
ACCEPTS: float
"""
self._dashx = float(x)
self.stale = True
def set_y(self, y):
"""
Set the *y* position of the :class:`TextWithDash`.
ACCEPTS: float
"""
self._dashy = float(y)
self.stale = True
def set_transform(self, t):
"""
Set the :class:`matplotlib.transforms.Transform` instance used
by this artist.
ACCEPTS: a :class:`matplotlib.transforms.Transform` instance
"""
Text.set_transform(self, t)
self.dashline.set_transform(t)
self.stale = True
def get_figure(self):
'return the figure instance the artist belongs to'
return self.figure
def set_figure(self, fig):
"""
        Set the figure instance the artist belongs to.
ACCEPTS: a :class:`matplotlib.figure.Figure` instance
"""
Text.set_figure(self, fig)
self.dashline.set_figure(fig)
docstring.interpd.update(TextWithDash=artist.kwdoc(TextWithDash))
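# Illustrative sketch (not part of the original module): TextWithDash is
# mostly used indirectly for dashed, tick-style labels, but it can be added
# to an axes by hand. The helper name is hypothetical and is never called at
# import time; the axes supplies the figure and a data-coordinate transform
# when the artist is added.
def _example_text_with_dash():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    twd = TextWithDash(0.5, 0.5, "labelled point",
                       dashlength=25.0, dashdirection=1, dashpad=5)
    ax.add_artist(twd)
    return fig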
class OffsetFrom(object):
'Callable helper class for working with `Annotation`'
def __init__(self, artist, ref_coord, unit="points"):
'''
Parameters
----------
artist : `Artist`, `BboxBase`, or `Transform`
The object to compute the offset from.
ref_coord : length 2 sequence
            If `artist` is an `Artist` or `BboxBase`, this value is
            the location of the offset origin in fractions of the
            `artist` bounding box.
If `artist` is a transform, the offset origin is the
transform applied to this value.
        unit : {'points', 'pixels'}
The screen units to use (pixels or points) for the offset
input.
'''
self._artist = artist
self._ref_coord = ref_coord
self.set_unit(unit)
def set_unit(self, unit):
'''
The unit for input to the transform used by ``__call__``
Parameters
----------
unit : {'points', 'pixels'}
'''
if unit not in ["points", "pixels"]:
raise ValueError("'unit' must be one of [ 'points' | 'pixels' ]")
self._unit = unit
def get_unit(self):
'The unit for input to the transform used by ``__call__``'
return self._unit
def _get_scale(self, renderer):
unit = self.get_unit()
if unit == "pixels":
return 1.
else:
return renderer.points_to_pixels(1.)
def __call__(self, renderer):
'''
Return the offset transform.
Parameters
----------
renderer : `RendererBase`
The renderer to use to compute the offset
Returns
-------
transform : `Transform`
Maps (x, y) in pixel or point units to screen units
relative to the given artist.
'''
if isinstance(self._artist, Artist):
bbox = self._artist.get_window_extent(renderer)
l, b, w, h = bbox.bounds
xf, yf = self._ref_coord
x, y = l + w * xf, b + h * yf
elif isinstance(self._artist, BboxBase):
l, b, w, h = self._artist.bounds
xf, yf = self._ref_coord
x, y = l + w * xf, b + h * yf
elif isinstance(self._artist, Transform):
x, y = self._artist.transform_point(self._ref_coord)
else:
raise RuntimeError("unknown type")
sc = self._get_scale(renderer)
tr = Affine2D().scale(sc, sc).translate(x, y)
return tr
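# Illustrative sketch (not part of the original module): OffsetFrom is meant
# to be passed as a coordinate callable, typically as the `xycoords` of an
# annotation, so that one artist is positioned at a point offset from
# another. The helper name is hypothetical and is never called at import
# time.
def _example_offset_from():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    base = ax.annotate("base", xy=(0.3, 0.6), xycoords="axes fraction")
    # Place the second label 20 points to the right of the lower-left
    # corner of `base`'s bounding box.
    ax.annotate("offset by 20 pt", xy=(20, 0),
                xycoords=OffsetFrom(base, (0, 0), unit="points"))
    return fig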
class _AnnotationBase(object):
def __init__(self,
xy,
xycoords='data',
annotation_clip=None):
self.xy = xy
self.xycoords = xycoords
self.set_annotation_clip(annotation_clip)
self._draggable = None
def _get_xy(self, renderer, x, y, s):
if isinstance(s, tuple):
s1, s2 = s
else:
s1, s2 = s, s
if s1 == 'data':
x = float(self.convert_xunits(x))
if s2 == 'data':
y = float(self.convert_yunits(y))
tr = self._get_xy_transform(renderer, s)
x1, y1 = tr.transform_point((x, y))
return x1, y1
def _get_xy_transform(self, renderer, s):
if isinstance(s, tuple):
s1, s2 = s
from matplotlib.transforms import blended_transform_factory
tr1 = self._get_xy_transform(renderer, s1)
tr2 = self._get_xy_transform(renderer, s2)
tr = blended_transform_factory(tr1, tr2)
return tr
elif callable(s):
tr = s(renderer)
if isinstance(tr, BboxBase):
return BboxTransformTo(tr)
elif isinstance(tr, Transform):
return tr
else:
raise RuntimeError("unknown return type ...")
elif isinstance(s, Artist):
bbox = s.get_window_extent(renderer)
return BboxTransformTo(bbox)
elif isinstance(s, BboxBase):
return BboxTransformTo(s)
elif isinstance(s, Transform):
return s
elif not isinstance(s, six.string_types):
raise RuntimeError("unknown coordinate type : %s" % (s,))
if s == 'data':
return self.axes.transData
elif s == 'polar':
from matplotlib.projections import PolarAxes
tr = PolarAxes.PolarTransform()
trans = tr + self.axes.transData
return trans
s_ = s.split()
if len(s_) != 2:
raise ValueError("%s is not a recognized coordinate" % s)
bbox0, xy0 = None, None
bbox_name, unit = s_
# if unit is offset-like
if bbox_name == "figure":
bbox0 = self.figure.bbox
elif bbox_name == "axes":
bbox0 = self.axes.bbox
# elif bbox_name == "bbox":
# if bbox is None:
# raise RuntimeError("bbox is specified as a coordinate but "
# "never set")
# bbox0 = self._get_bbox(renderer, bbox)
if bbox0 is not None:
xy0 = bbox0.bounds[:2]
elif bbox_name == "offset":
xy0 = self._get_ref_xy(renderer)
if xy0 is not None:
# reference x, y in display coordinate
ref_x, ref_y = xy0
from matplotlib.transforms import Affine2D
if unit == "points":
# dots per points
dpp = self.figure.get_dpi() / 72.
tr = Affine2D().scale(dpp, dpp)
elif unit == "pixels":
tr = Affine2D()
elif unit == "fontsize":
fontsize = self.get_size()
dpp = fontsize * self.figure.get_dpi() / 72.
tr = Affine2D().scale(dpp, dpp)
elif unit == "fraction":
w, h = bbox0.bounds[2:]
tr = Affine2D().scale(w, h)
else:
raise ValueError("%s is not a recognized coordinate" % s)
return tr.translate(ref_x, ref_y)
else:
raise ValueError("%s is not a recognized coordinate" % s)
def _get_ref_xy(self, renderer):
"""
        Return the x, y coordinates (in display coordinates) to be used as a
        reference for any offset coordinate.
"""
if isinstance(self.xycoords, tuple):
s1, s2 = self.xycoords
if ((isinstance(s1, six.string_types)
and s1.split()[0] == "offset")
or (isinstance(s2, six.string_types)
and s2.split()[0] == "offset")):
raise ValueError("xycoords should not be an offset coordinate")
x, y = self.xy
x1, y1 = self._get_xy(renderer, x, y, s1)
x2, y2 = self._get_xy(renderer, x, y, s2)
return x1, y2
elif (isinstance(self.xycoords, six.string_types) and
self.xycoords.split()[0] == "offset"):
raise ValueError("xycoords should not be an offset coordinate")
else:
x, y = self.xy
return self._get_xy(renderer, x, y, self.xycoords)
#raise RuntimeError("must be defined by the derived class")
# def _get_bbox(self, renderer):
# if hasattr(bbox, "bounds"):
# return bbox
# elif hasattr(bbox, "get_window_extent"):
# bbox = bbox.get_window_extent()
# return bbox
# else:
# raise ValueError("A bbox instance is expected but got %s" %
# str(bbox))
def set_annotation_clip(self, b):
"""
set *annotation_clip* attribute.
* True: the annotation will only be drawn when self.xy is inside
the axes.
* False: the annotation will always be drawn regardless of its
position.
* None: the self.xy will be checked only if *xycoords* is "data"
"""
self._annotation_clip = b
def get_annotation_clip(self):
"""
Return *annotation_clip* attribute.
See :meth:`set_annotation_clip` for the meaning of return values.
"""
return self._annotation_clip
def _get_position_xy(self, renderer):
"Return the pixel position of the annotated point."
x, y = self.xy
return self._get_xy(renderer, x, y, self.xycoords)
def _check_xy(self, renderer, xy_pixel):
"""
        Given the xy pixel coordinate, check whether the annotation
        needs to be drawn.
"""
b = self.get_annotation_clip()
if b or (b is None and self.xycoords == "data"):
# check if self.xy is inside the axes.
if not self.axes.contains_point(xy_pixel):
return False
return True
def draggable(self, state=None, use_blit=False):
"""
Set the draggable state -- if state is
* None : toggle the current state
* True : turn draggable on
* False : turn draggable off
If draggable is on, you can drag the annotation on the canvas with
the mouse. The DraggableAnnotation helper instance is returned if
draggable is on.
"""
from matplotlib.offsetbox import DraggableAnnotation
is_draggable = self._draggable is not None
# if state is None we'll toggle
if state is None:
state = not is_draggable
if state:
if self._draggable is None:
self._draggable = DraggableAnnotation(self, use_blit)
else:
if self._draggable is not None:
self._draggable.disconnect()
self._draggable = None
return self._draggable
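# Illustrative sketch (not part of the original module): enabling interactive
# dragging on an annotation (the `Annotation` class is defined just below).
# The helper name is hypothetical, the function is never called at import
# time, and dragging only has an effect in an interactive backend.
def _example_draggable_annotation():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ann = ax.annotate("drag me", xy=(0.5, 0.5), xytext=(0.7, 0.7),
                      arrowprops=dict(arrowstyle="->"))
    ann.draggable(True)  # returns the DraggableAnnotation helper
    return fig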
class Annotation(Text, _AnnotationBase):
def __str__(self):
return "Annotation(%g,%g,%s)" % (self.xy[0],
self.xy[1],
repr(self._text))
@docstring.dedent_interpd
def __init__(self, s, xy,
xytext=None,
xycoords='data',
textcoords=None,
arrowprops=None,
annotation_clip=None,
**kwargs):
'''
Annotate the point ``xy`` with text ``s``.
Additional kwargs are passed to `~matplotlib.text.Text`.
Parameters
----------
s : str
The text of the annotation
xy : iterable
Length 2 sequence specifying the *(x,y)* point to annotate
xytext : iterable, optional
Length 2 sequence specifying the *(x,y)* to place the text
at. If None, defaults to ``xy``.
xycoords : str, Artist, Transform, callable or tuple, optional
The coordinate system that ``xy`` is given in.
For a `str` the allowed values are:
================= ===============================================
Property Description
================= ===============================================
'figure points' points from the lower left of the figure
'figure pixels' pixels from the lower left of the figure
'figure fraction' fraction of figure from lower left
'axes points' points from lower left corner of axes
'axes pixels' pixels from lower left corner of axes
'axes fraction' fraction of axes from lower left
'data' use the coordinate system of the object being
annotated (default)
'polar' *(theta,r)* if not native 'data' coordinates
================= ===============================================
            If an `~matplotlib.artist.Artist` object is passed in, the units
            are fractions of its bounding box.
            If a `~matplotlib.transforms.Transform` object is passed in, it
            is used to transform ``xy`` to screen coordinates.
            If a callable, it must take a
            `~matplotlib.backend_bases.RendererBase` object as input
            and return a `~matplotlib.transforms.Transform` or
            `~matplotlib.transforms.Bbox` object.
            If a `tuple`, it must be a length-2 tuple of str, `Artist`,
            `Transform` or callable objects. The first is used for the
            *x* coordinate and the second for *y*.
See :ref:`plotting-guide-annotation` for more details.
Defaults to ``'data'``
textcoords : str, `Artist`, `Transform`, callable or tuple, optional
            The coordinate system that ``xytext`` is given in, which may
            be different from the coordinate system used for ``xy``.
All ``xycoords`` values are valid as well as the following
strings:
================= =========================================
Property Description
================= =========================================
'offset points' offset (in points) from the *xy* value
'offset pixels' offset (in pixels) from the *xy* value
================= =========================================
defaults to the input of ``xycoords``
arrowprops : dict, optional
If not None, properties used to draw a
`~matplotlib.patches.FancyArrowPatch` arrow between ``xy`` and
``xytext``.
If `arrowprops` does not contain the key ``'arrowstyle'`` the
allowed keys are:
========== ======================================================
Key Description
========== ======================================================
width the width of the arrow in points
headwidth the width of the base of the arrow head in points
headlength the length of the arrow head in points
shrink fraction of total length to 'shrink' from both ends
? any key to :class:`matplotlib.patches.FancyArrowPatch`
========== ======================================================
If the `arrowprops` contains the key ``'arrowstyle'`` the
above keys are forbidden. The allowed values of
``'arrowstyle'`` are:
============ =============================================
Name Attrs
============ =============================================
``'-'`` None
``'->'`` head_length=0.4,head_width=0.2
``'-['`` widthB=1.0,lengthB=0.2,angleB=None
``'|-|'`` widthA=1.0,widthB=1.0
``'-|>'`` head_length=0.4,head_width=0.2
``'<-'`` head_length=0.4,head_width=0.2
``'<->'`` head_length=0.4,head_width=0.2
``'<|-'`` head_length=0.4,head_width=0.2
``'<|-|>'`` head_length=0.4,head_width=0.2
``'fancy'`` head_length=0.4,head_width=0.4,tail_width=0.4
``'simple'`` head_length=0.5,head_width=0.5,tail_width=0.2
``'wedge'`` tail_width=0.3,shrink_factor=0.5
============ =============================================
Valid keys for `~matplotlib.patches.FancyArrowPatch` are:
=============== ==================================================
Key Description
=============== ==================================================
arrowstyle the arrow style
connectionstyle the connection style
relpos default is (0.5, 0.5)
patchA default is bounding box of the text
patchB default is None
shrinkA default is 2 points
shrinkB default is 2 points
mutation_scale default is text size (in points)
mutation_aspect default is 1.
? any key for :class:`matplotlib.patches.PathPatch`
=============== ==================================================
Defaults to None
annotation_clip : bool, optional
Controls the visibility of the annotation when it goes
outside the axes area.
If `True`, the annotation will only be drawn when the
``xy`` is inside the axes. If `False`, the annotation will
always be drawn regardless of its position.
            The default is `None`, which behaves as `True` only if
            *xycoords* is "data".
Returns
-------
Annotation
'''
_AnnotationBase.__init__(self,
xy,
xycoords=xycoords,
annotation_clip=annotation_clip)
# warn about wonky input data
if (xytext is None and
textcoords is not None and
textcoords != xycoords):
warnings.warn("You have used the `textcoords` kwarg, but not "
"the `xytext` kwarg. This can lead to surprising "
"results.")
# clean up textcoords and assign default
if textcoords is None:
textcoords = self.xycoords
self._textcoords = textcoords
# cleanup xytext defaults
if xytext is None:
xytext = self.xy
x, y = xytext
Text.__init__(self, x, y, s, **kwargs)
self.arrowprops = arrowprops
self.arrow = None
if arrowprops is not None:
if "arrowstyle" in arrowprops:
arrowprops = self.arrowprops.copy()
self._arrow_relpos = arrowprops.pop("relpos", (0.5, 0.5))
else:
# modified YAArrow API to be used with FancyArrowPatch
shapekeys = ('width', 'headwidth', 'headlength',
'shrink', 'frac')
arrowprops = dict()
for key, val in self.arrowprops.items():
if key not in shapekeys:
arrowprops[key] = val # basic Patch properties
self.arrow_patch = FancyArrowPatch((0, 0), (1, 1),
**arrowprops)
else:
self.arrow_patch = None
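    # Usage sketch (an assumption, summarizing the docstring above): the two
    # arrowprops flavours select different patch setups.
    #
    #     import matplotlib.pyplot as plt
    #     fig, ax = plt.subplots()
    #     # FancyArrowPatch path (an 'arrowstyle' key is present):
    #     ax.annotate("styled", xy=(0.2, 0.2), xytext=(0.6, 0.6),
    #                 arrowprops=dict(arrowstyle="->", connectionstyle="arc3"))
    #     # YAArrow-compatible path (no 'arrowstyle' key):
    #     ax.annotate("simple", xy=(0.2, 0.8), xytext=(0.6, 0.9),
    #                 arrowprops=dict(width=2, headwidth=8, shrink=0.05))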
def contains(self, event):
contains, tinfo = Text.contains(self, event)
if self.arrow is not None:
in_arrow, _ = self.arrow.contains(event)
contains = contains or in_arrow
if self.arrow_patch is not None:
in_patch, _ = self.arrow_patch.contains(event)
contains = contains or in_patch
return contains, tinfo
@property
def xyann(self):
return self.get_position()
@xyann.setter
def xyann(self, xytext):
self.set_position(xytext)
@property
def anncoords(self):
return self._textcoords
@anncoords.setter
def anncoords(self, coords):
self._textcoords = coords
def set_figure(self, fig):
if self.arrow is not None:
self.arrow.set_figure(fig)
if self.arrow_patch is not None:
self.arrow_patch.set_figure(fig)
Artist.set_figure(self, fig)
def update_positions(self, renderer):
""""Update the pixel positions of the annotated point and the
text.
"""
xy_pixel = self._get_position_xy(renderer)
self._update_position_xytext(renderer, xy_pixel)
def _update_position_xytext(self, renderer, xy_pixel):
"""Update the pixel positions of the annotation text and the arrow
patch.
"""
# generate transformation,
self.set_transform(self._get_xy_transform(renderer, self.anncoords))
ox0, oy0 = self._get_xy_display()
ox1, oy1 = xy_pixel
if self.arrowprops is not None:
x0, y0 = xy_pixel
l, b, w, h = Text.get_window_extent(self, renderer).bounds
r = l + w
t = b + h
xc = 0.5 * (l + r)
yc = 0.5 * (b + t)
d = self.arrowprops.copy()
ms = d.pop("mutation_scale", self.get_size())
self.arrow_patch.set_mutation_scale(ms)
if "arrowstyle" not in d:
# Approximately simulate the YAArrow.
# Pop its kwargs:
shrink = d.pop('shrink', 0.0)
width = d.pop('width', 4)
headwidth = d.pop('headwidth', 12)
# Ignore frac--it is useless.
frac = d.pop('frac', None)
if frac is not None:
warnings.warn(
"'frac' option in 'arrowprops' is no longer supported;"
" use 'headlength' to set the head length in points.")
headlength = d.pop('headlength', 12)
# NB: ms is in pts
stylekw = dict(head_length=headlength / ms,
head_width=headwidth / ms,
tail_width=width / ms)
self.arrow_patch.set_arrowstyle('simple', **stylekw)
# using YAArrow style:
# pick the x,y corner of the text bbox closest to point
# annotated
xpos = ((l, 0), (xc, 0.5), (r, 1))
ypos = ((b, 0), (yc, 0.5), (t, 1))
_, (x, relposx) = min((abs(val[0] - x0), val) for val in xpos)
_, (y, relposy) = min((abs(val[0] - y0), val) for val in ypos)
self._arrow_relpos = (relposx, relposy)
r = np.hypot((y - y0), (x - x0))
shrink_pts = shrink * r / renderer.points_to_pixels(1)
self.arrow_patch.shrinkA = shrink_pts
self.arrow_patch.shrinkB = shrink_pts
# adjust the starting point of the arrow relative to
# the textbox.
# TODO : Rotation needs to be accounted.
relpos = self._arrow_relpos
bbox = Text.get_window_extent(self, renderer)
ox0 = bbox.x0 + bbox.width * relpos[0]
oy0 = bbox.y0 + bbox.height * relpos[1]
# The arrow will be drawn from (ox0, oy0) to (ox1,
# oy1). It will be first clipped by patchA and patchB.
# Then it will be shrunk by shrinkA and shrinkB
# (in points). If patch A is not set, self.bbox_patch
# is used.
self.arrow_patch.set_positions((ox0, oy0), (ox1, oy1))
if "patchA" in d:
self.arrow_patch.set_patchA(d.pop("patchA"))
else:
if self._bbox_patch:
self.arrow_patch.set_patchA(self._bbox_patch)
else:
pad = renderer.points_to_pixels(4)
if self.get_text() == "":
self.arrow_patch.set_patchA(None)
return
bbox = Text.get_window_extent(self, renderer)
l, b, w, h = bbox.bounds
l -= pad / 2.
b -= pad / 2.
w += pad
h += pad
r = Rectangle(xy=(l, b),
width=w,
height=h,
)
r.set_transform(IdentityTransform())
r.set_clip_on(False)
self.arrow_patch.set_patchA(r)
@artist.allow_rasterization
def draw(self, renderer):
"""
Draw the :class:`Annotation` object to the given *renderer*.
"""
if renderer is not None:
self._renderer = renderer
if not self.get_visible():
return
xy_pixel = self._get_position_xy(renderer)
if not self._check_xy(renderer, xy_pixel):
return
self._update_position_xytext(renderer, xy_pixel)
self.update_bbox_position_size(renderer)
if self.arrow_patch is not None: # FancyArrowPatch
if self.arrow_patch.figure is None and self.figure is not None:
self.arrow_patch.figure = self.figure
self.arrow_patch.draw(renderer)
# Draw text, including FancyBboxPatch, after FancyArrowPatch.
# Otherwise, a wedge arrowstyle can land partly on top of the Bbox.
Text.draw(self, renderer)
def get_window_extent(self, renderer=None):
'''
Return a :class:`~matplotlib.transforms.Bbox` object bounding
the text and arrow annotation, in display units.
*renderer* defaults to the _renderer attribute of the text
object. This is not assigned until the first execution of
:meth:`draw`, so you must use this kwarg if you want
to call :meth:`get_window_extent` prior to the first
:meth:`draw`. For getting web page regions, it is
simpler to call the method after saving the figure. The
*dpi* used defaults to self.figure.dpi; the renderer dpi is
irrelevant.
'''
if not self.get_visible():
return Bbox.unit()
arrow = self.arrow
arrow_patch = self.arrow_patch
text_bbox = Text.get_window_extent(self, renderer=renderer)
bboxes = [text_bbox]
if self.arrow is not None:
bboxes.append(arrow.get_window_extent(renderer=renderer))
elif self.arrow_patch is not None:
bboxes.append(arrow_patch.get_window_extent(renderer=renderer))
return Bbox.union(bboxes)
docstring.interpd.update(Annotation=Annotation.__init__.__doc__)
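# Illustrative sketch (not part of the original module): coordinate systems
# and clipping for Annotation, exercised through `Axes.annotate`. The helper
# name is hypothetical and the function is never called at import time.
def _example_annotation_coords():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.plot([1, 2, 3], [1, 4, 9])
    ax.annotate("peak", xy=(3, 9), xycoords="data",
                xytext=(-40, -30), textcoords="offset points",
                arrowprops=dict(arrowstyle="-|>"),
                annotation_clip=True)  # hidden if (3, 9) leaves the axes
    return fig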
cba-pipeline-public
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/sankey.py
"""
Module for creating Sankey diagrams using matplotlib
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import logging
from six.moves import zip
import numpy as np
from matplotlib.cbook import iterable, Bunch
from matplotlib.path import Path
from matplotlib.patches import PathPatch
from matplotlib.transforms import Affine2D
from matplotlib import docstring
from matplotlib import rcParams
_log = logging.getLogger(__name__)
__author__ = "Kevin L. Davies"
__credits__ = ["Yannick Copin"]
__license__ = "BSD"
__version__ = "2011/09/16"
# Angles [deg/90]
RIGHT = 0
UP = 1
# LEFT = 2
DOWN = 3
class Sankey(object):
"""
Sankey diagram in matplotlib
Sankey diagrams are a specific type of flow diagram, in which
the width of the arrows is shown proportionally to the flow
quantity. They are typically used to visualize energy or
material or cost transfers between processes.
`Wikipedia (6/1/2011) <https://en.wikipedia.org/wiki/Sankey_diagram>`_
"""
def __init__(self, ax=None, scale=1.0, unit='', format='%G', gap=0.25,
radius=0.1, shoulder=0.03, offset=0.15, head_angle=100,
margin=0.4, tolerance=1e-6, **kwargs):
"""
Create a new Sankey instance.
Optional keyword arguments:
=============== ===================================================
Field Description
=============== ===================================================
*ax* axes onto which the data should be plotted
If *ax* isn't provided, new axes will be created.
*scale* scaling factor for the flows
*scale* sizes the width of the paths in order to
maintain proper layout. The same scale is applied
to all subdiagrams. The value should be chosen
such that the product of the scale and the sum of
the inputs is approximately 1.0 (and the product of
the scale and the sum of the outputs is
approximately -1.0).
*unit* string representing the physical unit associated
with the flow quantities
If *unit* is None, then none of the quantities are
labeled.
*format* a Python number formatting string to be used in
labeling the flow as a quantity (i.e., a number
times a unit, where the unit is given)
*gap* space between paths that break in/break away
to/from the top or bottom
*radius* inner radius of the vertical paths
        *shoulder*      size of the shoulders of output arrows
*offset* text offset (from the dip or tip of the arrow)
*head_angle* angle of the arrow heads (and negative of the angle
of the tails) [deg]
*margin* minimum space between Sankey outlines and the edge
of the plot area
*tolerance* acceptable maximum of the magnitude of the sum of
flows
The magnitude of the sum of connected flows cannot
be greater than *tolerance*.
=============== ===================================================
The optional arguments listed above are applied to all subdiagrams so
that there is consistent alignment and formatting.
If :class:`Sankey` is instantiated with any keyword arguments other
than those explicitly listed above (``**kwargs``), they will be passed
to :meth:`add`, which will create the first subdiagram.
In order to draw a complex Sankey diagram, create an instance of
:class:`Sankey` by calling it without any kwargs::
sankey = Sankey()
Then add simple Sankey sub-diagrams::
sankey.add() # 1
sankey.add() # 2
#...
sankey.add() # n
Finally, create the full diagram::
sankey.finish()
Or, instead, simply daisy-chain those calls::
Sankey().add().add... .add().finish()
.. seealso::
:meth:`add`
:meth:`finish`
**Examples:**
.. plot:: gallery/api/sankey_basics.py
"""
# Check the arguments.
if gap < 0:
raise ValueError(
"The gap is negative.\nThis isn't allowed because it "
"would cause the paths to overlap.")
if radius > gap:
raise ValueError(
"The inner radius is greater than the path spacing.\n"
"This isn't allowed because it would cause the paths to overlap.")
if head_angle < 0:
raise ValueError(
"The angle is negative.\nThis isn't allowed "
"because it would cause inputs to look like "
"outputs and vice versa.")
if tolerance < 0:
raise ValueError(
"The tolerance is negative.\nIt must be a magnitude.")
# Create axes if necessary.
if ax is None:
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1, xticks=[], yticks=[])
self.diagrams = []
# Store the inputs.
self.ax = ax
self.unit = unit
self.format = format
self.scale = scale
self.gap = gap
self.radius = radius
self.shoulder = shoulder
self.offset = offset
self.margin = margin
self.pitch = np.tan(np.pi * (1 - head_angle / 180.0) / 2.0)
self.tolerance = tolerance
# Initialize the vertices of tight box around the diagram(s).
self.extent = np.array((np.inf, -np.inf, np.inf, -np.inf))
# If there are any kwargs, create the first subdiagram.
if len(kwargs):
self.add(**kwargs)
def _arc(self, quadrant=0, cw=True, radius=1, center=(0, 0)):
"""
Return the codes and vertices for a rotated, scaled, and translated
90 degree arc.
Optional keyword arguments:
=============== ==========================================
Keyword Description
=============== ==========================================
*quadrant* uses 0-based indexing (0, 1, 2, or 3)
*cw* if True, clockwise
*center* (x, y) tuple of the arc's center
=============== ==========================================
"""
# Note: It would be possible to use matplotlib's transforms to rotate,
# scale, and translate the arc, but since the angles are discrete,
# it's just as easy and maybe more efficient to do it here.
ARC_CODES = [Path.LINETO,
Path.CURVE4,
Path.CURVE4,
Path.CURVE4,
Path.CURVE4,
Path.CURVE4,
Path.CURVE4]
# Vertices of a cubic Bezier curve approximating a 90 deg arc
# These can be determined by Path.arc(0,90).
ARC_VERTICES = np.array([[1.00000000e+00, 0.00000000e+00],
[1.00000000e+00, 2.65114773e-01],
[8.94571235e-01, 5.19642327e-01],
[7.07106781e-01, 7.07106781e-01],
[5.19642327e-01, 8.94571235e-01],
[2.65114773e-01, 1.00000000e+00],
# Insignificant
# [6.12303177e-17, 1.00000000e+00]])
[0.00000000e+00, 1.00000000e+00]])
if quadrant == 0 or quadrant == 2:
if cw:
vertices = ARC_VERTICES
else:
vertices = ARC_VERTICES[:, ::-1] # Swap x and y.
elif quadrant == 1 or quadrant == 3:
# Negate x.
if cw:
# Swap x and y.
vertices = np.column_stack((-ARC_VERTICES[:, 1],
ARC_VERTICES[:, 0]))
else:
vertices = np.column_stack((-ARC_VERTICES[:, 0],
ARC_VERTICES[:, 1]))
if quadrant > 1:
radius = -radius # Rotate 180 deg.
return list(zip(ARC_CODES, radius * vertices +
np.tile(center, (ARC_VERTICES.shape[0], 1))))
def _add_input(self, path, angle, flow, length):
"""
Add an input to a path and return its tip and label locations.
"""
if angle is None:
return [0, 0], [0, 0]
else:
x, y = path[-1][1] # Use the last point as a reference.
dipdepth = (flow / 2) * self.pitch
if angle == RIGHT:
x -= length
dip = [x + dipdepth, y + flow / 2.0]
path.extend([(Path.LINETO, [x, y]),
(Path.LINETO, dip),
(Path.LINETO, [x, y + flow]),
(Path.LINETO, [x + self.gap, y + flow])])
label_location = [dip[0] - self.offset, dip[1]]
else: # Vertical
x -= self.gap
if angle == UP:
sign = 1
else:
sign = -1
dip = [x - flow / 2, y - sign * (length - dipdepth)]
if angle == DOWN:
quadrant = 2
else:
quadrant = 1
# Inner arc isn't needed if inner radius is zero
if self.radius:
path.extend(self._arc(quadrant=quadrant,
cw=angle == UP,
radius=self.radius,
center=(x + self.radius,
y - sign * self.radius)))
else:
path.append((Path.LINETO, [x, y]))
path.extend([(Path.LINETO, [x, y - sign * length]),
(Path.LINETO, dip),
(Path.LINETO, [x - flow, y - sign * length])])
path.extend(self._arc(quadrant=quadrant,
cw=angle == DOWN,
radius=flow + self.radius,
center=(x + self.radius,
y - sign * self.radius)))
path.append((Path.LINETO, [x - flow, y + sign * flow]))
label_location = [dip[0], dip[1] - sign * self.offset]
return dip, label_location
def _add_output(self, path, angle, flow, length):
"""
Append an output to a path and return its tip and label locations.
.. note:: *flow* is negative for an output.
"""
if angle is None:
return [0, 0], [0, 0]
else:
x, y = path[-1][1] # Use the last point as a reference.
tipheight = (self.shoulder - flow / 2) * self.pitch
if angle == RIGHT:
x += length
tip = [x + tipheight, y + flow / 2.0]
path.extend([(Path.LINETO, [x, y]),
(Path.LINETO, [x, y + self.shoulder]),
(Path.LINETO, tip),
(Path.LINETO, [x, y - self.shoulder + flow]),
(Path.LINETO, [x, y + flow]),
(Path.LINETO, [x - self.gap, y + flow])])
label_location = [tip[0] + self.offset, tip[1]]
else: # Vertical
x += self.gap
if angle == UP:
sign = 1
else:
sign = -1
tip = [x - flow / 2.0, y + sign * (length + tipheight)]
if angle == UP:
quadrant = 3
else:
quadrant = 0
# Inner arc isn't needed if inner radius is zero
if self.radius:
path.extend(self._arc(quadrant=quadrant,
cw=angle == UP,
radius=self.radius,
center=(x - self.radius,
y + sign * self.radius)))
else:
path.append((Path.LINETO, [x, y]))
path.extend([(Path.LINETO, [x, y + sign * length]),
(Path.LINETO, [x - self.shoulder,
y + sign * length]),
(Path.LINETO, tip),
(Path.LINETO, [x + self.shoulder - flow,
y + sign * length]),
(Path.LINETO, [x - flow, y + sign * length])])
path.extend(self._arc(quadrant=quadrant,
cw=angle == DOWN,
radius=self.radius - flow,
center=(x - self.radius,
y + sign * self.radius)))
path.append((Path.LINETO, [x - flow, y + sign * flow]))
label_location = [tip[0], tip[1] + sign * self.offset]
return tip, label_location
def _revert(self, path, first_action=Path.LINETO):
"""
        A path is not simply reversible by path[::-1] since the code
specifies an action to take from the **previous** point.
"""
reverse_path = []
next_code = first_action
for code, position in path[::-1]:
reverse_path.append((next_code, position))
next_code = code
return reverse_path
# This might be more efficient, but it fails because 'tuple' object
# doesn't support item assignment:
# path[1] = path[1][-1:0:-1]
# path[1][0] = first_action
# path[2] = path[2][::-1]
# return path
@docstring.dedent_interpd
def add(self, patchlabel='', flows=None, orientations=None, labels='',
trunklength=1.0, pathlengths=0.25, prior=None, connect=(0, 0),
rotation=0, **kwargs):
"""
Add a simple Sankey diagram with flows at the same hierarchical level.
Return value is the instance of :class:`Sankey`.
Optional keyword arguments:
=============== ===================================================
Keyword Description
=============== ===================================================
*patchlabel* label to be placed at the center of the diagram
Note: *label* (not *patchlabel*) will be passed to
the patch through ``**kwargs`` and can be used to
create an entry in the legend.
*flows* array of flow values
By convention, inputs are positive and outputs are
negative.
*orientations* list of orientations of the paths
Valid values are 1 (from/to the top), 0 (from/to
the left or right), or -1 (from/to the bottom). If
*orientations* == 0, inputs will break in from the
left and outputs will break away to the right.
*labels* list of specifications of the labels for the flows
Each value may be *None* (no labels), '' (just
label the quantities), or a labeling string. If a
single value is provided, it will be applied to all
flows. If an entry is a non-empty string, then the
quantity for the corresponding flow will be shown
below the string. However, if the *unit* of the
main diagram is None, then quantities are never
shown, regardless of the value of this argument.
*trunklength* length between the bases of the input and output
groups
*pathlengths* list of lengths of the arrows before break-in or
after break-away
If a single value is given, then it will be applied
to the first (inside) paths on the top and bottom,
and the length of all other arrows will be
justified accordingly. The *pathlengths* are not
applied to the horizontal inputs and outputs.
*prior* index of the prior diagram to which this diagram
should be connected
*connect* a (prior, this) tuple indexing the flow of the
prior diagram and the flow of this diagram which
should be connected
If this is the first diagram or *prior* is *None*,
*connect* will be ignored.
*rotation* angle of rotation of the diagram [deg]
*rotation* is ignored if this diagram is connected
to an existing one (using *prior* and *connect*).
The interpretation of the *orientations* argument
will be rotated accordingly (e.g., if *rotation*
== 90, an *orientations* entry of 1 means to/from
the left).
=============== ===================================================
Valid kwargs are :meth:`matplotlib.patches.PathPatch` arguments:
%(Patch)s
As examples, ``fill=False`` and ``label='A legend entry'``.
By default, ``facecolor='#bfd1d4'`` (light blue) and
``linewidth=0.5``.
The indexing parameters (*prior* and *connect*) are zero-based.
The flows are placed along the top of the diagram from the inside out
in order of their index within the *flows* list or array. They are
placed along the sides of the diagram from the top down and along the
bottom from the outside in.
If the sum of the inputs and outputs is nonzero, the discrepancy
will appear as a cubic Bezier curve along the top and bottom edges of
the trunk.
.. seealso::
:meth:`finish`
"""
# Check and preprocess the arguments.
if flows is None:
flows = np.array([1.0, -1.0])
else:
flows = np.array(flows)
n = flows.shape[0] # Number of flows
if rotation is None:
rotation = 0
else:
# In the code below, angles are expressed in deg/90.
rotation /= 90.0
if orientations is None:
orientations = [0, 0]
if len(orientations) != n:
raise ValueError(
"orientations and flows must have the same length.\n"
"orientations has length %d, but flows has length %d."
% (len(orientations), n))
if labels != '' and getattr(labels, '__iter__', False):
# iterable() isn't used because it would give True if labels is a
# string
if len(labels) != n:
raise ValueError(
"If labels is a list, then labels and flows must have the "
"same length.\nlabels has length %d, but flows has length %d."
% (len(labels), n))
else:
labels = [labels] * n
if trunklength < 0:
raise ValueError(
"trunklength is negative.\nThis isn't allowed, because it would "
"cause poor layout.")
if np.abs(np.sum(flows)) > self.tolerance:
_log.info("The sum of the flows is nonzero (%f).\nIs the "
"system not at steady state?", np.sum(flows))
scaled_flows = self.scale * flows
gain = sum(max(flow, 0) for flow in scaled_flows)
loss = sum(min(flow, 0) for flow in scaled_flows)
if not (0.5 <= gain <= 2.0):
_log.info(
"The scaled sum of the inputs is %f.\nThis may "
"cause poor layout.\nConsider changing the scale so"
" that the scaled sum is approximately 1.0.", gain)
if not (-2.0 <= loss <= -0.5):
_log.info(
"The scaled sum of the outputs is %f.\nThis may "
"cause poor layout.\nConsider changing the scale so"
                " that the scaled sum is approximately 1.0.", loss)
if prior is not None:
if prior < 0:
raise ValueError("The index of the prior diagram is negative.")
if min(connect) < 0:
raise ValueError(
"At least one of the connection indices is negative.")
if prior >= len(self.diagrams):
raise ValueError(
"The index of the prior diagram is %d, but there are "
"only %d other diagrams.\nThe index is zero-based."
% (prior, len(self.diagrams)))
if connect[0] >= len(self.diagrams[prior].flows):
raise ValueError(
"The connection index to the source diagram is %d, but "
"that diagram has only %d flows.\nThe index is zero-based."
% (connect[0], len(self.diagrams[prior].flows)))
if connect[1] >= n:
raise ValueError(
"The connection index to this diagram is %d, but this diagram"
"has only %d flows.\n The index is zero-based."
% (connect[1], n))
if self.diagrams[prior].angles[connect[0]] is None:
raise ValueError(
"The connection cannot be made. Check that the magnitude "
"of flow %d of diagram %d is greater than or equal to the "
"specified tolerance." % (connect[0], prior))
flow_error = (self.diagrams[prior].flows[connect[0]] +
flows[connect[1]])
if abs(flow_error) >= self.tolerance:
raise ValueError(
"The scaled sum of the connected flows is %f, which is not "
"within the tolerance (%f)." % (flow_error, self.tolerance))
# Determine if the flows are inputs.
are_inputs = [None] * n
for i, flow in enumerate(flows):
if flow >= self.tolerance:
are_inputs[i] = True
elif flow <= -self.tolerance:
are_inputs[i] = False
else:
_log.info(
"The magnitude of flow %d (%f) is below the "
"tolerance (%f).\nIt will not be shown, and it "
"cannot be used in a connection."
% (i, flow, self.tolerance))
# Determine the angles of the arrows (before rotation).
angles = [None] * n
for i, (orient, is_input) in enumerate(zip(orientations, are_inputs)):
if orient == 1:
if is_input:
angles[i] = DOWN
elif not is_input:
# Be specific since is_input can be None.
angles[i] = UP
elif orient == 0:
if is_input is not None:
angles[i] = RIGHT
else:
if orient != -1:
raise ValueError(
"The value of orientations[%d] is %d, "
"but it must be [ -1 | 0 | 1 ]." % (i, orient))
if is_input:
angles[i] = UP
elif not is_input:
angles[i] = DOWN
# Justify the lengths of the paths.
if iterable(pathlengths):
if len(pathlengths) != n:
raise ValueError(
"If pathlengths is a list, then pathlengths and flows must "
"have the same length.\npathlengths has length %d, but flows "
"has length %d." % (len(pathlengths), n))
else: # Make pathlengths into a list.
urlength = pathlengths
ullength = pathlengths
lrlength = pathlengths
lllength = pathlengths
d = dict(RIGHT=pathlengths)
pathlengths = [d.get(angle, 0) for angle in angles]
# Determine the lengths of the top-side arrows
# from the middle outwards.
for i, (angle, is_input, flow) in enumerate(zip(angles, are_inputs,
scaled_flows)):
if angle == DOWN and is_input:
pathlengths[i] = ullength
ullength += flow
elif angle == UP and not is_input:
pathlengths[i] = urlength
urlength -= flow # Flow is negative for outputs.
# Determine the lengths of the bottom-side arrows
# from the middle outwards.
for i, (angle, is_input, flow) in enumerate(reversed(list(zip(
angles, are_inputs, scaled_flows)))):
if angle == UP and is_input:
pathlengths[n - i - 1] = lllength
lllength += flow
elif angle == DOWN and not is_input:
pathlengths[n - i - 1] = lrlength
lrlength -= flow
# Determine the lengths of the left-side arrows
# from the bottom upwards.
has_left_input = False
for i, (angle, is_input, spec) in enumerate(reversed(list(zip(
angles, are_inputs, zip(scaled_flows, pathlengths))))):
if angle == RIGHT:
if is_input:
if has_left_input:
pathlengths[n - i - 1] = 0
else:
has_left_input = True
# Determine the lengths of the right-side arrows
# from the top downwards.
has_right_output = False
for i, (angle, is_input, spec) in enumerate(zip(
angles, are_inputs, list(zip(scaled_flows, pathlengths)))):
if angle == RIGHT:
if not is_input:
if has_right_output:
pathlengths[i] = 0
else:
has_right_output = True
# Begin the subpaths, and smooth the transition if the sum of the flows
# is nonzero.
urpath = [(Path.MOVETO, [(self.gap - trunklength / 2.0), # Upper right
gain / 2.0]),
(Path.LINETO, [(self.gap - trunklength / 2.0) / 2.0,
gain / 2.0]),
(Path.CURVE4, [(self.gap - trunklength / 2.0) / 8.0,
gain / 2.0]),
(Path.CURVE4, [(trunklength / 2.0 - self.gap) / 8.0,
-loss / 2.0]),
(Path.LINETO, [(trunklength / 2.0 - self.gap) / 2.0,
-loss / 2.0]),
(Path.LINETO, [(trunklength / 2.0 - self.gap),
-loss / 2.0])]
llpath = [(Path.LINETO, [(trunklength / 2.0 - self.gap), # Lower left
loss / 2.0]),
(Path.LINETO, [(trunklength / 2.0 - self.gap) / 2.0,
loss / 2.0]),
(Path.CURVE4, [(trunklength / 2.0 - self.gap) / 8.0,
loss / 2.0]),
(Path.CURVE4, [(self.gap - trunklength / 2.0) / 8.0,
-gain / 2.0]),
(Path.LINETO, [(self.gap - trunklength / 2.0) / 2.0,
-gain / 2.0]),
(Path.LINETO, [(self.gap - trunklength / 2.0),
-gain / 2.0])]
lrpath = [(Path.LINETO, [(trunklength / 2.0 - self.gap), # Lower right
loss / 2.0])]
ulpath = [(Path.LINETO, [self.gap - trunklength / 2.0, # Upper left
gain / 2.0])]
# Add the subpaths and assign the locations of the tips and labels.
tips = np.zeros((n, 2))
label_locations = np.zeros((n, 2))
# Add the top-side inputs and outputs from the middle outwards.
for i, (angle, is_input, spec) in enumerate(zip(
angles, are_inputs, list(zip(scaled_flows, pathlengths)))):
if angle == DOWN and is_input:
tips[i, :], label_locations[i, :] = self._add_input(
ulpath, angle, *spec)
elif angle == UP and not is_input:
tips[i, :], label_locations[i, :] = self._add_output(
urpath, angle, *spec)
# Add the bottom-side inputs and outputs from the middle outwards.
for i, (angle, is_input, spec) in enumerate(reversed(list(zip(
angles, are_inputs, list(zip(scaled_flows, pathlengths)))))):
if angle == UP and is_input:
tip, label_location = self._add_input(llpath, angle, *spec)
tips[n - i - 1, :] = tip
label_locations[n - i - 1, :] = label_location
elif angle == DOWN and not is_input:
tip, label_location = self._add_output(lrpath, angle, *spec)
tips[n - i - 1, :] = tip
label_locations[n - i - 1, :] = label_location
# Add the left-side inputs from the bottom upwards.
has_left_input = False
for i, (angle, is_input, spec) in enumerate(reversed(list(zip(
angles, are_inputs, list(zip(scaled_flows, pathlengths)))))):
if angle == RIGHT and is_input:
if not has_left_input:
# Make sure the lower path extends
# at least as far as the upper one.
if llpath[-1][1][0] > ulpath[-1][1][0]:
llpath.append((Path.LINETO, [ulpath[-1][1][0],
llpath[-1][1][1]]))
has_left_input = True
tip, label_location = self._add_input(llpath, angle, *spec)
tips[n - i - 1, :] = tip
label_locations[n - i - 1, :] = label_location
# Add the right-side outputs from the top downwards.
has_right_output = False
for i, (angle, is_input, spec) in enumerate(zip(
angles, are_inputs, list(zip(scaled_flows, pathlengths)))):
if angle == RIGHT and not is_input:
if not has_right_output:
# Make sure the upper path extends
# at least as far as the lower one.
if urpath[-1][1][0] < lrpath[-1][1][0]:
urpath.append((Path.LINETO, [lrpath[-1][1][0],
urpath[-1][1][1]]))
has_right_output = True
tips[i, :], label_locations[i, :] = self._add_output(
urpath, angle, *spec)
# Trim any hanging vertices.
if not has_left_input:
ulpath.pop()
llpath.pop()
if not has_right_output:
lrpath.pop()
urpath.pop()
# Concatenate the subpaths in the correct order (clockwise from top).
path = (urpath + self._revert(lrpath) + llpath + self._revert(ulpath) +
[(Path.CLOSEPOLY, urpath[0][1])])
# Create a patch with the Sankey outline.
codes, vertices = zip(*path)
vertices = np.array(vertices)
def _get_angle(a, r):
if a is None:
return None
else:
return a + r
if prior is None:
if rotation != 0: # By default, none of this is needed.
angles = [_get_angle(angle, rotation) for angle in angles]
rotate = Affine2D().rotate_deg(rotation * 90).transform_affine
tips = rotate(tips)
label_locations = rotate(label_locations)
vertices = rotate(vertices)
text = self.ax.text(0, 0, s=patchlabel, ha='center', va='center')
else:
rotation = (self.diagrams[prior].angles[connect[0]] -
angles[connect[1]])
angles = [_get_angle(angle, rotation) for angle in angles]
rotate = Affine2D().rotate_deg(rotation * 90).transform_affine
tips = rotate(tips)
offset = self.diagrams[prior].tips[connect[0]] - tips[connect[1]]
translate = Affine2D().translate(*offset).transform_affine
tips = translate(tips)
label_locations = translate(rotate(label_locations))
vertices = translate(rotate(vertices))
kwds = dict(s=patchlabel, ha='center', va='center')
text = self.ax.text(*offset, **kwds)
if rcParams['_internal.classic_mode']:
fc = kwargs.pop('fc', kwargs.pop('facecolor', '#bfd1d4'))
lw = kwargs.pop('lw', kwargs.pop('linewidth', 0.5))
else:
fc = kwargs.pop('fc', kwargs.pop('facecolor', None))
lw = kwargs.pop('lw', kwargs.pop('linewidth', None))
if fc is None:
fc = next(self.ax._get_patches_for_fill.prop_cycler)['color']
patch = PathPatch(Path(vertices, codes), fc=fc, lw=lw, **kwargs)
self.ax.add_patch(patch)
# Add the path labels.
texts = []
for number, angle, label, location in zip(flows, angles, labels,
label_locations):
if label is None or angle is None:
label = ''
elif self.unit is not None:
quantity = self.format % abs(number) + self.unit
if label != '':
label += "\n"
label += quantity
texts.append(self.ax.text(x=location[0], y=location[1],
s=label,
ha='center', va='center'))
        # Text objects are placed even if they are empty (as long as the
        # magnitude of the corresponding flow is larger than the tolerance)
        # in case the user wants to provide labels later.
# Expand the size of the diagram if necessary.
self.extent = (min(np.min(vertices[:, 0]),
np.min(label_locations[:, 0]),
self.extent[0]),
max(np.max(vertices[:, 0]),
np.max(label_locations[:, 0]),
self.extent[1]),
min(np.min(vertices[:, 1]),
np.min(label_locations[:, 1]),
self.extent[2]),
max(np.max(vertices[:, 1]),
np.max(label_locations[:, 1]),
self.extent[3]))
        # Include both vertices _and_ label locations in the extents; there
        # are cases where either could determine the margins (e.g., arrow
        # shoulders).
# Add this diagram as a subdiagram.
self.diagrams.append(Bunch(patch=patch, flows=flows, angles=angles,
tips=tips, text=text, texts=texts))
# Allow a daisy-chained call structure (see docstring for the class).
return self
def finish(self):
"""
Adjust the axes and return a list of information about the Sankey
subdiagram(s).
Return value is a list of subdiagrams represented with the following
fields:
=============== ===================================================
Field Description
=============== ===================================================
*patch* Sankey outline (an instance of
                        :class:`~matplotlib.patches.PathPatch`)
*flows* values of the flows (positive for input, negative
for output)
*angles* list of angles of the arrows [deg/90]
For example, if the diagram has not been rotated,
an input to the top side will have an angle of 3
(DOWN), and an output from the top side will have
an angle of 1 (UP). If a flow has been skipped
(because its magnitude is less than *tolerance*),
then its angle will be *None*.
*tips* array in which each row is an [x, y] pair
indicating the positions of the tips (or "dips") of
the flow paths
                        If the magnitude of a flow is less than the *tolerance*
for the instance of :class:`Sankey`, the flow is
skipped and its tip will be at the center of the
diagram.
*text* :class:`~matplotlib.text.Text` instance for the
label of the diagram
*texts* list of :class:`~matplotlib.text.Text` instances
for the labels of flows
=============== ===================================================
.. seealso::
:meth:`add`
"""
self.ax.axis([self.extent[0] - self.margin,
self.extent[1] + self.margin,
self.extent[2] - self.margin,
self.extent[3] + self.margin])
self.ax.set_aspect('equal', adjustable='datalim')
return self.diagrams
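# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of the call structure described in the docstrings above: build the
# Sankey, add one subdiagram, then finish().  Flow values and labels are
# invented for the example; inputs are positive, outputs negative, summing
# to zero so that no layout warning is logged.
def _sankey_demo():
    import matplotlib.pyplot as plt
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1, xticks=[], yticks=[])
    diagrams = Sankey(ax=ax, unit='GW', scale=1.0).add(
        flows=[1.0, 0.5, -0.75, -0.75],
        labels=['in A', 'in B', 'out C', 'out D'],
        orientations=[0, 1, 0, -1]).finish()
    return fig, diagrams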
| 38,824 | 45.552758 | 79 | py | cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/gridspec.py |
"""
:mod:`~matplotlib.gridspec` is a module which specifies the location
of the subplot in the figure.
`GridSpec`
    specifies the geometry of the grid that a subplot will be
    placed in. The number of rows and number of columns of the grid
need to be set. Optionally, the subplot layout parameters
(e.g., left, right, etc.) can be tuned.
`SubplotSpec`
specifies the location of the subplot in the given `GridSpec`.
"""
from __future__ import absolute_import, division, print_function
import six
import copy
import logging
import warnings
import numpy as np
import matplotlib as mpl
from matplotlib import _pylab_helpers, tight_layout, rcParams
from matplotlib.transforms import Bbox
import matplotlib._layoutbox as layoutbox
from matplotlib.cbook import mplDeprecation
_log = logging.getLogger(__name__)
class GridSpecBase(object):
"""
    A base class of GridSpec that specifies the geometry of the grid
    that a subplot will be placed in.
"""
def __init__(self, nrows, ncols, height_ratios=None, width_ratios=None):
"""
The number of rows and number of columns of the grid need to
be set. Optionally, the ratio of heights and widths of rows and
columns can be specified.
"""
self._nrows, self._ncols = nrows, ncols
self.set_height_ratios(height_ratios)
self.set_width_ratios(width_ratios)
def get_geometry(self):
'get the geometry of the grid, e.g., 2,3'
return self._nrows, self._ncols
def get_subplot_params(self, figure=None, fig=None):
pass
def new_subplotspec(self, loc, rowspan=1, colspan=1):
"""
        Create and return a SubplotSpec instance.
"""
loc1, loc2 = loc
subplotspec = self[loc1:loc1+rowspan, loc2:loc2+colspan]
return subplotspec
def set_width_ratios(self, width_ratios):
if width_ratios is not None and len(width_ratios) != self._ncols:
raise ValueError('Expected the given number of width ratios to '
'match the number of columns of the grid')
self._col_width_ratios = width_ratios
def get_width_ratios(self):
return self._col_width_ratios
def set_height_ratios(self, height_ratios):
if height_ratios is not None and len(height_ratios) != self._nrows:
raise ValueError('Expected the given number of height ratios to '
'match the number of rows of the grid')
self._row_height_ratios = height_ratios
def get_height_ratios(self):
return self._row_height_ratios
def get_grid_positions(self, fig, raw=False):
"""
return lists of bottom and top position of rows, left and
right positions of columns.
If raw=True, then these are all in units relative to the container
with no margins. (used for constrained_layout).
"""
nrows, ncols = self.get_geometry()
if raw:
left = 0.
right = 1.
bottom = 0.
top = 1.
wspace = 0.
hspace = 0.
else:
subplot_params = self.get_subplot_params(fig)
left = subplot_params.left
right = subplot_params.right
bottom = subplot_params.bottom
top = subplot_params.top
wspace = subplot_params.wspace
hspace = subplot_params.hspace
tot_width = right - left
tot_height = top - bottom
# calculate accumulated heights of columns
cell_h = tot_height / (nrows + hspace*(nrows-1))
sep_h = hspace * cell_h
if self._row_height_ratios is not None:
norm = cell_h * nrows / sum(self._row_height_ratios)
cell_heights = [r * norm for r in self._row_height_ratios]
else:
cell_heights = [cell_h] * nrows
sep_heights = [0] + ([sep_h] * (nrows-1))
cell_hs = np.cumsum(np.column_stack([sep_heights, cell_heights]).flat)
# calculate accumulated widths of rows
cell_w = tot_width / (ncols + wspace*(ncols-1))
sep_w = wspace * cell_w
if self._col_width_ratios is not None:
norm = cell_w * ncols / sum(self._col_width_ratios)
cell_widths = [r * norm for r in self._col_width_ratios]
else:
cell_widths = [cell_w] * ncols
sep_widths = [0] + ([sep_w] * (ncols-1))
cell_ws = np.cumsum(np.column_stack([sep_widths, cell_widths]).flat)
fig_tops, fig_bottoms = (top - cell_hs).reshape((-1, 2)).T
fig_lefts, fig_rights = (left + cell_ws).reshape((-1, 2)).T
return fig_bottoms, fig_tops, fig_lefts, fig_rights
def __getitem__(self, key):
"""Create and return a SuplotSpec instance.
"""
nrows, ncols = self.get_geometry()
def _normalize(key, size): # Includes last index.
if isinstance(key, slice):
start, stop, _ = key.indices(size)
if stop > start:
return start, stop - 1
else:
if key < 0:
key += size
if 0 <= key < size:
return key, key
raise IndexError("invalid index")
if isinstance(key, tuple):
try:
k1, k2 = key
except ValueError:
raise ValueError("unrecognized subplot spec")
num1, num2 = np.ravel_multi_index(
[_normalize(k1, nrows), _normalize(k2, ncols)], (nrows, ncols))
else: # Single key
num1, num2 = _normalize(key, nrows * ncols)
return SubplotSpec(self, num1, num2)
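# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of the indexing implemented by __getitem__ above: a flat index, a
# (row, col) tuple and slices all resolve to a SubplotSpec.
def _gridspec_indexing_demo():
    gs = GridSpec(3, 3)            # GridSpec is defined just below
    ss_flat = gs[4]                # flat index -> cell (1, 1)
    ss_cell = gs[0, 2]             # (row, col) tuple
    ss_span = gs[1:, :2]           # slices span several cells (num2 inclusive)
    return (ss_flat.get_geometry(), ss_cell.get_geometry(),
            ss_span.get_geometry())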
class GridSpec(GridSpecBase):
"""
    A class that specifies the geometry of the grid that a subplot
    will be placed in. The location of the grid is determined in a
    similar way to SubplotParams.
"""
def __init__(self, nrows, ncols, figure=None,
left=None, bottom=None, right=None, top=None,
wspace=None, hspace=None,
width_ratios=None, height_ratios=None):
"""
The number of rows and number of columns of the grid need to be set.
Optionally, the subplot layout parameters (e.g., left, right, etc.)
can be tuned.
Parameters
----------
nrows : int
Number of rows in grid.
ncols : int
            Number of columns in grid.
Notes
-----
See `~.figure.SubplotParams` for descriptions of the layout parameters.
"""
self.left = left
self.bottom = bottom
self.right = right
self.top = top
self.wspace = wspace
self.hspace = hspace
self.figure = figure
GridSpecBase.__init__(self, nrows, ncols,
width_ratios=width_ratios,
height_ratios=height_ratios)
if (self.figure is None) or not self.figure.get_constrained_layout():
self._layoutbox = None
else:
self.figure.init_layoutbox()
self._layoutbox = layoutbox.LayoutBox(
parent=self.figure._layoutbox,
name='gridspec' + layoutbox.seq_id(),
artist=self)
            # by default the layoutbox for a gridspec will fill a figure.
# but this can change below if the gridspec is created from a
# subplotspec. (GridSpecFromSubplotSpec)
_AllowedKeys = ["left", "bottom", "right", "top", "wspace", "hspace"]
def __getstate__(self):
state = self.__dict__
try:
state.pop('_layoutbox')
except KeyError:
pass
return state
def __setstate__(self, state):
self.__dict__ = state
# layoutboxes don't survive pickling...
self._layoutbox = None
def update(self, **kwargs):
"""
Update the current values. If any kwarg is None, default to
the current value, if set, otherwise to rc.
"""
for k, v in six.iteritems(kwargs):
if k in self._AllowedKeys:
setattr(self, k, v)
else:
raise AttributeError("%s is unknown keyword" % (k,))
for figmanager in six.itervalues(_pylab_helpers.Gcf.figs):
for ax in figmanager.canvas.figure.axes:
# copied from Figure.subplots_adjust
if not isinstance(ax, mpl.axes.SubplotBase):
# Check if sharing a subplots axis
if isinstance(ax._sharex, mpl.axes.SubplotBase):
if ax._sharex.get_subplotspec().get_gridspec() == self:
ax._sharex.update_params()
ax._set_position(ax._sharex.figbox)
elif isinstance(ax._sharey, mpl.axes.SubplotBase):
if ax._sharey.get_subplotspec().get_gridspec() == self:
ax._sharey.update_params()
ax._set_position(ax._sharey.figbox)
else:
ss = ax.get_subplotspec().get_topmost_subplotspec()
if ss.get_gridspec() == self:
ax.update_params()
ax._set_position(ax.figbox)
def get_subplot_params(self, figure=None, fig=None):
"""
Return a dictionary of subplot layout parameters. The default
parameters are from rcParams unless a figure attribute is set.
"""
if fig is not None:
warnings.warn("the 'fig' kwarg is deprecated "
"use 'figure' instead", mplDeprecation)
if figure is None:
figure = fig
if figure is None:
kw = {k: rcParams["figure.subplot."+k] for k in self._AllowedKeys}
subplotpars = mpl.figure.SubplotParams(**kw)
else:
subplotpars = copy.copy(figure.subplotpars)
update_kw = {k: getattr(self, k) for k in self._AllowedKeys}
subplotpars.update(**update_kw)
return subplotpars
def locally_modified_subplot_params(self):
return [k for k in self._AllowedKeys if getattr(self, k)]
def tight_layout(self, figure, renderer=None,
pad=1.08, h_pad=None, w_pad=None, rect=None):
"""
Adjust subplot parameters to give specified padding.
Parameters
----------
pad : float
Padding between the figure edge and the edges of subplots, as a
fraction of the font-size.
h_pad, w_pad : float, optional
Padding (height/width) between edges of adjacent subplots.
Defaults to ``pad_inches``.
rect : tuple of 4 floats, optional
(left, bottom, right, top) rectangle in normalized figure
coordinates that the whole subplots area (including labels) will
fit into. Default is (0, 0, 1, 1).
"""
subplotspec_list = tight_layout.get_subplotspec_list(
figure.axes, grid_spec=self)
if None in subplotspec_list:
warnings.warn("This figure includes Axes that are not compatible "
"with tight_layout, so results might be incorrect.")
if renderer is None:
renderer = tight_layout.get_renderer(figure)
kwargs = tight_layout.get_tight_layout_figure(
figure, figure.axes, subplotspec_list, renderer,
pad=pad, h_pad=h_pad, w_pad=w_pad, rect=rect)
self.update(**kwargs)
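# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of GridSpec.update(): layout parameters changed after the axes were
# created are pushed back onto every subplot built from this gridspec.
def _gridspec_update_demo():
    import matplotlib.pyplot as plt
    fig = plt.figure()
    gs = GridSpec(2, 2, figure=fig)
    axs = [fig.add_subplot(gs[i]) for i in range(4)]
    gs.update(left=0.08, wspace=0.4, hspace=0.1)   # repositions the axes
    return axs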
class GridSpecFromSubplotSpec(GridSpecBase):
"""
GridSpec whose subplot layout parameters are inherited from the
location specified by a given SubplotSpec.
"""
def __init__(self, nrows, ncols,
subplot_spec,
wspace=None, hspace=None,
height_ratios=None, width_ratios=None):
"""
The number of rows and number of columns of the grid need to
        be set. An instance of SubplotSpec is also needed, from which
        the layout parameters will be inherited. The wspace
and hspace of the layout can be optionally specified or the
default values (from the figure or rcParams) will be used.
"""
self._wspace = wspace
self._hspace = hspace
self._subplot_spec = subplot_spec
GridSpecBase.__init__(self, nrows, ncols,
width_ratios=width_ratios,
height_ratios=height_ratios)
# do the layoutboxes
subspeclb = subplot_spec._layoutbox
if subspeclb is None:
self._layoutbox = None
else:
# OK, this is needed to divide the figure.
self._layoutbox = subspeclb.layout_from_subplotspec(
subplot_spec,
name=subspeclb.name + '.gridspec' + layoutbox.seq_id(),
artist=self)
def get_subplot_params(self, figure=None, fig=None):
"""Return a dictionary of subplot layout parameters.
"""
if fig is not None:
warnings.warn("the 'fig' kwarg is deprecated "
"use 'figure' instead", mplDeprecation)
if figure is None:
figure = fig
hspace = (self._hspace if self._hspace is not None
else figure.subplotpars.hspace if figure is not None
else rcParams["figure.subplot.hspace"])
wspace = (self._wspace if self._wspace is not None
else figure.subplotpars.wspace if figure is not None
else rcParams["figure.subplot.wspace"])
figbox = self._subplot_spec.get_position(figure)
left, bottom, right, top = figbox.extents
return mpl.figure.SubplotParams(left=left, right=right,
bottom=bottom, top=top,
wspace=wspace, hspace=hspace)
def get_topmost_subplotspec(self):
"""Get the topmost SubplotSpec instance associated with the subplot."""
return self._subplot_spec.get_topmost_subplotspec()
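# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of nesting: a GridSpecFromSubplotSpec subdivides one cell of an
# outer GridSpec and inherits its position from that SubplotSpec.
def _nested_gridspec_demo():
    import matplotlib.pyplot as plt
    fig = plt.figure()
    outer = GridSpec(1, 2, figure=fig)
    inner = GridSpecFromSubplotSpec(2, 2, subplot_spec=outer[0], hspace=0.0)
    inner_axes = [fig.add_subplot(inner[i]) for i in range(4)]
    right_axes = fig.add_subplot(outer[1])
    return inner_axes, right_axes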
class SubplotSpec(object):
"""Specifies the location of the subplot in the given `GridSpec`.
"""
def __init__(self, gridspec, num1, num2=None):
"""
The subplot will occupy the num1-th cell of the given
        gridspec. If num2 is provided, the subplot will span between
        the num1-th cell and the num2-th cell, inclusive.
The index starts from 0.
"""
self._gridspec = gridspec
self.num1 = num1
self.num2 = num2
if gridspec._layoutbox is not None:
glb = gridspec._layoutbox
# So note that here we don't assign any layout yet,
            # just make the layoutbox that will contain all items
# associated w/ this axis. This can include other axes like
# a colorbar or a legend.
self._layoutbox = layoutbox.LayoutBox(
parent=glb,
name=glb.name + '.ss' + layoutbox.seq_id(),
artist=self)
else:
self._layoutbox = None
def __getstate__(self):
state = self.__dict__
try:
state.pop('_layoutbox')
except KeyError:
pass
return state
def __setstate__(self, state):
self.__dict__ = state
# layoutboxes don't survive pickling...
self._layoutbox = None
def get_gridspec(self):
return self._gridspec
def get_geometry(self):
"""
Get the subplot geometry (``n_rows, n_cols, start, stop``).
        start and stop are the indices of the start and stop of the
subplot.
"""
rows, cols = self.get_gridspec().get_geometry()
return rows, cols, self.num1, self.num2
def get_rows_columns(self):
"""
Get the subplot row and column numbers:
(``n_rows, n_cols, row_start, row_stop, col_start, col_stop``)
"""
gridspec = self.get_gridspec()
nrows, ncols = gridspec.get_geometry()
row_start, col_start = divmod(self.num1, ncols)
if self.num2 is not None:
row_stop, col_stop = divmod(self.num2, ncols)
else:
row_stop = row_start
col_stop = col_start
return nrows, ncols, row_start, row_stop, col_start, col_stop
def get_position(self, figure, return_all=False):
"""Update the subplot position from ``figure.subplotpars``.
"""
gridspec = self.get_gridspec()
nrows, ncols = gridspec.get_geometry()
rows, cols = np.unravel_index(
[self.num1] if self.num2 is None else [self.num1, self.num2],
(nrows, ncols))
fig_bottoms, fig_tops, fig_lefts, fig_rights = \
gridspec.get_grid_positions(figure)
fig_bottom = fig_bottoms[rows].min()
fig_top = fig_tops[rows].max()
fig_left = fig_lefts[cols].min()
fig_right = fig_rights[cols].max()
figbox = Bbox.from_extents(fig_left, fig_bottom, fig_right, fig_top)
if return_all:
return figbox, rows[0], cols[0], nrows, ncols
else:
return figbox
def get_topmost_subplotspec(self):
'get the topmost SubplotSpec instance associated with the subplot'
gridspec = self.get_gridspec()
if hasattr(gridspec, "get_topmost_subplotspec"):
return gridspec.get_topmost_subplotspec()
else:
return self
def __eq__(self, other):
# other may not even have the attributes we are checking.
return ((self._gridspec, self.num1, self.num2)
== (getattr(other, "_gridspec", object()),
getattr(other, "num1", object()),
getattr(other, "num2", object())))
if six.PY2:
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self._gridspec, self.num1, self.num2))
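# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of the SubplotSpec accessors defined above for a spec spanning
# rows 1-2 and columns 2-3 of a 3x4 grid.
def _subplotspec_demo():
    gs = GridSpec(3, 4)
    ss = gs[1:, 2:]
    geometry = ss.get_geometry()          # (3, 4, 6, 11): flat start/stop
    rows_cols = ss.get_rows_columns()     # (3, 4, 1, 2, 2, 3)
    return geometry, rows_cols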
| 18,123 | 35.320641 | 79 | py | cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/mlab.py |
"""
Numerical python functions written for compatibility with MATLAB
commands with the same names.
MATLAB compatible functions
---------------------------
:func:`cohere`
Coherence (normalized cross spectral density)
:func:`csd`
Cross spectral density using Welch's average periodogram
:func:`detrend`
Remove the mean or best fit line from an array
:func:`find`
Return the indices where some condition is true;
numpy.nonzero is similar but more general.
:func:`griddata`
Interpolate irregularly distributed data to a
regular grid.
:func:`prctile`
Find the percentiles of a sequence
:func:`prepca`
Principal Component Analysis
:func:`psd`
Power spectral density using Welch's average periodogram
:func:`rk4`
A 4th order runge kutta integrator for 1D or ND systems
:func:`specgram`
Spectrogram (spectrum over segments of time)
Miscellaneous functions
-----------------------
Functions that don't exist in MATLAB, but are useful anyway:
:func:`cohere_pairs`
Coherence over all pairs. This is not a MATLAB function, but we
compute coherence a lot in my lab, and we compute it for a lot of
pairs. This function is optimized to do this efficiently by
caching the direct FFTs.
:func:`rk4`
A 4th order Runge-Kutta ODE integrator in case you ever find
yourself stranded without scipy (and the far superior
scipy.integrate tools)
:func:`contiguous_regions`
Return the indices of the regions spanned by some logical mask
:func:`cross_from_below`
Return the indices where a 1D array crosses a threshold from below
:func:`cross_from_above`
Return the indices where a 1D array crosses a threshold from above
:func:`complex_spectrum`
Return the complex-valued frequency spectrum of a signal
:func:`magnitude_spectrum`
Return the magnitude of the frequency spectrum of a signal
:func:`angle_spectrum`
Return the angle (wrapped phase) of the frequency spectrum of a signal
:func:`phase_spectrum`
Return the phase (unwrapped angle) of the frequency spectrum of a signal
:func:`detrend_mean`
Remove the mean from a line.
:func:`demean`
Remove the mean from a line. This function is the same as
:func:`detrend_mean` except for the default *axis*.
:func:`detrend_linear`
Remove the best fit line from a line.
:func:`detrend_none`
Return the original line.
:func:`stride_windows`
Get all windows in an array in a memory-efficient manner
:func:`stride_repeat`
Repeat an array in a memory-efficient manner
:func:`apply_window`
Apply a window along a given axis
record array helper functions
-----------------------------
A collection of helper methods for numpy record arrays
.. _htmlonly:
See :ref:`misc-examples-index`
:func:`rec2txt`
Pretty print a record array
:func:`rec2csv`
Store record array in CSV file
:func:`csv2rec`
Import record array from CSV file with type inspection
:func:`rec_append_fields`
Adds field(s)/array(s) to record array
:func:`rec_drop_fields`
Drop fields from record array
:func:`rec_join`
Join two record arrays on sequence of fields
:func:`recs_join`
A simple join of multiple recarrays using a single column as a key
:func:`rec_groupby`
Summarize data by groups (similar to SQL GROUP BY)
:func:`rec_summarize`
Helper code to filter rec array fields into new fields
For the rec viewer functions (e.g., rec2csv), there are a bunch of Format
objects you can pass into the functions that will do things like color
negative values red, set percent formatting and scaling, etc.
Example usage::
r = csv2rec('somefile.csv', checkrows=0)
formatd = dict(
weight = FormatFloat(2),
change = FormatPercent(2),
cost = FormatThousands(2),
)
rec2excel(r, 'test.xls', formatd=formatd)
rec2csv(r, 'test.csv', formatd=formatd)
scroll = rec2gtk(r, formatd=formatd)
win = gtk.Window()
win.set_size_request(600,800)
win.add(scroll)
win.show_all()
gtk.main()
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import map, xrange, zip
import copy
import csv
import operator
import os
import warnings
import numpy as np
import matplotlib.cbook as cbook
from matplotlib import docstring
from matplotlib.path import Path
import math
if six.PY3:
long = int
@cbook.deprecated("2.2", alternative='numpy.logspace or numpy.geomspace')
def logspace(xmin, xmax, N):
'''
Return N values logarithmically spaced between xmin and xmax.
'''
return np.exp(np.linspace(np.log(xmin), np.log(xmax), N))
def window_hanning(x):
'''
Return x times the hanning window of len(x).
See Also
--------
:func:`window_none`
:func:`window_none` is another window algorithm.
'''
return np.hanning(len(x))*x
def window_none(x):
'''
No window function; simply return x.
See Also
--------
:func:`window_hanning`
:func:`window_hanning` is another window algorithm.
'''
return x
def apply_window(x, window, axis=0, return_window=None):
'''
Apply the given window to the given 1D or 2D array along the given axis.
Parameters
----------
x : 1D or 2D array or sequence
Array or sequence containing the data.
window : function or array.
Either a function to generate a window or an array with length
*x*.shape[*axis*]
axis : integer
The axis over which to do the repetition.
Must be 0 or 1. The default is 0
return_window : bool
If true, also return the 1D values of the window that was applied
'''
x = np.asarray(x)
if x.ndim < 1 or x.ndim > 2:
raise ValueError('only 1D or 2D arrays can be used')
if axis+1 > x.ndim:
raise ValueError('axis(=%s) out of bounds' % axis)
xshape = list(x.shape)
xshapetarg = xshape.pop(axis)
if cbook.iterable(window):
if len(window) != xshapetarg:
raise ValueError('The len(window) must be the same as the shape '
'of x for the chosen axis')
windowVals = window
else:
windowVals = window(np.ones(xshapetarg, dtype=x.dtype))
if x.ndim == 1:
if return_window:
return windowVals * x, windowVals
else:
return windowVals * x
xshapeother = xshape.pop()
otheraxis = (axis+1) % 2
windowValsRep = stride_repeat(windowVals, xshapeother, axis=otheraxis)
if return_window:
return windowValsRep * x, windowVals
else:
return windowValsRep * x
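# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of apply_window(): a Hanning taper applied down the columns of a
# small 2-D block of samples, also returning the window that was used.
def _apply_window_demo():
    x = np.ones((8, 3))
    tapered, win = apply_window(x, window_hanning, axis=0, return_window=True)
    # Each column is multiplied element-wise by the same length-8 window.
    assert tapered.shape == (8, 3) and win.shape == (8,)
    return tapered, win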
def detrend(x, key=None, axis=None):
'''
Return x with its trend removed.
Parameters
----------
x : array or sequence
Array or sequence containing the data.
key : [ 'default' | 'constant' | 'mean' | 'linear' | 'none'] or function
Specifies the detrend algorithm to use. 'default' is 'mean', which is
the same as :func:`detrend_mean`. 'constant' is the same. 'linear' is
the same as :func:`detrend_linear`. 'none' is the same as
:func:`detrend_none`. The default is 'mean'. See the corresponding
functions for more details regarding the algorithms. Can also be a
function that carries out the detrend operation.
axis : integer
The axis along which to do the detrending.
See Also
--------
:func:`detrend_mean`
:func:`detrend_mean` implements the 'mean' algorithm.
:func:`detrend_linear`
:func:`detrend_linear` implements the 'linear' algorithm.
:func:`detrend_none`
:func:`detrend_none` implements the 'none' algorithm.
'''
if key is None or key in ['constant', 'mean', 'default']:
return detrend(x, key=detrend_mean, axis=axis)
elif key == 'linear':
return detrend(x, key=detrend_linear, axis=axis)
elif key == 'none':
return detrend(x, key=detrend_none, axis=axis)
elif isinstance(key, six.string_types):
raise ValueError("Unknown value for key %s, must be one of: "
"'default', 'constant', 'mean', "
"'linear', or a function" % key)
if not callable(key):
raise ValueError("Unknown value for key %s, must be one of: "
"'default', 'constant', 'mean', "
"'linear', or a function" % key)
x = np.asarray(x)
if axis is not None and axis+1 > x.ndim:
raise ValueError('axis(=%s) out of bounds' % axis)
if (axis is None and x.ndim == 0) or (not axis and x.ndim == 1):
return key(x)
# try to use the 'axis' argument if the function supports it,
# otherwise use apply_along_axis to do it
try:
return key(x, axis=axis)
except TypeError:
return np.apply_along_axis(key, axis=axis, arr=x)
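# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of the detrend() dispatcher above: string keys map onto the
# detrend_* functions defined below, and a callable can be passed directly.
def _detrend_demo():
    t = np.arange(100.0)
    y = 0.5 * t + 3.0 + np.sin(t / 5.0)
    no_mean = detrend(y, key='mean')           # same as detrend_mean(y)
    no_line = detrend(y, key='linear')         # same as detrend_linear(y)
    untouched = detrend(y, key=detrend_none)   # callables are accepted too
    return no_mean, no_line, untouched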
def demean(x, axis=0):
'''
Return x minus its mean along the specified axis.
Parameters
----------
x : array or sequence
Array or sequence containing the data
Can have any dimensionality
axis : integer
The axis along which to take the mean. See numpy.mean for a
description of this argument.
See Also
--------
:func:`delinear`
:func:`denone`
:func:`delinear` and :func:`denone` are other detrend algorithms.
:func:`detrend_mean`
This function is the same as :func:`detrend_mean` except for the
default *axis*.
'''
return detrend_mean(x, axis=axis)
def detrend_mean(x, axis=None):
'''
Return x minus the mean(x).
Parameters
----------
x : array or sequence
Array or sequence containing the data
Can have any dimensionality
axis : integer
The axis along which to take the mean. See numpy.mean for a
description of this argument.
See Also
--------
:func:`demean`
This function is the same as :func:`demean` except for the default
*axis*.
:func:`detrend_linear`
:func:`detrend_none`
:func:`detrend_linear` and :func:`detrend_none` are other detrend
algorithms.
:func:`detrend`
:func:`detrend` is a wrapper around all the detrend algorithms.
'''
x = np.asarray(x)
if axis is not None and axis+1 > x.ndim:
raise ValueError('axis(=%s) out of bounds' % axis)
# short-circuit 0-D array.
if not x.ndim:
return np.array(0., dtype=x.dtype)
# short-circuit simple operations
if axis == 0 or axis is None or x.ndim <= 1:
return x - x.mean(axis)
ind = [slice(None)] * x.ndim
ind[axis] = np.newaxis
return x - x.mean(axis)[ind]
def detrend_none(x, axis=None):
'''
Return x: no detrending.
Parameters
----------
x : any object
An object containing the data
axis : integer
This parameter is ignored.
It is included for compatibility with detrend_mean
See Also
--------
:func:`denone`
This function is the same as :func:`denone` except for the default
*axis*, which has no effect.
:func:`detrend_mean`
:func:`detrend_linear`
:func:`detrend_mean` and :func:`detrend_linear` are other detrend
algorithms.
:func:`detrend`
:func:`detrend` is a wrapper around all the detrend algorithms.
'''
return x
def detrend_linear(y):
'''
Return x minus best fit line; 'linear' detrending.
Parameters
----------
y : 0-D or 1-D array or sequence
Array or sequence containing the data
axis : integer
The axis along which to take the mean. See numpy.mean for a
description of this argument.
See Also
--------
:func:`delinear`
This function is the same as :func:`delinear` except for the default
*axis*.
:func:`detrend_mean`
:func:`detrend_none`
:func:`detrend_mean` and :func:`detrend_none` are other detrend
algorithms.
:func:`detrend`
:func:`detrend` is a wrapper around all the detrend algorithms.
'''
# This is faster than an algorithm based on linalg.lstsq.
y = np.asarray(y)
if y.ndim > 1:
raise ValueError('y cannot have ndim > 1')
# short-circuit 0-D array.
if not y.ndim:
return np.array(0., dtype=y.dtype)
x = np.arange(y.size, dtype=float)
C = np.cov(x, y, bias=1)
b = C[0, 1]/C[0, 0]
a = y.mean() - b*x.mean()
return y - (b*x + a)
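# [Editor's illustration -- not part of matplotlib] A worked example for
# detrend_linear(): for data that is exactly a line, the fitted slope and
# intercept reproduce the data, so the returned residual is ~zero.
def _detrend_linear_demo():
    x = np.arange(50, dtype=float)
    y = 2.0 * x + 7.0                  # slope b = 2, intercept a = 7
    residual = detrend_linear(y)
    assert np.allclose(residual, 0.0)
    return residual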
def stride_windows(x, n, noverlap=None, axis=0):
'''
Get all windows of x with length n as a single array,
using strides to avoid data duplication.
.. warning::
It is not safe to write to the output array. Multiple
elements may point to the same piece of memory,
so modifying one value may change others.
Parameters
----------
x : 1D array or sequence
Array or sequence containing the data.
n : integer
The number of data points in each window.
noverlap : integer
The overlap between adjacent windows.
Default is 0 (no overlap)
axis : integer
The axis along which the windows will run.
References
----------
`stackoverflow: Rolling window for 1D arrays in Numpy?
<http://stackoverflow.com/a/6811241>`_
`stackoverflow: Using strides for an efficient moving average filter
<http://stackoverflow.com/a/4947453>`_
'''
if noverlap is None:
noverlap = 0
if noverlap >= n:
raise ValueError('noverlap must be less than n')
if n < 1:
raise ValueError('n cannot be less than 1')
x = np.asarray(x)
if x.ndim != 1:
raise ValueError('only 1-dimensional arrays can be used')
if n == 1 and noverlap == 0:
if axis == 0:
return x[np.newaxis]
else:
return x[np.newaxis].transpose()
if n > x.size:
raise ValueError('n cannot be greater than the length of x')
# np.lib.stride_tricks.as_strided easily leads to memory corruption for
# non integer shape and strides, i.e. noverlap or n. See #3845.
noverlap = int(noverlap)
n = int(n)
step = n - noverlap
if axis == 0:
shape = (n, (x.shape[-1]-noverlap)//step)
strides = (x.strides[0], step*x.strides[0])
else:
shape = ((x.shape[-1]-noverlap)//step, n)
strides = (step*x.strides[0], x.strides[0])
return np.lib.stride_tricks.as_strided(x, shape=shape, strides=strides)
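# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of stride_windows(): overlapping windows exposed as the columns of
# a strided (non-copying) view of the input.
def _stride_windows_demo():
    x = np.arange(10)
    w = stride_windows(x, n=4, noverlap=2, axis=0)
    # step = n - noverlap = 2, so windows start at samples 0, 2, 4 and 6.
    assert w.shape == (4, 4)
    assert np.array_equal(w[:, 0], [0, 1, 2, 3])
    assert np.array_equal(w[:, 1], [2, 3, 4, 5])
    return w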
def stride_repeat(x, n, axis=0):
'''
Repeat the values in an array in a memory-efficient manner. Array x is
stacked vertically n times.
.. warning::
It is not safe to write to the output array. Multiple
elements may point to the same piece of memory, so
modifying one value may change others.
Parameters
----------
x : 1D array or sequence
Array or sequence containing the data.
n : integer
The number of time to repeat the array.
axis : integer
The axis along which the data will run.
References
----------
`stackoverflow: Repeat NumPy array without replicating data?
<http://stackoverflow.com/a/5568169>`_
'''
if axis not in [0, 1]:
raise ValueError('axis must be 0 or 1')
x = np.asarray(x)
if x.ndim != 1:
raise ValueError('only 1-dimensional arrays can be used')
if n == 1:
if axis == 0:
return np.atleast_2d(x)
else:
return np.atleast_2d(x).T
if n < 1:
raise ValueError('n cannot be less than 1')
# np.lib.stride_tricks.as_strided easily leads to memory corruption for
# non integer shape and strides, i.e. n. See #3845.
n = int(n)
if axis == 0:
shape = (n, x.size)
strides = (0, x.strides[0])
else:
shape = (x.size, n)
strides = (x.strides[0], 0)
return np.lib.stride_tricks.as_strided(x, shape=shape, strides=strides)
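# [Editor's illustration -- not part of matplotlib] A minimal, hypothetical
# sketch of stride_repeat(): the same length-3 vector viewed as four stacked
# rows without duplicating its memory.
def _stride_repeat_demo():
    x = np.array([1.0, 2.0, 3.0])
    rep = stride_repeat(x, 4, axis=0)
    assert rep.shape == (4, 3)
    assert np.array_equal(rep[0], rep[3])   # every row aliases the same data
    return rep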
def _spectral_helper(x, y=None, NFFT=None, Fs=None, detrend_func=None,
window=None, noverlap=None, pad_to=None,
sides=None, scale_by_freq=None, mode=None):
'''
This is a helper function that implements the commonality between the
psd, csd, spectrogram and complex, magnitude, angle, and phase spectrums.
It is *NOT* meant to be used outside of mlab and may change at any time.
'''
if y is None:
# if y is None use x for y
same_data = True
else:
# The checks for if y is x are so that we can use the same function to
# implement the core of psd(), csd(), and spectrogram() without doing
# extra calculations. We return the unaveraged Pxy, freqs, and t.
same_data = y is x
if Fs is None:
Fs = 2
if noverlap is None:
noverlap = 0
if detrend_func is None:
detrend_func = detrend_none
if window is None:
window = window_hanning
    # if NFFT is set to None, fall back to the default of 256
if NFFT is None:
NFFT = 256
if mode is None or mode == 'default':
mode = 'psd'
elif mode not in ['psd', 'complex', 'magnitude', 'angle', 'phase']:
raise ValueError("Unknown value for mode %s, must be one of: "
"'default', 'psd', 'complex', "
"'magnitude', 'angle', 'phase'" % mode)
if not same_data and mode != 'psd':
raise ValueError("x and y must be equal if mode is not 'psd'")
# Make sure we're dealing with a numpy array. If y and x were the same
# object to start with, keep them that way
x = np.asarray(x)
if not same_data:
y = np.asarray(y)
if sides is None or sides == 'default':
if np.iscomplexobj(x):
sides = 'twosided'
else:
sides = 'onesided'
elif sides not in ['onesided', 'twosided']:
raise ValueError("Unknown value for sides %s, must be one of: "
"'default', 'onesided', or 'twosided'" % sides)
# zero pad x and y up to NFFT if they are shorter than NFFT
if len(x) < NFFT:
n = len(x)
x = np.resize(x, (NFFT,))
x[n:] = 0
if not same_data and len(y) < NFFT:
n = len(y)
y = np.resize(y, (NFFT,))
y[n:] = 0
if pad_to is None:
pad_to = NFFT
if mode != 'psd':
scale_by_freq = False
elif scale_by_freq is None:
scale_by_freq = True
# For real x, ignore the negative frequencies unless told otherwise
if sides == 'twosided':
numFreqs = pad_to
if pad_to % 2:
freqcenter = (pad_to - 1)//2 + 1
else:
freqcenter = pad_to//2
scaling_factor = 1.
elif sides == 'onesided':
if pad_to % 2:
numFreqs = (pad_to + 1)//2
else:
numFreqs = pad_to//2 + 1
scaling_factor = 2.
result = stride_windows(x, NFFT, noverlap, axis=0)
result = detrend(result, detrend_func, axis=0)
result, windowVals = apply_window(result, window, axis=0,
return_window=True)
result = np.fft.fft(result, n=pad_to, axis=0)[:numFreqs, :]
freqs = np.fft.fftfreq(pad_to, 1/Fs)[:numFreqs]
if not same_data:
# if same_data is False, mode must be 'psd'
resultY = stride_windows(y, NFFT, noverlap)
resultY = detrend(resultY, detrend_func, axis=0)
resultY = apply_window(resultY, window, axis=0)
resultY = np.fft.fft(resultY, n=pad_to, axis=0)[:numFreqs, :]
result = np.conj(result) * resultY
elif mode == 'psd':
result = np.conj(result) * result
elif mode == 'magnitude':
result = np.abs(result) / np.abs(windowVals).sum()
elif mode == 'angle' or mode == 'phase':
# we unwrap the phase later to handle the onesided vs. twosided case
result = np.angle(result)
elif mode == 'complex':
result /= np.abs(windowVals).sum()
if mode == 'psd':
# Also include scaling factors for one-sided densities and dividing by
# the sampling frequency, if desired. Scale everything, except the DC
# component and the NFFT/2 component:
# if we have an even number of frequencies, don't scale NFFT/2
if not NFFT % 2:
slc = slice(1, -1, None)
# if we have an odd number, just don't scale DC
else:
slc = slice(1, None, None)
result[slc] *= scaling_factor
# MATLAB divides by the sampling frequency so that density function
# has units of dB/Hz and can be integrated by the plotted frequency
# values. Perform the same scaling here.
if scale_by_freq:
result /= Fs
# Scale the spectrum by the norm of the window to compensate for
# windowing loss; see Bendat & Piersol Sec 11.5.2.
result /= (np.abs(windowVals)**2).sum()
else:
# In this case, preserve power in the segment, not amplitude
result /= np.abs(windowVals).sum()**2
t = np.arange(NFFT/2, len(x) - NFFT/2 + 1, NFFT - noverlap)/Fs
if sides == 'twosided':
# center the frequency range at zero
freqs = np.concatenate((freqs[freqcenter:], freqs[:freqcenter]))
result = np.concatenate((result[freqcenter:, :],
result[:freqcenter, :]), 0)
elif not pad_to % 2:
# get the last value correctly, it is negative otherwise
freqs[-1] *= -1
# we unwrap the phase here to handle the onesided vs. twosided case
if mode == 'phase':
result = np.unwrap(result, axis=0)
return result, freqs, t
def _single_spectrum_helper(x, mode, Fs=None, window=None, pad_to=None,
sides=None):
'''
This is a helper function that implements the commonality between the
complex, magnitude, angle, and phase spectra.
It is *NOT* meant to be used outside of mlab and may change at any time.
'''
if mode is None or mode == 'psd' or mode == 'default':
raise ValueError('_single_spectrum_helper does not work with %s mode'
% mode)
if pad_to is None:
pad_to = len(x)
spec, freqs, _ = _spectral_helper(x=x, y=None, NFFT=len(x), Fs=Fs,
detrend_func=detrend_none, window=window,
noverlap=0, pad_to=pad_to,
sides=sides,
scale_by_freq=False,
mode=mode)
if mode != 'complex':
spec = spec.real
if spec.ndim == 2 and spec.shape[1] == 1:
spec = spec[:, 0]
return spec, freqs
# Split out these keyword docs so that they can be used elsewhere
docstring.interpd.update(Spectral=cbook.dedent("""
Fs : scalar
The sampling frequency (samples per time unit). It is used
to calculate the Fourier frequencies, freqs, in cycles per time
unit. The default value is 2.
window : callable or ndarray
A function or a vector of length *NFFT*. To create window
vectors see :func:`window_hanning`, :func:`window_none`,
:func:`numpy.blackman`, :func:`numpy.hamming`,
:func:`numpy.bartlett`, :func:`scipy.signal`,
:func:`scipy.signal.get_window`, etc. The default is
:func:`window_hanning`. If a function is passed as the
argument, it must take a data segment as an argument and
return the windowed version of the segment.
sides : [ 'default' | 'onesided' | 'twosided' ]
Specifies which sides of the spectrum to return. Default gives the
default behavior, which returns one-sided for real data and both
for complex data. 'onesided' forces the return of a one-sided
spectrum, while 'twosided' forces two-sided.
"""))
docstring.interpd.update(Single_Spectrum=cbook.dedent("""
pad_to : integer
The number of points to which the data segment is padded when
performing the FFT. While not increasing the actual resolution of
the spectrum (the minimum distance between resolvable peaks),
this can give more points in the plot, allowing for more
detail. This corresponds to the *n* parameter in the call to fft().
The default is None, which sets *pad_to* equal to the length of the
input signal (i.e. no padding).
"""))
docstring.interpd.update(PSD=cbook.dedent("""
pad_to : integer
The number of points to which the data segment is padded when
performing the FFT. This can be different from *NFFT*, which
specifies the number of data points used. While not increasing
the actual resolution of the spectrum (the minimum distance between
resolvable peaks), this can give more points in the plot,
allowing for more detail. This corresponds to the *n* parameter
in the call to fft(). The default is None, which sets *pad_to*
equal to *NFFT*.
NFFT : integer
The number of data points used in each block for the FFT.
A power of 2 is most efficient. The default value is 256.
This should *NOT* be used to get zero padding, or the scaling of the
result will be incorrect. Use *pad_to* for this instead.
detrend : {'default', 'constant', 'mean', 'linear', 'none'} or callable
The function applied to each segment before fft-ing,
designed to remove the mean or linear trend. Unlike in
MATLAB, where the *detrend* parameter is a vector, in
matplotlib it is a function. The :mod:`~matplotlib.pylab`
module defines :func:`~matplotlib.pylab.detrend_none`,
:func:`~matplotlib.pylab.detrend_mean`, and
:func:`~matplotlib.pylab.detrend_linear`, but you can use
a custom function as well. You can also use a string to choose
one of the functions. 'default', 'constant', and 'mean' call
:func:`~matplotlib.pylab.detrend_mean`. 'linear' calls
:func:`~matplotlib.pylab.detrend_linear`. 'none' calls
:func:`~matplotlib.pylab.detrend_none`.
scale_by_freq : boolean, optional
Specifies whether the resulting density values should be scaled
by the scaling frequency, which gives density in units of Hz^-1.
This allows for integration over the returned frequency values.
The default is True for MATLAB compatibility.
"""))
@docstring.dedent_interpd
def psd(x, NFFT=None, Fs=None, detrend=None, window=None,
noverlap=None, pad_to=None, sides=None, scale_by_freq=None):
r"""
Compute the power spectral density.
Call signature::
psd(x, NFFT=256, Fs=2, detrend=mlab.detrend_none,
window=mlab.window_hanning, noverlap=0, pad_to=None,
sides='default', scale_by_freq=None)
The power spectral density :math:`P_{xx}` is computed by Welch's average
periodogram method. The vector *x* is divided into *NFFT* length
segments. Each segment is detrended by function *detrend* and
windowed by function *window*. *noverlap* gives the length of
the overlap between segments. The :math:`|\mathrm{fft}(i)|^2`
of each segment :math:`i` are averaged to compute :math:`P_{xx}`.
If len(*x*) < *NFFT*, it will be zero padded to *NFFT*.
Parameters
----------
x : 1-D array or sequence
Array or sequence containing the data
%(Spectral)s
%(PSD)s
noverlap : integer
The number of points of overlap between segments.
The default value is 0 (no overlap).
Returns
-------
Pxx : 1-D array
The values for the power spectrum `P_{xx}` (real valued)
freqs : 1-D array
The frequencies corresponding to the elements in *Pxx*
References
----------
Bendat & Piersol -- Random Data: Analysis and Measurement Procedures, John
Wiley & Sons (1986)
See Also
--------
:func:`specgram`
:func:`specgram` differs in the default overlap; in not returning the
mean of the segment periodograms; and in returning the times of the
segments.
:func:`magnitude_spectrum`
:func:`magnitude_spectrum` returns the magnitude spectrum.
:func:`csd`
:func:`csd` returns the spectral density between two signals.
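
Example (a minimal, illustrative sketch; the 100 Hz test tone and the noise
level are assumptions for demonstration, not part of the API)::

    >>> import numpy as np
    >>> from matplotlib import mlab
    >>> fs = 1000.
    >>> t = np.arange(0., 1., 1. / fs)
    >>> x = np.sin(2. * np.pi * 100. * t) + 0.1 * np.random.randn(t.size)
    >>> Pxx, freqs = mlab.psd(x, NFFT=256, Fs=fs, noverlap=128)
    >>> peak = freqs[np.argmax(Pxx)]   # close to 100.0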
"""
Pxx, freqs = csd(x=x, y=None, NFFT=NFFT, Fs=Fs, detrend=detrend,
window=window, noverlap=noverlap, pad_to=pad_to,
sides=sides, scale_by_freq=scale_by_freq)
return Pxx.real, freqs
@docstring.dedent_interpd
def csd(x, y, NFFT=None, Fs=None, detrend=None, window=None,
noverlap=None, pad_to=None, sides=None, scale_by_freq=None):
"""
Compute the cross-spectral density.
Call signature::
csd(x, y, NFFT=256, Fs=2, detrend=mlab.detrend_none,
window=mlab.window_hanning, noverlap=0, pad_to=None,
sides='default', scale_by_freq=None)
The cross spectral density :math:`P_{xy}` is computed by Welch's average
periodogram method. The vectors *x* and *y* are divided into
*NFFT* length segments. Each segment is detrended by function
*detrend* and windowed by function *window*. *noverlap* gives
the length of the overlap between segments. The product of
the direct FFTs of *x* and *y* are averaged over each segment
to compute :math:`P_{xy}`, with a scaling to correct for power
loss due to windowing.
If len(*x*) < *NFFT* or len(*y*) < *NFFT*, they will be zero
padded to *NFFT*.
Parameters
----------
x, y : 1-D arrays or sequences
Arrays or sequences containing the data
%(Spectral)s
%(PSD)s
noverlap : integer
The number of points of overlap between segments.
The default value is 0 (no overlap).
Returns
-------
Pxy : 1-D array
The values for the cross spectrum `P_{xy}` before scaling (complex valued)
freqs : 1-D array
The frequencies corresponding to the elements in *Pxy*
References
----------
Bendat & Piersol -- Random Data: Analysis and Measurement Procedures, John
Wiley & Sons (1986)
See Also
--------
:func:`psd`
:func:`psd` is the equivalent to setting y=x.
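
Example (a minimal, illustrative sketch; the shared 50 Hz component is an
assumption for demonstration)::

    >>> import numpy as np
    >>> from matplotlib import mlab
    >>> fs = 500.
    >>> t = np.arange(0., 2., 1. / fs)
    >>> s = np.sin(2. * np.pi * 50. * t)
    >>> x = s + 0.1 * np.random.randn(t.size)
    >>> y = s + 0.1 * np.random.randn(t.size)
    >>> Pxy, freqs = mlab.csd(x, y, NFFT=256, Fs=fs)
    >>> peak = freqs[np.argmax(np.abs(Pxy))]   # close to 50.0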
"""
if NFFT is None:
NFFT = 256
Pxy, freqs, _ = _spectral_helper(x=x, y=y, NFFT=NFFT, Fs=Fs,
detrend_func=detrend, window=window,
noverlap=noverlap, pad_to=pad_to,
sides=sides, scale_by_freq=scale_by_freq,
mode='psd')
if Pxy.ndim == 2:
if Pxy.shape[1] > 1:
Pxy = Pxy.mean(axis=1)
else:
Pxy = Pxy[:, 0]
return Pxy, freqs
@docstring.dedent_interpd
def complex_spectrum(x, Fs=None, window=None, pad_to=None,
sides=None):
"""
Compute the complex-valued frequency spectrum of *x*. Data is padded to a
length of *pad_to* and the windowing function *window* is applied to the
signal.
Parameters
----------
x : 1-D array or sequence
Array or sequence containing the data
%(Spectral)s
%(Single_Spectrum)s
Returns
-------
spectrum : 1-D array
The values for the complex spectrum (complex valued)
freqs : 1-D array
The frequencies corresponding to the elements in *spectrum*
See Also
--------
:func:`magnitude_spectrum`
:func:`magnitude_spectrum` returns the absolute value of this function.
:func:`angle_spectrum`
:func:`angle_spectrum` returns the angle of this function.
:func:`phase_spectrum`
:func:`phase_spectrum` returns the phase (unwrapped angle) of this
function.
:func:`specgram`
:func:`specgram` can return the complex spectrum of segments within the
signal.
"""
return _single_spectrum_helper(x=x, Fs=Fs, window=window, pad_to=pad_to,
sides=sides, mode='complex')
@docstring.dedent_interpd
def magnitude_spectrum(x, Fs=None, window=None, pad_to=None,
sides=None):
"""
Compute the magnitude (absolute value) of the frequency spectrum of
*x*. Data is padded to a length of *pad_to* and the windowing function
*window* is applied to the signal.
Parameters
----------
x : 1-D array or sequence
Array or sequence containing the data
%(Spectral)s
%(Single_Spectrum)s
Returns
-------
spectrum : 1-D array
The values for the magnitude spectrum (real valued)
freqs : 1-D array
The frequencies corresponding to the elements in *spectrum*
See Also
--------
:func:`psd`
:func:`psd` returns the power spectral density.
:func:`complex_spectrum`
This function returns the absolute value of :func:`complex_spectrum`.
:func:`angle_spectrum`
:func:`angle_spectrum` returns the angles of the corresponding
frequencies.
:func:`phase_spectrum`
:func:`phase_spectrum` returns the phase (unwrapped angle) of the
corresponding frequencies.
:func:`specgram`
:func:`specgram` can return the magnitude spectrum of segments within
the signal.
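
Example (a minimal, illustrative sketch with an assumed 100 Hz test tone)::

    >>> import numpy as np
    >>> from matplotlib import mlab
    >>> fs = 1000.
    >>> t = np.arange(0., 1., 1. / fs)
    >>> x = np.sin(2. * np.pi * 100. * t)
    >>> spec, freqs = mlab.magnitude_spectrum(x, Fs=fs)
    >>> peak = freqs[np.argmax(spec)]   # close to 100.0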
"""
return _single_spectrum_helper(x=x, Fs=Fs, window=window, pad_to=pad_to,
sides=sides, mode='magnitude')
@docstring.dedent_interpd
def angle_spectrum(x, Fs=None, window=None, pad_to=None,
sides=None):
"""
Compute the angle of the frequency spectrum (wrapped phase spectrum) of
*x*. Data is padded to a length of *pad_to* and the windowing function
*window* is applied to the signal.
Parameters
----------
x : 1-D array or sequence
Array or sequence containing the data
%(Spectral)s
%(Single_Spectrum)s
Returns
-------
spectrum : 1-D array
The values for the angle spectrum in radians (real valued)
freqs : 1-D array
The frequencies corresponding to the elements in *spectrum*
See Also
--------
:func:`complex_spectrum`
This function returns the angle value of :func:`complex_spectrum`.
:func:`magnitude_spectrum`
:func:`magnitude_spectrum` returns the magnitudes of the corresponding
frequencies.
:func:`phase_spectrum`
:func:`phase_spectrum` returns the unwrapped version of this function.
:func:`specgram`
:func:`specgram` can return the angle spectrum of segments within the
signal.
"""
return _single_spectrum_helper(x=x, Fs=Fs, window=window, pad_to=pad_to,
sides=sides, mode='angle')
@docstring.dedent_interpd
def phase_spectrum(x, Fs=None, window=None, pad_to=None,
sides=None):
"""
Compute the phase of the frequency spectrum (unwrapped angle spectrum) of
*x*. Data is padded to a length of *pad_to* and the windowing function
*window* is applied to the signal.
Parameters
----------
x : 1-D array or sequence
Array or sequence containing the data
%(Spectral)s
%(Single_Spectrum)s
Returns
-------
spectrum : 1-D array
The values for the phase spectrum in radians (real valued)
freqs : 1-D array
The frequencies corresponding to the elements in *spectrum*
See Also
--------
:func:`complex_spectrum`
This function returns the phase (unwrapped angle) of
:func:`complex_spectrum`.
:func:`magnitude_spectrum`
:func:`magnitude_spectrum` returns the magnitudes of the corresponding
frequencies.
:func:`angle_spectrum`
:func:`angle_spectrum` returns the wrapped version of this function.
:func:`specgram`
:func:`specgram` can return the phase spectrum of segments within the
signal.
"""
return _single_spectrum_helper(x=x, Fs=Fs, window=window, pad_to=pad_to,
sides=sides, mode='phase')
@docstring.dedent_interpd
def specgram(x, NFFT=None, Fs=None, detrend=None, window=None,
noverlap=None, pad_to=None, sides=None, scale_by_freq=None,
mode=None):
"""
Compute a spectrogram.
Compute a spectrogram of the data in *x*. Data are split into
NFFT length segments and the spectrum of each section is
computed. The windowing function window is applied to each
segment, and the amount of overlap of each segment is
specified with noverlap.
Parameters
----------
x : array_like
1-D array or sequence.
%(Spectral)s
%(PSD)s
noverlap : int, optional
The number of points of overlap between blocks. The default
value is 128.
mode : str, optional
What sort of spectrum to use, default is 'psd'.
'psd'
Returns the power spectral density.
'complex'
Returns the complex-valued frequency spectrum.
'magnitude'
Returns the magnitude spectrum.
'angle'
Returns the phase spectrum without unwrapping.
'phase'
Returns the phase spectrum with unwrapping.
Returns
-------
spectrum : array_like
2-D array, columns are the periodograms of successive segments.
freqs : array_like
1-D array, frequencies corresponding to the rows in *spectrum*.
t : array_like
1-D array, the times corresponding to midpoints of segments
(i.e., the columns in *spectrum*).
See Also
--------
psd : differs in the overlap and in the return values.
complex_spectrum : similar, but with complex valued frequencies.
magnitude_spectrum : similar to single segment when mode is 'magnitude'.
angle_spectrum : similar to single segment when mode is 'angle'.
phase_spectrum : similar to single segment when mode is 'phase'.
Notes
-----
detrend and scale_by_freq only apply when *mode* is set to 'psd'.
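
Example (a minimal, illustrative sketch; the rising-frequency test signal is
an assumption for demonstration)::

    >>> import numpy as np
    >>> from matplotlib import mlab
    >>> fs = 1000.
    >>> t = np.arange(0., 4., 1. / fs)
    >>> x = np.sin(2. * np.pi * (50. + 25. * t) * t)   # frequency rises with time
    >>> spec, freqs, times = mlab.specgram(x, NFFT=256, Fs=fs, noverlap=128)
    >>> # spec.shape == (len(freqs), len(times)); the row index of the maximum,
    >>> # np.argmax(spec, axis=0), increases from column to column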
"""
if noverlap is None:
noverlap = 128 # default in _spectral_helper() is noverlap = 0
if NFFT is None:
NFFT = 256 # same default as in _spectral_helper()
if len(x) <= NFFT:
warnings.warn("Only one segment is calculated since parameter NFFT " +
"(=%d) >= signal length (=%d)." % (NFFT, len(x)))
spec, freqs, t = _spectral_helper(x=x, y=None, NFFT=NFFT, Fs=Fs,
detrend_func=detrend, window=window,
noverlap=noverlap, pad_to=pad_to,
sides=sides,
scale_by_freq=scale_by_freq,
mode=mode)
if mode != 'complex':
spec = spec.real # Needed since helper implements generically
return spec, freqs, t
_coh_error = """Coherence is calculated by averaging over *NFFT*
length segments. Your signal is too short for your choice of *NFFT*.
"""
@docstring.dedent_interpd
def cohere(x, y, NFFT=256, Fs=2, detrend=detrend_none, window=window_hanning,
noverlap=0, pad_to=None, sides='default', scale_by_freq=None):
"""
The coherence between *x* and *y*. Coherence is the normalized
cross spectral density:
.. math::
C_{xy} = \\frac{|P_{xy}|^2}{P_{xx}P_{yy}}
Parameters
----------
x, y
Array or sequence containing the data
%(Spectral)s
%(PSD)s
noverlap : integer
The number of points of overlap between blocks. The default value
is 0 (no overlap).
Returns
-------
The return value is the tuple (*Cxy*, *f*), where *f* are the
frequencies of the coherence vector. For cohere, scaling the
individual densities by the sampling frequency has no effect,
since the factors cancel out.
See Also
--------
:func:`psd`, :func:`csd` :
For information about the methods used to compute :math:`P_{xy}`,
:math:`P_{xx}` and :math:`P_{yy}`.
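
Example (a minimal, illustrative sketch; the common 60 Hz component and the
noise level are assumptions for demonstration)::

    >>> import numpy as np
    >>> from matplotlib import mlab
    >>> fs = 1000.
    >>> t = np.arange(0., 2., 1. / fs)
    >>> s = np.sin(2. * np.pi * 60. * t)
    >>> x = s + 0.5 * np.random.randn(t.size)
    >>> y = s + 0.5 * np.random.randn(t.size)
    >>> Cxy, f = mlab.cohere(x, y, NFFT=256, Fs=fs)
    >>> c60 = Cxy[np.argmin(np.abs(f - 60.))]   # typically close to 1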
"""
if len(x) < 2 * NFFT:
raise ValueError(_coh_error)
Pxx, f = psd(x, NFFT, Fs, detrend, window, noverlap, pad_to, sides,
scale_by_freq)
Pyy, f = psd(y, NFFT, Fs, detrend, window, noverlap, pad_to, sides,
scale_by_freq)
Pxy, f = csd(x, y, NFFT, Fs, detrend, window, noverlap, pad_to, sides,
scale_by_freq)
Cxy = np.abs(Pxy) ** 2 / (Pxx * Pyy)
return Cxy, f
@cbook.deprecated('2.2')
def donothing_callback(*args):
pass
@cbook.deprecated('2.2', 'scipy.signal.coherence')
def cohere_pairs(X, ij, NFFT=256, Fs=2, detrend=detrend_none,
window=window_hanning, noverlap=0,
preferSpeedOverMemory=True,
progressCallback=donothing_callback,
returnPxx=False):
"""
Compute the coherence and phase for all pairs *ij*, in *X*.
*X* is a *numSamples* * *numCols* array
*ij* is a list of tuples. Each tuple is a pair of indexes into
the columns of X for which you want to compute coherence. For
example, if *X* has 64 columns, and you want to compute all
nonredundant pairs, define *ij* as::
ij = []
for i in range(64):
for j in range(i+1,64):
ij.append( (i,j) )
*preferSpeedOverMemory* is an optional bool. Defaults to True. If
False, limits the caching by only making one, rather than two,
complex cache arrays. This is useful if memory becomes critical.
Even when *preferSpeedOverMemory* is False, :func:`cohere_pairs`
will still give significant performance gains over calling
:func:`cohere` for each pair, and will use substantially less
memory than if *preferSpeedOverMemory* is True. In my tests with
a 43000,64 array over all nonredundant pairs,
*preferSpeedOverMemory* = True delivered a 33% performance boost
on a 1.7 GHz Athlon with 512 MB RAM compared with
*preferSpeedOverMemory* = False. But both solutions were more
than 10x faster than naively crunching all possible pairs through
:func:`cohere`.
Returns
-------
Cxy : dictionary of (*i*, *j*) tuples -> coherence vector for
that pair. i.e., ``Cxy[(i,j)] = cohere(X[:,i], X[:,j])``.
Number of dictionary keys is ``len(ij)``.
Phase : dictionary of phases of the cross spectral density at
each frequency for each pair. Keys are (*i*, *j*).
freqs : vector of frequencies, equal in length to either the
coherence or phase vectors for any (*i*, *j*) key.
e.g., to make a coherence Bode plot::
subplot(211)
plot( freqs, Cxy[(12,19)])
subplot(212)
plot( freqs, Phase[(12,19)])
For a large number of pairs, :func:`cohere_pairs` can be much more
efficient than just calling :func:`cohere` for each pair, because
it caches most of the intensive computations. If :math:`N` is the
number of pairs, this function is :math:`O(N)` for most of the
heavy lifting, whereas calling cohere for each pair is
:math:`O(N^2)`. However, because of the caching, it is also more
memory intensive, making 2 additional complex arrays with
approximately the same number of elements as *X*.
See :file:`test/cohere_pairs_test.py` in the src tree for an
example script that shows that this :func:`cohere_pairs` and
:func:`cohere` give the same results for a given pair.
See Also
--------
:func:`psd`
For information about the methods used to compute :math:`P_{xy}`,
:math:`P_{xx}` and :math:`P_{yy}`.
"""
numRows, numCols = X.shape
# zero pad if X is too short
if numRows < NFFT:
tmp = X
X = np.zeros((NFFT, numCols), X.dtype)
X[:numRows, :] = tmp
del tmp
numRows, numCols = X.shape
# get all the columns of X that we are interested in by checking
# the ij tuples
allColumns = set()
for i, j in ij:
allColumns.add(i)
allColumns.add(j)
Ncols = len(allColumns)
# for real X, ignore the negative frequencies
if np.iscomplexobj(X):
numFreqs = NFFT
else:
numFreqs = NFFT//2+1
# cache the FFT of every windowed, detrended NFFT length segment
# of every channel. If preferSpeedOverMemory, cache the conjugate
# as well
if cbook.iterable(window):
if len(window) != NFFT:
raise ValueError("The length of the window must be equal to NFFT")
windowVals = window
else:
windowVals = window(np.ones(NFFT, X.dtype))
ind = list(xrange(0, numRows-NFFT+1, NFFT-noverlap))
numSlices = len(ind)
FFTSlices = {}
FFTConjSlices = {}
Pxx = {}
slices = range(numSlices)
normVal = np.linalg.norm(windowVals)**2
for iN, iCol in enumerate(allColumns):
    # report progress as the fraction of columns whose FFTs are cached
    progressCallback(iN / Ncols, 'Caching FFTs')
Slices = np.zeros((numSlices, numFreqs), dtype=np.complex_)
for iSlice in slices:
thisSlice = X[ind[iSlice]:ind[iSlice]+NFFT, iCol]
thisSlice = windowVals*detrend(thisSlice)
Slices[iSlice, :] = np.fft.fft(thisSlice)[:numFreqs]
FFTSlices[iCol] = Slices
if preferSpeedOverMemory:
FFTConjSlices[iCol] = np.conj(Slices)
Pxx[iCol] = np.divide(np.mean(abs(Slices)**2, axis=0), normVal)
del Slices, ind, windowVals
# compute the coherences and phases for all pairs using the
# cached FFTs
Cxy = {}
Phase = {}
count = 0
N = len(ij)
for i, j in ij:
count += 1
if count % 10 == 0:
progressCallback(count/N, 'Computing coherences')
if preferSpeedOverMemory:
Pxy = FFTSlices[i] * FFTConjSlices[j]
else:
Pxy = FFTSlices[i] * np.conj(FFTSlices[j])
if numSlices > 1:
Pxy = np.mean(Pxy, axis=0)
# Pxy = np.divide(Pxy, normVal)
Pxy /= normVal
# Cxy[(i,j)] = np.divide(np.absolute(Pxy)**2, Pxx[i]*Pxx[j])
Cxy[i, j] = abs(Pxy)**2 / (Pxx[i]*Pxx[j])
Phase[i, j] = np.arctan2(Pxy.imag, Pxy.real)
freqs = Fs/NFFT*np.arange(numFreqs)
if returnPxx:
return Cxy, Phase, freqs, Pxx
else:
return Cxy, Phase, freqs
@cbook.deprecated('2.2', 'scipy.stats.entropy')
def entropy(y, bins):
r"""
Return the entropy of the data in *y* in units of nat.
.. math::
-\sum p_i \ln(p_i)
where :math:`p_i` is the probability of observing *y* in the
:math:`i^{th}` bin of *bins*. *bins* can be a number of bins or a
range of bins; see :func:`numpy.histogram`.
Compare *S* with analytic calculation for a Gaussian::
x = mu + sigma * randn(200000)
Sanalytic = 0.5 * ( 1.0 + log(2*pi*sigma**2.0) )
"""
n, bins = np.histogram(y, bins)
n = n.astype(float)
n = np.take(n, np.nonzero(n)[0]) # get the positive
p = np.divide(n, len(y))
delta = bins[1] - bins[0]
S = -1.0 * np.sum(p * np.log(p)) + np.log(delta)
return S
@cbook.deprecated('2.2', 'scipy.stats.norm.pdf')
def normpdf(x, *args):
"Return the normal pdf evaluated at *x*; args provides *mu*, *sigma*"
mu, sigma = args
return 1./(np.sqrt(2*np.pi)*sigma)*np.exp(-0.5 * (1./sigma*(x - mu))**2)
@cbook.deprecated('2.2')
def find(condition):
"Return the indices where ravel(condition) is true"
res, = np.nonzero(np.ravel(condition))
return res
@cbook.deprecated('2.2')
def longest_contiguous_ones(x):
"""
Return the indices of the longest stretch of contiguous ones in *x*,
assuming *x* is a vector of zeros and ones. If there are two
equally long stretches, pick the first.
"""
x = np.ravel(x)
if len(x) == 0:
return np.array([])
ind = (x == 0).nonzero()[0]
if len(ind) == 0:
return np.arange(len(x))
if len(ind) == len(x):
return np.array([])
y = np.zeros((len(x)+2,), x.dtype)
y[1:-1] = x
dif = np.diff(y)
up = (dif == 1).nonzero()[0]
dn = (dif == -1).nonzero()[0]
i = (dn-up == max(dn - up)).nonzero()[0][0]
ind = np.arange(up[i], dn[i])
return ind
@cbook.deprecated('2.2')
def longest_ones(x):
'''alias for longest_contiguous_ones'''
return longest_contiguous_ones(x)
@cbook.deprecated('2.2')
class PCA(object):
def __init__(self, a, standardize=True):
"""
compute the SVD of a and store data for PCA. Use project to
project the data onto a reduced set of dimensions
Parameters
----------
a : np.ndarray
A numobservations x numdims array
standardize : bool
True if input data are to be standardized. If False, only centering
will be carried out.
Attributes
----------
a
A centered unit sigma version of input ``a``.
numrows, numcols
The dimensions of ``a``.
mu
A numdims array of means of ``a``. This is the vector that points
to the origin of PCA space.
sigma
A numdims array of standard deviation of ``a``.
fracs
The proportion of variance of each of the principal components.
s
The actual eigenvalues of the decomposition.
Wt
The weight vector for projecting a numdims point or array into
PCA space.
Y
A projected into PCA space.
Notes
-----
The factor loadings are in the ``Wt`` factor, i.e., the factor loadings
for the first principal component are given by ``Wt[0]``. This row is
also the first eigenvector.
"""
n, m = a.shape
if n < m:
raise RuntimeError('we assume data in a is organized with '
'numrows>numcols')
self.numrows, self.numcols = n, m
self.mu = a.mean(axis=0)
self.sigma = a.std(axis=0)
self.standardize = standardize
a = self.center(a)
self.a = a
U, s, Vh = np.linalg.svd(a, full_matrices=False)
# Note: .H indicates the conjugate transposed / Hermitian.
# The SVD is commonly written as a = U s V.H.
# If U is a unitary matrix, it means that it satisfies U.H = inv(U).
# The rows of Vh are the eigenvectors of a.H a.
# The columns of U are the eigenvectors of a a.H.
# For row i in Vh and column i in U, the corresponding eigenvalue is
# s[i]**2.
self.Wt = Vh
# save the transposed coordinates
Y = np.dot(Vh, a.T).T
self.Y = Y
# save the eigenvalues
self.s = s**2
# and now the contribution of the individual components
vars = self.s / len(s)
self.fracs = vars/vars.sum()
def project(self, x, minfrac=0.):
'''
project x onto the principal axes, dropping any axes where the fraction
of variance is less than *minfrac*
'''
x = np.asarray(x)
if x.shape[-1] != self.numcols:
raise ValueError('Expected an array with dims[-1]==%d' %
self.numcols)
Y = np.dot(self.Wt, self.center(x).T).T
mask = self.fracs >= minfrac
if x.ndim == 2:
Yreduced = Y[:, mask]
else:
Yreduced = Y[mask]
return Yreduced
def center(self, x):
'''
center and optionally standardize the data using the mean and sigma
from training set a
'''
if self.standardize:
return (x - self.mu)/self.sigma
else:
return (x - self.mu)
@staticmethod
def _get_colinear():
c0 = np.array([
0.19294738, 0.6202667, 0.45962655, 0.07608613, 0.135818,
0.83580842, 0.07218851, 0.48318321, 0.84472463, 0.18348462,
0.81585306, 0.96923926, 0.12835919, 0.35075355, 0.15807861,
0.837437, 0.10824303, 0.1723387, 0.43926494, 0.83705486])
c1 = np.array([
-1.17705601, -0.513883, -0.26614584, 0.88067144, 1.00474954,
-1.1616545, 0.0266109, 0.38227157, 1.80489433, 0.21472396,
-1.41920399, -2.08158544, -0.10559009, 1.68999268, 0.34847107,
-0.4685737, 1.23980423, -0.14638744, -0.35907697, 0.22442616])
c2 = c0 + 2*c1
c3 = -3*c0 + 4*c1
a = np.array([c3, c0, c1, c2]).T
return a
@cbook.deprecated('2.2', 'numpy.percentile')
def prctile(x, p=(0.0, 25.0, 50.0, 75.0, 100.0)):
"""
Return the percentiles of *x*. *p* can either be a sequence of
percentile values or a scalar. If *p* is a sequence, the ith
element of the return sequence is the *p*(i)-th percentile of *x*.
If *p* is a scalar, the largest value of *x* less than or equal to
the *p* percentage point in the sequence is returned.
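
Example (a minimal, illustrative sketch)::

    >>> import numpy as np
    >>> x = np.arange(1, 101)                   # the integers 1..100
    >>> q = prctile(x, p=(25.0, 50.0, 75.0))    # approximately [25.75, 50.5, 75.25]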
"""
# This implementation derived from scipy.stats.scoreatpercentile
def _interpolate(a, b, fraction):
"""Returns the point at the given fraction between a and b, where
'fraction' must be between 0 and 1.
"""
return a + (b - a) * fraction
per = np.array(p)
values = np.sort(x, axis=None)
idxs = per / 100 * (values.shape[0] - 1)
ai = idxs.astype(int)
bi = ai + 1
frac = idxs % 1
# handle cases where attempting to interpolate past last index
cond = bi >= len(values)
if per.ndim:
ai[cond] -= 1
bi[cond] -= 1
frac[cond] += 1
else:
if cond:
ai -= 1
bi -= 1
frac += 1
return _interpolate(values[ai], values[bi], frac)
@cbook.deprecated('2.2')
def prctile_rank(x, p):
"""
Return the rank of each element in *x*; ranks run from 0 to len(*p*).
e.g., if *p* = (25, 50, 75), the return value will be a
len(*x*) array with values in [0,1,2,3] where 0 indicates the
value is less than the 25th percentile, 1 indicates the value is
>= the 25th and < 50th percentile, ... and 3 indicates the value
is above the 75th percentile cutoff.
*p* is either an array of percentiles in [0..100] or a scalar which
indicates how many quantiles of data you want ranked.
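
Example (a minimal, illustrative sketch)::

    >>> import numpy as np
    >>> x = np.array([1, 30, 55, 80, 99])
    >>> ranks = prctile_rank(x, p=(25, 50, 75))   # one rank in 0..3 per element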
"""
if not cbook.iterable(p):
p = np.arange(100.0/p, 100.0, 100.0/p)
else:
p = np.asarray(p)
if p.max() <= 1 or p.min() < 0 or p.max() > 100:
raise ValueError('percentiles should be in range 0..100, not 0..1')
ptiles = prctile(x, p)
return np.searchsorted(ptiles, x)
@cbook.deprecated('2.2')
def center_matrix(M, dim=0):
"""
Return the matrix *M* with each row having zero mean and unit std.
If *dim* = 1 operate on columns instead of rows. (*dim* is
opposite to the numpy axis kwarg.)
"""
M = np.asarray(M, float)
if dim:
M = (M - M.mean(axis=0)) / M.std(axis=0)
else:
M = (M - M.mean(axis=1)[:, np.newaxis])
M = M / M.std(axis=1)[:, np.newaxis]
return M
@cbook.deprecated('2.2', 'scipy.integrate.ode')
def rk4(derivs, y0, t):
"""
Integrate 1D or ND system of ODEs using 4-th order Runge-Kutta.
This is a toy implementation which may be useful if you find
yourself stranded on a system w/o scipy. Otherwise use
:func:`scipy.integrate`.
Parameters
----------
y0
initial state vector
t
sample times
derivs
returns the derivative of the system and has the
signature ``dy = derivs(yi, ti)``
Examples
--------
A 2D system::
def derivs6(x,t):
d1 = x[0] + 2*x[1]
d2 = -3*x[0] + 4*x[1]
return (d1, d2)
dt = 0.0005
t = arange(0.0, 2.0, dt)
y0 = (1,2)
yout = rk4(derivs6, y0, t)
A 1D system::
alpha = 2
def derivs(x,t):
return -alpha*x + exp(-t)
y0 = 1
yout = rk4(derivs, y0, t)
If you have access to scipy, you should probably be using the
scipy.integrate tools rather than this function.
"""
try:
Ny = len(y0)
except TypeError:
yout = np.zeros((len(t),), float)
else:
yout = np.zeros((len(t), Ny), float)
yout[0] = y0
i = 0
for i in np.arange(len(t)-1):
thist = t[i]
dt = t[i+1] - thist
dt2 = dt/2.0
y0 = yout[i]
k1 = np.asarray(derivs(y0, thist))
k2 = np.asarray(derivs(y0 + dt2*k1, thist+dt2))
k3 = np.asarray(derivs(y0 + dt2*k2, thist+dt2))
k4 = np.asarray(derivs(y0 + dt*k3, thist+dt))
yout[i+1] = y0 + dt/6.0*(k1 + 2*k2 + 2*k3 + k4)
return yout
@cbook.deprecated('2.2')
def bivariate_normal(X, Y, sigmax=1.0, sigmay=1.0,
mux=0.0, muy=0.0, sigmaxy=0.0):
"""
Bivariate Gaussian distribution for equal shape *X*, *Y*.
See `bivariate normal
<http://mathworld.wolfram.com/BivariateNormalDistribution.html>`_
at mathworld.
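
Example (a minimal, illustrative sketch evaluating the density on a small
grid; the grid extents are arbitrary)::

    >>> import numpy as np
    >>> X, Y = np.meshgrid(np.linspace(-3, 3, 7), np.linspace(-3, 3, 7))
    >>> Z = bivariate_normal(X, Y, sigmax=1.0, sigmay=1.0, mux=0.0, muy=0.0)
    >>> # Z has the same shape as X and Y and peaks at the grid point
    >>> # nearest (mux, muy) = (0, 0)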
"""
Xmu = X-mux
Ymu = Y-muy
rho = sigmaxy/(sigmax*sigmay)
z = Xmu**2/sigmax**2 + Ymu**2/sigmay**2 - 2*rho*Xmu*Ymu/(sigmax*sigmay)
denom = 2*np.pi*sigmax*sigmay*np.sqrt(1-rho**2)
return np.exp(-z/(2*(1-rho**2))) / denom
@cbook.deprecated('2.2')
def get_xyz_where(Z, Cond):
"""
*Z* and *Cond* are *M* x *N* matrices. *Z* are data and *Cond* is
a boolean matrix where some condition is satisfied. Return value
is (*x*, *y*, *z*) where *x* and *y* are the indices into *Z* and
*z* are the values of *Z* at those indices. *x*, *y*, and *z* are
1D arrays.
"""
X, Y = np.indices(Z.shape)
return X[Cond], Y[Cond], Z[Cond]
@cbook.deprecated('2.2')
def get_sparse_matrix(M, N, frac=0.1):
"""
Return an *M* x *N* array, mostly zero, in which approximately a fraction
*frac* of the entries are filled with random values.
"""
data = np.zeros((M, N))*0.
for i in range(int(M*N*frac)):
x = np.random.randint(0, M-1)
y = np.random.randint(0, N-1)
data[x, y] = np.random.rand()
return data
@cbook.deprecated('2.2', 'numpy.hypot')
def dist(x, y):
"""
Return the distance between two points.
"""
d = x-y
return np.sqrt(np.dot(d, d))
@cbook.deprecated('2.2')
def dist_point_to_segment(p, s0, s1):
"""
Get the distance of a point to a segment.
*p*, *s0*, *s1* are *xy* sequences
This algorithm from
http://geomalgorithms.com/a02-_lines.html
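
Example (a minimal, illustrative sketch)::

    >>> d1 = dist_point_to_segment((0, 1), (-1, 0), (1, 0))   # 1.0, foot inside
    >>> d2 = dist_point_to_segment((3, 0), (-1, 0), (1, 0))   # 2.0, clipped to s1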
"""
p = np.asarray(p, float)
s0 = np.asarray(s0, float)
s1 = np.asarray(s1, float)
v = s1 - s0
w = p - s0
c1 = np.dot(w, v)
if c1 <= 0:
return dist(p, s0)
c2 = np.dot(v, v)
if c2 <= c1:
return dist(p, s1)
b = c1 / c2
pb = s0 + b * v
return dist(p, pb)
@cbook.deprecated('2.2')
def segments_intersect(s1, s2):
"""
Return *True* if *s1* and *s2* intersect.
*s1* and *s2* are defined as::
s1: (x1, y1), (x2, y2)
s2: (x3, y3), (x4, y4)
"""
(x1, y1), (x2, y2) = s1
(x3, y3), (x4, y4) = s2
den = ((y4-y3) * (x2-x1)) - ((x4-x3)*(y2-y1))
n1 = ((x4-x3) * (y1-y3)) - ((y4-y3)*(x1-x3))
n2 = ((x2-x1) * (y1-y3)) - ((y2-y1)*(x1-x3))
if den == 0:
# lines parallel
return False
u1 = n1/den
u2 = n2/den
return 0.0 <= u1 <= 1.0 and 0.0 <= u2 <= 1.0
@cbook.deprecated('2.2')
def fftsurr(x, detrend=detrend_none, window=window_none):
"""
Compute an FFT phase randomized surrogate of *x*.
"""
if cbook.iterable(window):
x = window*detrend(x)
else:
x = window(detrend(x))
z = np.fft.fft(x)
a = 2.*np.pi*1j
phase = a * np.random.rand(len(x))
z = z*np.exp(phase)
return np.fft.ifft(z).real
@cbook.deprecated('2.2')
def movavg(x, n):
"""
Compute the length-*n* moving average of *x*.
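
Example (a minimal, illustrative sketch)::

    >>> import numpy as np
    >>> y = movavg(np.array([1., 2., 3., 4., 5.]), 3)   # array([2., 3., 4.])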
"""
w = np.empty((n,), dtype=float)
w[:] = 1.0/n
return np.convolve(x, w, mode='valid')
# the following code was written and submitted by Fernando Perez
# from the ipython numutils package under a BSD license
# begin fperez functions
"""
A set of convenient utilities for numerical work.
Most of this module requires numpy or is meant to be used with it.
Copyright (c) 2001-2004, Fernando Perez. <Fernando.Perez@colorado.edu>
All rights reserved.
This license was generated from the BSD license template as found in:
http://www.opensource.org/licenses/bsd-license.php
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the IPython project nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# *****************************************************************************
# Globals
# ****************************************************************************
# function definitions
exp_safe_MIN = math.log(2.2250738585072014e-308)
exp_safe_MAX = 1.7976931348623157e+308
@cbook.deprecated("2.2", 'numpy.exp')
def exp_safe(x):
"""
Compute exponentials which safely underflow to zero.
Slow, but convenient to use. Note that numpy provides proper
floating point exception handling with access to the underlying
hardware.
"""
if type(x) is np.ndarray:
return np.exp(np.clip(x, exp_safe_MIN, exp_safe_MAX))
else:
return math.exp(x)
@cbook.deprecated("2.2", alternative='numpy.array(list(map(...)))')
def amap(fn, *args):
"""
amap(function, sequence[, sequence, ...]) -> array.
Works like :func:`map`, but it returns an array. This is just a
convenient shorthand for ``numpy.array(map(...))``.
"""
return np.array(list(map(fn, *args)))
@cbook.deprecated("2.2")
def rms_flat(a):
"""
Return the root mean square of all the elements of *a*, flattened out.
"""
return np.sqrt(np.mean(np.abs(a) ** 2))
@cbook.deprecated("2.2", alternative='numpy.linalg.norm(a, ord=1)')
def l1norm(a):
"""
Return the *l1* norm of *a*, flattened out.
Implemented as a separate function (not a call to :func:`norm` for speed).
"""
return np.sum(np.abs(a))
@cbook.deprecated("2.2", alternative='numpy.linalg.norm(a, ord=2)')
def l2norm(a):
"""
Return the *l2* norm of *a*, flattened out.
Implemented as a separate function (not a call to :func:`norm` for speed).
"""
return np.sqrt(np.sum(np.abs(a) ** 2))
@cbook.deprecated("2.2", alternative='numpy.linalg.norm(a.flat, ord=p)')
def norm_flat(a, p=2):
"""
norm(a,p=2) -> l-p norm of a.flat
Return the l-p norm of *a*, considered as a flat array. This is NOT a true
matrix norm, since arrays of arbitrary rank are always flattened.
*p* can be a number or the string 'Infinity' to get the L-infinity norm.
"""
# This function was being masked by a more general norm later in
# the file. We may want to simply delete it.
if p == 'Infinity':
return np.max(np.abs(a))
else:
return np.sum(np.abs(a) ** p) ** (1 / p)
@cbook.deprecated("2.2", 'numpy.arange')
def frange(xini, xfin=None, delta=None, **kw):
"""
frange([start,] stop[, step, keywords]) -> array of floats
Return a numpy ndarray containing a progression of floats. Similar to
:func:`numpy.arange`, but defaults to a closed interval.
``frange(x0, x1)`` returns ``[x0, x0+1, x0+2, ..., x1]``; *start*
defaults to 0, and the endpoint *is included*. This behavior is
different from that of :func:`range` and
:func:`numpy.arange`. This is deliberate, since :func:`frange`
will probably be more useful for generating lists of points for
function evaluation, and endpoints are often desired in this
use. The usual behavior of :func:`range` can be obtained by
setting the keyword *closed* = 0; in this case, :func:`frange`
basically becomes :func:`numpy.arange`.
When *step* is given, it specifies the increment (or
decrement). All arguments can be floating point numbers.
``frange(x0,x1,d)`` returns ``[x0,x0+d,x0+2d,...,xfin]`` where
*xfin* <= *x1*.
:func:`frange` can also be called with the keyword *npts*. This
sets the number of points the list should contain (and overrides
the value *step* might have been given). :func:`numpy.arange`
doesn't offer this option.
Examples::
>>> frange(3)
array([ 0., 1., 2., 3.])
>>> frange(3,closed=0)
array([ 0., 1., 2.])
>>> frange(1,6,2)
array([1, 3, 5]) or 1,3,5,7, depending on floating point vagaries
>>> frange(1,6.5,npts=5)
array([ 1. , 2.375, 3.75 , 5.125, 6.5 ])
"""
# defaults
kw.setdefault('closed', 1)
endpoint = kw['closed'] != 0
# funny logic to allow the *first* argument to be optional (like range())
# This was modified with a simpler version from a similar frange() found
# at http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66472
if xfin is None:
xfin = xini + 0.0
xini = 0.0
if delta is None:
delta = 1.0
# compute # of points, spacing and return final list
try:
npts = kw['npts']
delta = (xfin-xini) / (npts-endpoint)
except KeyError:
npts = int(np.round((xfin-xini)/delta)) + endpoint
# round finds the nearest, so the endpoint can be up to
# delta/2 larger than xfin.
return np.arange(npts)*delta+xini
# end frange()
@cbook.deprecated("2.2", 'numpy.identity')
def identity(n, rank=2, dtype='l', typecode=None):
"""
Returns the identity matrix of shape (*n*, *n*, ..., *n*) (rank *r*).
For ranks higher than 2, this object is simply a multi-index Kronecker
delta::
/ 1 if i0=i1=...=iR,
id[i0,i1,...,iR] = -|
\\ 0 otherwise.
Optionally a *dtype* (or typecode) may be given (it defaults to 'l').
Since rank defaults to 2, this function behaves in the default case (when
only *n* is given) like ``numpy.identity(n)`` -- but surprisingly, it is
much faster.
"""
if typecode is not None:
dtype = typecode
iden = np.zeros((n,)*rank, dtype)
for i in range(n):
idx = (i,)*rank
iden[idx] = 1
return iden
@cbook.deprecated("2.2")
def base_repr(number, base=2, padding=0):
"""
Return the representation of a *number* in any given *base*.
"""
chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
if number < base:
return (padding - 1) * chars[0] + chars[int(number)]
max_exponent = int(math.log(number)/math.log(base))
max_power = int(base) ** max_exponent
lead_digit = int(number/max_power)
return (chars[lead_digit] +
base_repr(number - max_power * lead_digit, base,
max(padding - 1, max_exponent)))
@cbook.deprecated("2.2")
def binary_repr(number, max_length=1025):
"""
Return the binary representation of the input *number* as a
string.
This is more efficient than using :func:`base_repr` with base 2.
Increase the value of max_length for very large numbers. Note that
on 32-bit machines, 2**1023 is the largest integer power of 2
which can be converted to a Python float.
"""
# assert number < 2L << max_length
shifts = map(operator.rshift, max_length * [number],
range(max_length - 1, -1, -1))
digits = list(map(operator.mod, shifts, max_length * [2]))
if not digits.count(1):
return '0'
digits = digits[digits.index(1):]
return ''.join(map(repr, digits)).replace('L', '')
@cbook.deprecated("2.2", 'numpy.log2')
def log2(x, ln2=math.log(2.0)):
"""
Return the log(*x*) in base 2.
This is a _slow_ function but which is guaranteed to return the correct
integer value if the input is an integer exact power of 2.
"""
try:
bin_n = binary_repr(x)[1:]
except (AssertionError, TypeError):
return math.log(x)/ln2
else:
if '1' in bin_n:
return math.log(x)/ln2
else:
return len(bin_n)
@cbook.deprecated("2.2")
def ispower2(n):
"""
Returns the log base 2 of *n* if *n* is a power of 2, zero otherwise.
Note the potential ambiguity if *n* == 1: 2**0 == 1, interpret accordingly.
"""
bin_n = binary_repr(n)[1:]
if '1' in bin_n:
return 0
else:
return len(bin_n)
@cbook.deprecated("2.2")
def isvector(X):
"""
Like the MATLAB function with the same name, returns *True*
if the supplied numpy array or matrix *X* looks like a vector,
meaning it has at most one non-singleton axis (i.e., it can have
multiple axes, but all must have length 1, except for one of
them).
If you just want to see if the array has 1 axis, use X.ndim == 1.
"""
return np.prod(X.shape) == np.max(X.shape)
# end fperez numutils code
# helpers for loading, saving, manipulating and viewing numpy record arrays
@cbook.deprecated("2.2", 'numpy.isnan')
def safe_isnan(x):
':func:`numpy.isnan` for arbitrary types'
if isinstance(x, six.string_types):
return False
try:
b = np.isnan(x)
except NotImplementedError:
return False
except TypeError:
return False
else:
return b
@cbook.deprecated("2.2", 'numpy.isinf')
def safe_isinf(x):
':func:`numpy.isinf` for arbitrary types'
if isinstance(x, six.string_types):
return False
try:
b = np.isinf(x)
except NotImplementedError:
return False
except TypeError:
return False
else:
return b
@cbook.deprecated("2.2")
def rec_append_fields(rec, names, arrs, dtypes=None):
"""
Return a new record array with field names populated with data
from arrays in *arrs*. If appending a single field, then *names*,
*arrs* and *dtypes* do not have to be lists. They can just be the
values themselves.
"""
if (not isinstance(names, six.string_types) and cbook.iterable(names)
and len(names) and isinstance(names[0], six.string_types)):
if len(names) != len(arrs):
raise ValueError("number of arrays do not match number of names")
else: # we have only 1 name and 1 array
names = [names]
arrs = [arrs]
arrs = list(map(np.asarray, arrs))
if dtypes is None:
dtypes = [a.dtype for a in arrs]
elif not cbook.iterable(dtypes):
dtypes = [dtypes]
if len(arrs) != len(dtypes):
if len(dtypes) == 1:
dtypes = dtypes * len(arrs)
else:
raise ValueError("dtypes must be None, a single dtype or a list")
old_dtypes = rec.dtype.descr
if six.PY2:
old_dtypes = [(name.encode('utf-8'), dt) for name, dt in old_dtypes]
newdtype = np.dtype(old_dtypes + list(zip(names, dtypes)))
newrec = np.recarray(rec.shape, dtype=newdtype)
for field in rec.dtype.fields:
newrec[field] = rec[field]
for name, arr in zip(names, arrs):
newrec[name] = arr
return newrec
@cbook.deprecated("2.2")
def rec_drop_fields(rec, names):
"""
Return a new numpy record array with fields in *names* dropped.
"""
names = set(names)
newdtype = np.dtype([(name, rec.dtype[name]) for name in rec.dtype.names
if name not in names])
newrec = np.recarray(rec.shape, dtype=newdtype)
for field in newdtype.names:
newrec[field] = rec[field]
return newrec
@cbook.deprecated("2.2")
def rec_keep_fields(rec, names):
"""
Return a new numpy record array with only fields listed in names
"""
if isinstance(names, six.string_types):
names = names.split(',')
arrays = []
for name in names:
arrays.append(rec[name])
return np.rec.fromarrays(arrays, names=names)
@cbook.deprecated("2.2")
def rec_groupby(r, groupby, stats):
"""
*r* is a numpy record array
*groupby* is a sequence of record array attribute names that
together form the grouping key. e.g., ('date', 'productcode')
*stats* is a sequence of (*attr*, *func*, *outname*) tuples which
will call ``x = func(attr)`` and assign *x* to the record array
output with attribute *outname*. For example::
stats = ( ('sales', len, 'numsales'), ('sales', np.mean, 'avgsale') )
Return record array has *dtype* names for each attribute name in
the *groupby* argument, with the associated group values, and
for each outname name in the *stats* argument, with the associated
stat summary output.
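
A minimal, illustrative sketch (the field names are arbitrary)::

    >>> import numpy as np
    >>> r = np.rec.fromrecords(
    ...     [('a', 1.0), ('a', 3.0), ('b', 2.0)],
    ...     names='productcode,sales')
    >>> stats = (('sales', len, 'numsales'), ('sales', np.mean, 'avgsale'))
    >>> summary = rec_groupby(r, ('productcode',), stats)
    >>> # summary has fields 'productcode', 'numsales' and 'avgsale',
    >>> # with one row per distinct productcode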
"""
# build a dictionary from groupby keys-> list of indices into r with
# those keys
rowd = {}
for i, row in enumerate(r):
key = tuple([row[attr] for attr in groupby])
rowd.setdefault(key, []).append(i)
rows = []
# sort the output by groupby keys
for key in sorted(rowd):
row = list(key)
# get the indices for this groupby key
ind = rowd[key]
thisr = r[ind]
# call each stat function for this groupby slice
row.extend([func(thisr[attr]) for attr, func, outname in stats])
rows.append(row)
# build the output record array with groupby and outname attributes
attrs, funcs, outnames = list(zip(*stats))
names = list(groupby)
names.extend(outnames)
return np.rec.fromrecords(rows, names=names)
@cbook.deprecated("2.2")
def rec_summarize(r, summaryfuncs):
"""
*r* is a numpy record array
*summaryfuncs* is a list of (*attr*, *func*, *outname*) tuples
which will apply *func* to the array *r*[attr] and assign the
output to a new attribute name *outname*. The returned record
array is identical to *r*, with extra arrays for each element in
*summaryfuncs*.
"""
names = list(r.dtype.names)
arrays = [r[name] for name in names]
for attr, func, outname in summaryfuncs:
names.append(outname)
arrays.append(np.asarray(func(r[attr])))
return np.rec.fromarrays(arrays, names=names)
@cbook.deprecated("2.2")
def rec_join(key, r1, r2, jointype='inner', defaults=None, r1postfix='1',
r2postfix='2'):
"""
Join record arrays *r1* and *r2* on *key*; *key* is a tuple of
field names -- if *key* is a string it is assumed to be a single
attribute name. If *r1* and *r2* have equal values on all the keys
in the *key* tuple, then their fields will be merged into a new
record array containing the intersection of the fields of *r1* and
*r2*.
*r1* (also *r2*) must not have any duplicate keys.
The *jointype* keyword can be 'inner', 'outer', 'leftouter'. To
do a rightouter join just reverse *r1* and *r2*.
The *defaults* keyword is a dictionary filled with
``{column_name:default_value}`` pairs.
The keywords *r1postfix* and *r2postfix* are postfixed to column names
(other than keys) that are both in *r1* and *r2*.
"""
"""
if isinstance(key, six.string_types):
key = (key, )
for name in key:
if name not in r1.dtype.names:
raise ValueError('r1 does not have key field %s' % name)
if name not in r2.dtype.names:
raise ValueError('r2 does not have key field %s' % name)
def makekey(row):
return tuple([row[name] for name in key])
r1d = {makekey(row): i for i, row in enumerate(r1)}
r2d = {makekey(row): i for i, row in enumerate(r2)}
r1keys = set(r1d)
r2keys = set(r2d)
common_keys = r1keys & r2keys
r1ind = np.array([r1d[k] for k in common_keys])
r2ind = np.array([r2d[k] for k in common_keys])
common_len = len(common_keys)
left_len = right_len = 0
if jointype == "outer" or jointype == "leftouter":
left_keys = r1keys.difference(r2keys)
left_ind = np.array([r1d[k] for k in left_keys])
left_len = len(left_ind)
if jointype == "outer":
right_keys = r2keys.difference(r1keys)
right_ind = np.array([r2d[k] for k in right_keys])
right_len = len(right_ind)
def key_desc(name):
'''
if name is a string key, use the larger size of r1 or r2 before
merging
'''
dt1 = r1.dtype[name]
if dt1.type != np.string_:
return (name, dt1.descr[0][1])
dt2 = r2.dtype[name]
if dt1 != dt2:
raise ValueError("The '{}' fields in arrays 'r1' and 'r2' must "
"have the same dtype".format(name))
if dt1.num > dt2.num:
return (name, dt1.descr[0][1])
else:
return (name, dt2.descr[0][1])
keydesc = [key_desc(name) for name in key]
def mapped_r1field(name):
"""
The column name in *newrec* that corresponds to the column in *r1*.
"""
if name in key or name not in r2.dtype.names:
return name
else:
return name + r1postfix
def mapped_r2field(name):
"""
The column name in *newrec* that corresponds to the column in *r2*.
"""
if name in key or name not in r1.dtype.names:
return name
else:
return name + r2postfix
r1desc = [(mapped_r1field(desc[0]), desc[1]) for desc in r1.dtype.descr
if desc[0] not in key]
r2desc = [(mapped_r2field(desc[0]), desc[1]) for desc in r2.dtype.descr
if desc[0] not in key]
all_dtypes = keydesc + r1desc + r2desc
if six.PY2:
all_dtypes = [(name.encode('utf-8'), dt) for name, dt in all_dtypes]
newdtype = np.dtype(all_dtypes)
newrec = np.recarray((common_len + left_len + right_len,), dtype=newdtype)
if defaults is not None:
for thiskey in defaults:
if thiskey not in newdtype.names:
warnings.warn('rec_join defaults key="%s" not in new dtype '
'names "%s"' % (thiskey, newdtype.names))
for name in newdtype.names:
dt = newdtype[name]
if dt.kind in ('f', 'i'):
newrec[name] = 0
if jointype != 'inner' and defaults is not None:
# fill in the defaults en masse
newrec_fields = list(newrec.dtype.fields)
for k, v in six.iteritems(defaults):
if k in newrec_fields:
newrec[k] = v
for field in r1.dtype.names:
newfield = mapped_r1field(field)
if common_len:
newrec[newfield][:common_len] = r1[field][r1ind]
if (jointype == "outer" or jointype == "leftouter") and left_len:
newrec[newfield][common_len:(common_len+left_len)] = (
r1[field][left_ind]
)
for field in r2.dtype.names:
newfield = mapped_r2field(field)
if field not in key and common_len:
newrec[newfield][:common_len] = r2[field][r2ind]
if jointype == "outer" and right_len:
newrec[newfield][-right_len:] = r2[field][right_ind]
newrec.sort(order=key)
return newrec
@cbook.deprecated("2.2")
def recs_join(key, name, recs, jointype='outer', missing=0., postfixes=None):
"""
Join a sequence of record arrays on single column key.
This function only joins a single column of the multiple record arrays
*key*
is the column name that acts as a key
*name*
is the name of the column that we want to join
*recs*
is a list of record arrays to join
*jointype*
is a string 'inner' or 'outer'
*missing*
is what any missing field is replaced by
*postfixes*
if not None, a len recs sequence of postfixes
returns a record array with columns [rowkey, name0, name1, ... namen-1].
or if postfixes [PF0, PF1, ..., PFN-1] are supplied,
[rowkey, namePF0, namePF1, ... namePFN-1].
Example::
r = recs_join("date", "close", recs=[r0, r1], missing=0.)
"""
results = []
aligned_iters = cbook.align_iterators(operator.attrgetter(key),
*[iter(r) for r in recs])
def extract(r):
if r is None:
return missing
else:
return r[name]
if jointype == "outer":
for rowkey, row in aligned_iters:
results.append([rowkey] + list(map(extract, row)))
elif jointype == "inner":
for rowkey, row in aligned_iters:
if None not in row: # throw out any Nones
results.append([rowkey] + list(map(extract, row)))
if postfixes is None:
postfixes = ['%d' % i for i in range(len(recs))]
names = ",".join([key] + ["%s%s" % (name, postfix)
for postfix in postfixes])
return np.rec.fromrecords(results, names=names)
@cbook.deprecated("2.2")
def csv2rec(fname, comments='#', skiprows=0, checkrows=0, delimiter=',',
converterd=None, names=None, missing='', missingd=None,
use_mrecords=False, dayfirst=False, yearfirst=False):
"""
Load data from comma/space/tab delimited file in *fname* into a
numpy record array and return the record array.
If *names* is *None*, a header row is required to automatically
assign the recarray names. The headers will be lower cased,
spaces will be converted to underscores, and illegal attribute
name characters removed. If *names* is not *None*, it is a
sequence of names to use for the column names. In this case, it
is assumed there is no header row.
- *fname*: can be a filename or a file handle. Support for gzipped
files is automatic, if the filename ends in '.gz'
- *comments*: the character used to indicate the start of a comment
in the file, or *None* to switch off the removal of comments
- *skiprows*: is the number of rows from the top to skip
- *checkrows*: is the number of rows to check to validate the column
data type. When set to zero all rows are validated.
- *converterd*: if not *None*, is a dictionary mapping column number or
munged column name to a converter function.
- *names*: if not None, is a list of header names. In this case, no
header will be read from the file
- *missingd* is a dictionary mapping munged column names to field values
which signify that the field does not contain actual data and should
be masked, e.g., '0000-00-00' or 'unused'
- *missing*: a string whose value signals a missing field regardless of
the column it appears in
- *use_mrecords*: if True, return an mrecords.fromrecords record array if
any of the data are missing
- *dayfirst*: default is False so that MM-DD-YY has precedence over
DD-MM-YY. See
http://labix.org/python-dateutil#head-b95ce2094d189a89f80f5ae52a05b4ab7b41af47
for further information.
- *yearfirst*: default is False so that MM-DD-YY has precedence over
YY-MM-DD. See
http://labix.org/python-dateutil#head-b95ce2094d189a89f80f5ae52a05b4ab7b41af47
for further information.
If no rows are found, *None* is returned
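
Example (a minimal, illustrative sketch; ``data.csv`` is a hypothetical file
with a header row such as ``date,price,volume``)::

    >>> r = csv2rec('data.csv')      # 'data.csv' is an assumed example file
    >>> # r.dtype.names holds the lower-cased, munged column names and each
    >>> # column is available as an attribute, e.g. r.price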
"""
if converterd is None:
converterd = dict()
if missingd is None:
missingd = {}
import dateutil.parser
import datetime
fh = cbook.to_filehandle(fname)
delimiter = str(delimiter)
class FH:
"""
For space-delimited files, we want different behavior than
comma or tab. Generally, we want multiple spaces to be
treated as a single separator, whereas with comma and tab we
want multiple commas to return multiple (empty) fields. The
join/strip trick below effects this.
"""
def __init__(self, fh):
self.fh = fh
def close(self):
self.fh.close()
def seek(self, arg):
self.fh.seek(arg)
def fix(self, s):
return ' '.join(s.split())
def __next__(self):
return self.fix(next(self.fh))
def __iter__(self):
for line in self.fh:
yield self.fix(line)
if delimiter == ' ':
fh = FH(fh)
reader = csv.reader(fh, delimiter=delimiter)
def process_skiprows(reader):
if skiprows:
for i, row in enumerate(reader):
if i >= (skiprows-1):
break
return fh, reader
process_skiprows(reader)
def ismissing(name, val):
"Should the value val in column name be masked?"
return val == missing or val == missingd.get(name) or val == ''
def with_default_value(func, default):
def newfunc(name, val):
if ismissing(name, val):
return default
else:
return func(val)
return newfunc
def mybool(x):
if x == 'True':
return True
elif x == 'False':
return False
else:
raise ValueError('invalid bool')
dateparser = dateutil.parser.parse
def mydateparser(x):
# try and return a datetime object
d = dateparser(x, dayfirst=dayfirst, yearfirst=yearfirst)
return d
mydateparser = with_default_value(mydateparser, datetime.datetime(1, 1, 1))
myfloat = with_default_value(float, np.nan)
myint = with_default_value(int, -1)
mystr = with_default_value(str, '')
mybool = with_default_value(mybool, None)
def mydate(x):
# try and return a date object
d = dateparser(x, dayfirst=dayfirst, yearfirst=yearfirst)
if d.hour > 0 or d.minute > 0 or d.second > 0:
raise ValueError('not a date')
return d.date()
mydate = with_default_value(mydate, datetime.date(1, 1, 1))
def get_func(name, item, func):
# promote functions in this order
funcs = [mybool, myint, myfloat, mydate, mydateparser, mystr]
for func in funcs[funcs.index(func):]:
try:
func(name, item)
except Exception:
continue
return func
raise ValueError('Could not find a working conversion function')
# map column names that clash with builtins -- TODO - extend this list
itemd = {
'return': 'return_',
'file': 'file_',
'print': 'print_',
}
def get_converters(reader, comments):
converters = None
i = 0
for row in reader:
if (len(row) and comments is not None and
row[0].startswith(comments)):
continue
if i == 0:
converters = [mybool]*len(row)
if checkrows and i > checkrows:
break
i += 1
for j, (name, item) in enumerate(zip(names, row)):
func = converterd.get(j)
if func is None:
func = converterd.get(name)
if func is None:
func = converters[j]
if len(item.strip()):
func = get_func(name, item, func)
else:
# how should we handle custom converters and defaults?
func = with_default_value(func, None)
converters[j] = func
return converters
# Get header and remove invalid characters
needheader = names is None
if needheader:
for row in reader:
if (len(row) and comments is not None and
row[0].startswith(comments)):
continue
headers = row
break
# remove these chars
delete = set(r"""~!@#$%^&*()-=+~\|}[]{';: /?.>,<""")
delete.add('"')
names = []
seen = dict()
for i, item in enumerate(headers):
item = item.strip().lower().replace(' ', '_')
item = ''.join([c for c in item if c not in delete])
if not len(item):
item = 'column%d' % i
item = itemd.get(item, item)
cnt = seen.get(item, 0)
if cnt > 0:
names.append(item + '_%d' % cnt)
else:
names.append(item)
seen[item] = cnt+1
else:
if isinstance(names, six.string_types):
names = [n.strip() for n in names.split(',')]
# get the converter functions by inspecting checkrows
converters = get_converters(reader, comments)
if converters is None:
raise ValueError('Could not find any valid data in CSV file')
# reset the reader and start over
fh.seek(0)
reader = csv.reader(fh, delimiter=delimiter)
process_skiprows(reader)
if needheader:
while True:
# skip past any comments and consume one line of column header
row = next(reader)
if (len(row) and comments is not None and
row[0].startswith(comments)):
continue
break
# iterate over the remaining rows and convert the data to date
# objects, ints, or floats as appropriate
rows = []
rowmasks = []
for i, row in enumerate(reader):
if not len(row):
continue
if comments is not None and row[0].startswith(comments):
continue
# Ensure that the row returned always has the same nr of elements
row.extend([''] * (len(converters) - len(row)))
rows.append([func(name, val)
for func, name, val in zip(converters, names, row)])
rowmasks.append([ismissing(name, val)
for name, val in zip(names, row)])
fh.close()
if not len(rows):
return None
if use_mrecords and np.any(rowmasks):
r = np.ma.mrecords.fromrecords(rows, names=names, mask=rowmasks)
else:
r = np.rec.fromrecords(rows, names=names)
return r
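# A minimal, self-contained usage sketch for csv2rec.  The CSV text, the
# column names and the helper name _csv2rec_usage_sketch are hypothetical and
# for illustration only; an in-memory text buffer stands in for a filename so
# nothing has to exist on disk.
def _csv2rec_usage_sketch():
    import io
    text = "date,price,volume\n2006-01-01,12.50,100\n2006-01-02,12.75,120\n"
    r = csv2rec(io.StringIO(text))
    # Column types are inferred per column: dates, floats and ints here.
    return r.date, r.price, r.volume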
# a series of classes for describing the format intentions of various rec views
@cbook.deprecated("2.2")
class FormatObj(object):
def tostr(self, x):
return self.toval(x)
def toval(self, x):
return str(x)
def fromstr(self, s):
return s
def __hash__(self):
"""
override the hash function of any of the formatters, so that we don't
create duplicate excel format styles
"""
return hash(self.__class__)
@cbook.deprecated("2.2")
class FormatString(FormatObj):
def tostr(self, x):
val = repr(x)
return val[1:-1]
@cbook.deprecated("2.2")
class FormatFormatStr(FormatObj):
def __init__(self, fmt):
self.fmt = fmt
def tostr(self, x):
if x is None:
return 'None'
return self.fmt % self.toval(x)
@cbook.deprecated("2.2")
class FormatFloat(FormatFormatStr):
def __init__(self, precision=4, scale=1.):
FormatFormatStr.__init__(self, '%%1.%df' % precision)
self.precision = precision
self.scale = scale
def __hash__(self):
return hash((self.__class__, self.precision, self.scale))
def toval(self, x):
if x is not None:
x = x * self.scale
return x
def fromstr(self, s):
return float(s)/self.scale
@cbook.deprecated("2.2")
class FormatInt(FormatObj):
def tostr(self, x):
return '%d' % int(x)
def toval(self, x):
return int(x)
def fromstr(self, s):
return int(s)
@cbook.deprecated("2.2")
class FormatBool(FormatObj):
def toval(self, x):
return str(x)
def fromstr(self, s):
return bool(s)
@cbook.deprecated("2.2")
class FormatPercent(FormatFloat):
def __init__(self, precision=4):
FormatFloat.__init__(self, precision, scale=100.)
@cbook.deprecated("2.2")
class FormatThousands(FormatFloat):
def __init__(self, precision=4):
FormatFloat.__init__(self, precision, scale=1e-3)
@cbook.deprecated("2.2")
class FormatMillions(FormatFloat):
def __init__(self, precision=4):
FormatFloat.__init__(self, precision, scale=1e-6)
@cbook.deprecated("2.2", alternative='date.strftime')
class FormatDate(FormatObj):
def __init__(self, fmt):
self.fmt = fmt
def __hash__(self):
return hash((self.__class__, self.fmt))
def toval(self, x):
if x is None:
return 'None'
return x.strftime(self.fmt)
def fromstr(self, x):
import dateutil.parser
return dateutil.parser.parse(x).date()
@cbook.deprecated("2.2", alternative='datetime.strftime')
class FormatDatetime(FormatDate):
def __init__(self, fmt='%Y-%m-%d %H:%M:%S'):
FormatDate.__init__(self, fmt)
def fromstr(self, x):
import dateutil.parser
return dateutil.parser.parse(x)
@cbook.deprecated("2.2")
def get_formatd(r, formatd=None):
'build a formatd guaranteed to have a key for every dtype name'
defaultformatd = {
np.bool_: FormatBool(),
np.int16: FormatInt(),
np.int32: FormatInt(),
np.int64: FormatInt(),
np.float32: FormatFloat(),
np.float64: FormatFloat(),
np.object_: FormatObj(),
np.string_: FormatString()}
if formatd is None:
formatd = dict()
for i, name in enumerate(r.dtype.names):
dt = r.dtype[name]
format = formatd.get(name)
if format is None:
format = defaultformatd.get(dt.type, FormatObj())
formatd[name] = format
return formatd
@cbook.deprecated("2.2")
def csvformat_factory(format):
format = copy.deepcopy(format)
if isinstance(format, FormatFloat):
format.scale = 1. # override scaling for storage
format.fmt = '%r'
return format
@cbook.deprecated("2.2", alternative='numpy.recarray.tofile')
def rec2txt(r, header=None, padding=3, precision=3, fields=None):
"""
Returns a textual representation of a record array.
Parameters
----------
r: numpy recarray
header: list
column headers
padding:
space between each column
precision: number of decimal places to use for floats.
Set to an integer to apply to all floats. Set to a
list of integers to apply precision individually.
Precision for non-floats is simply ignored.
fields : list
If not None, a list of field names to print. fields
can be a list of strings like ['field1', 'field2'] or a single
comma separated string like 'field1,field2'
Examples
--------
For ``precision=[0,2,3]``, the output is ::
ID Price Return
ABC 12.54 0.234
XYZ 6.32 -0.076
"""
if fields is not None:
r = rec_keep_fields(r, fields)
if cbook.is_numlike(precision):
precision = [precision]*len(r.dtype)
def get_type(item, atype=int):
tdict = {None: int, int: float, float: str}
try:
atype(str(item))
except:
return get_type(item, tdict[atype])
return atype
def get_justify(colname, column, precision):
ntype = column.dtype
if np.issubdtype(ntype, np.character):
fixed_width = int(ntype.str[2:])
length = max(len(colname), fixed_width)
return 0, length+padding, "%s" # left justify
if np.issubdtype(ntype, np.integer):
length = max(len(colname),
np.max(list(map(len, list(map(str, column))))))
return 1, length+padding, "%d" # right justify
if np.issubdtype(ntype, np.floating):
fmt = "%." + str(precision) + "f"
length = max(
len(colname),
np.max(list(map(len, list(map(lambda x: fmt % x, column)))))
)
return 1, length+padding, fmt # right justify
return (0,
max(len(colname),
np.max(list(map(len, list(map(str, column))))))+padding,
"%s")
if header is None:
header = r.dtype.names
justify_pad_prec = [get_justify(header[i], r.__getitem__(colname),
precision[i])
for i, colname in enumerate(r.dtype.names)]
justify_pad_prec_spacer = []
for i in range(len(justify_pad_prec)):
just, pad, prec = justify_pad_prec[i]
if i == 0:
justify_pad_prec_spacer.append((just, pad, prec, 0))
else:
pjust, ppad, pprec = justify_pad_prec[i-1]
if pjust == 0 and just == 1:
justify_pad_prec_spacer.append((just, pad-padding, prec, 0))
elif pjust == 1 and just == 0:
justify_pad_prec_spacer.append((just, pad, prec, padding))
else:
justify_pad_prec_spacer.append((just, pad, prec, 0))
def format(item, just_pad_prec_spacer):
just, pad, prec, spacer = just_pad_prec_spacer
if just == 0:
return spacer*' ' + str(item).ljust(pad)
else:
if get_type(item) == float:
item = (prec % float(item))
elif get_type(item) == int:
item = (prec % int(item))
return item.rjust(pad)
textl = []
textl.append(''.join([format(colitem, justify_pad_prec_spacer[j])
for j, colitem in enumerate(header)]))
for i, row in enumerate(r):
textl.append(''.join([format(colitem, justify_pad_prec_spacer[j])
for j, colitem in enumerate(row)]))
if i == 0:
textl[0] = textl[0].rstrip()
text = os.linesep.join(textl)
return text
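# A small usage sketch for rec2txt, mirroring the per-column precision
# example in the docstring above.  The record array contents are made up.
def _rec2txt_usage_sketch():
    r = np.rec.fromrecords([('ABC', 12.54, 0.234), ('XYZ', 6.32, -0.076)],
                           names='ID,Price,Return')
    # precision applies column-by-column; it is ignored for the string column.
    return rec2txt(r, precision=[0, 2, 3])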
@cbook.deprecated("2.2", alternative='numpy.recarray.tofile')
def rec2csv(r, fname, delimiter=',', formatd=None, missing='',
missingd=None, withheader=True):
"""
Save the data from numpy recarray *r* into a
comma-/space-/tab-delimited file. The record array dtype names
will be used for column headers.
*fname*: can be a filename or a file handle. Support for gzipped
files is automatic, if the filename ends in '.gz'
*withheader*: if withheader is False, do not write the attribute
names in the first row
for formatd type FormatFloat, we override the precision to store
full precision floats in the CSV file
See Also
--------
:func:`csv2rec`
For information about *missing* and *missingd*, which can be used to
fill in masked values into your CSV file.
"""
delimiter = str(delimiter)
if missingd is None:
missingd = dict()
def with_mask(func):
def newfunc(val, mask, mval):
if mask:
return mval
else:
return func(val)
return newfunc
if r.ndim != 1:
raise ValueError('rec2csv only operates on 1 dimensional recarrays')
formatd = get_formatd(r, formatd)
funcs = []
for i, name in enumerate(r.dtype.names):
funcs.append(with_mask(csvformat_factory(formatd[name]).tostr))
fh, opened = cbook.to_filehandle(fname, 'wb', return_opened=True)
writer = csv.writer(fh, delimiter=delimiter)
header = r.dtype.names
if withheader:
writer.writerow(header)
# Our list of specials for missing values
mvals = []
for name in header:
mvals.append(missingd.get(name, missing))
ismasked = False
if len(r):
row = r[0]
ismasked = hasattr(row, '_fieldmask')
for row in r:
if ismasked:
row, rowmask = row.item(), row._fieldmask.item()
else:
rowmask = [False] * len(row)
writer.writerow([func(val, mask, mval) for func, val, mask, mval
in zip(funcs, row, rowmask, mvals)])
if opened:
fh.close()
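# A small usage sketch for rec2csv.  An in-memory text buffer stands in for a
# filename so the sketch is self-contained; the record array is made up.
def _rec2csv_usage_sketch():
    import io
    r = np.rec.fromrecords([(1, 2.5, 'a'), (2, 3.5, 'b')],
                           names='idx,val,tag')
    buf = io.StringIO()
    rec2csv(r, buf)           # header row plus one line per record
    return buf.getvalue()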
@cbook.deprecated('2.2')
def griddata(x, y, z, xi, yi, interp='nn'):
"""
Interpolates from a nonuniformly spaced grid to some other grid.
Fits a surface of the form z = f(`x`, `y`) to the data in the
(usually) nonuniformly spaced vectors (`x`, `y`, `z`), then
interpolates this surface at the points specified by
(`xi`, `yi`) to produce `zi`.
Parameters
----------
x, y, z : 1d array_like
Coordinates of grid points to interpolate from.
xi, yi : 1d or 2d array_like
Coordinates of grid points to interpolate to.
interp : string key from {'nn', 'linear'}
Interpolation algorithm, either 'nn' for natural neighbor, or
'linear' for linear interpolation.
Returns
-------
2d float array
Array of values interpolated at (`xi`, `yi`) points. Array
will be masked if any of (`xi`, `yi`) are outside the convex
hull of (`x`, `y`).
Notes
-----
If `interp` is 'nn' (the default), uses natural neighbor
interpolation based on Delaunay triangulation. This option is
only available if the mpl_toolkits.natgrid module is installed.
This can be downloaded from https://github.com/matplotlib/natgrid.
The (`xi`, `yi`) grid must be regular and monotonically increasing
in this case.
If `interp` is 'linear', linear interpolation is used via
matplotlib.tri.LinearTriInterpolator.
Instead of using `griddata`, more flexible functionality and other
interpolation options are available using a
matplotlib.tri.Triangulation and a matplotlib.tri.TriInterpolator.
"""
# Check input arguments.
x = np.asanyarray(x, dtype=np.float64)
y = np.asanyarray(y, dtype=np.float64)
z = np.asanyarray(z, dtype=np.float64)
if x.shape != y.shape or x.shape != z.shape or x.ndim != 1:
raise ValueError("x, y and z must be equal-length 1-D arrays")
xi = np.asanyarray(xi, dtype=np.float64)
yi = np.asanyarray(yi, dtype=np.float64)
if xi.ndim != yi.ndim:
raise ValueError("xi and yi must be arrays with the same number of "
"dimensions (1 or 2)")
if xi.ndim == 2 and xi.shape != yi.shape:
raise ValueError("if xi and yi are 2D arrays, they must have the same "
"shape")
if xi.ndim == 1:
xi, yi = np.meshgrid(xi, yi)
if interp == 'nn':
use_nn_interpolation = True
elif interp == 'linear':
use_nn_interpolation = False
else:
raise ValueError("interp keyword must be one of 'linear' (for linear "
"interpolation) or 'nn' (for natural neighbor "
"interpolation). Default is 'nn'.")
# Remove masked points.
mask = np.ma.getmask(z)
if mask is not np.ma.nomask:
x = x.compress(~mask)
y = y.compress(~mask)
z = z.compressed()
if use_nn_interpolation:
try:
from mpl_toolkits.natgrid import _natgrid
except ImportError:
raise RuntimeError(
"To use interp='nn' (Natural Neighbor interpolation) in "
"griddata, natgrid must be installed. Either install it "
"from http://github.com/matplotlib/natgrid or use "
"interp='linear' instead.")
if xi.ndim == 2:
# natgrid expects 1D xi and yi arrays.
xi = xi[0, :]
yi = yi[:, 0]
# Override default natgrid internal parameters.
_natgrid.seti(b'ext', 0)
_natgrid.setr(b'nul', np.nan)
if np.min(np.diff(xi)) < 0 or np.min(np.diff(yi)) < 0:
raise ValueError("Output grid defined by xi,yi must be monotone "
"increasing")
# Allocate array for output (buffer will be overwritten by natgridd)
zi = np.empty((yi.shape[0], xi.shape[0]), np.float64)
# Natgrid requires each array to be contiguous rather than e.g. a view
# that is a non-contiguous slice of another array. Use numpy.require
# to deal with this, which will copy if necessary.
x = np.require(x, requirements=['C'])
y = np.require(y, requirements=['C'])
z = np.require(z, requirements=['C'])
xi = np.require(xi, requirements=['C'])
yi = np.require(yi, requirements=['C'])
_natgrid.natgridd(x, y, z, xi, yi, zi)
# Mask points on grid outside convex hull of input data.
if np.any(np.isnan(zi)):
zi = np.ma.masked_where(np.isnan(zi), zi)
return zi
else:
# Linear interpolation performed using a matplotlib.tri.Triangulation
# and a matplotlib.tri.LinearTriInterpolator.
from .tri import Triangulation, LinearTriInterpolator
triang = Triangulation(x, y)
interpolator = LinearTriInterpolator(triang, z)
return interpolator(xi, yi)
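# A usage sketch for griddata with interp='linear', which relies only on
# matplotlib.tri and so needs no natgrid install.  The scattered data are
# synthetic and for illustration only.
def _griddata_usage_sketch():
    rng = np.random.RandomState(0)
    x = rng.uniform(-2, 2, 100)
    y = rng.uniform(-2, 2, 100)
    z = x * np.exp(-x ** 2 - y ** 2)
    xi = np.linspace(-2, 2, 50)
    yi = np.linspace(-2, 2, 60)
    # 1-D xi/yi are meshed internally; the result has shape (len(yi), len(xi))
    # and is masked outside the convex hull of (x, y).
    return griddata(x, y, z, xi, yi, interp='linear')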
##################################################
# Linear interpolation algorithms
##################################################
@cbook.deprecated("2.2", alternative="numpy.interp")
def less_simple_linear_interpolation(x, y, xi, extrap=False):
"""
This function provides simple (but somewhat less so than
:func:`cbook.simple_linear_interpolation`) linear interpolation.
:func:`simple_linear_interpolation` will give a list of points
between a start and an end, while this does true linear
interpolation at an arbitrary set of points.
This is very inefficient linear interpolation meant to be used
only for a small number of points in relatively non-intensive use
cases. For real linear interpolation, use scipy.
"""
x = np.asarray(x)
y = np.asarray(y)
xi = np.atleast_1d(xi)
s = list(y.shape)
s[0] = len(xi)
yi = np.tile(np.nan, s)
for ii, xx in enumerate(xi):
bb = x == xx
if np.any(bb):
jj, = np.nonzero(bb)
yi[ii] = y[jj[0]]
elif xx < x[0]:
if extrap:
yi[ii] = y[0]
elif xx > x[-1]:
if extrap:
yi[ii] = y[-1]
else:
jj, = np.nonzero(x < xx)
jj = max(jj)
yi[ii] = y[jj] + (xx-x[jj])/(x[jj+1]-x[jj]) * (y[jj+1]-y[jj])
return yi
@cbook.deprecated("2.2")
def slopes(x, y):
"""
:func:`slopes` calculates the slope *y*'(*x*)
The slope is estimated using the slope obtained from that of a
parabola through any three consecutive points.
This method should be superior to that described in the appendix
of A CONSISTENTLY WELL BEHAVED METHOD OF INTERPOLATION by Russell
W. Stineman (Creative Computing July 1980) in at least one aspect:
Circles for interpolation demand a known aspect ratio between
*x*- and *y*-values. For many functions, however, the abscissa
are given in different dimensions, so an aspect ratio is
completely arbitrary.
The parabola method gives very similar results to the circle
method for most regular cases but behaves much better in special
cases.
Norbert Nemec, Institute of Theoretical Physics, University of
Regensburg, April 2006 Norbert.Nemec at physik.uni-regensburg.de
(inspired by an original implementation by Halldor Bjornsson,
Icelandic Meteorological Office, March 2006 halldor at vedur.is)
"""
# Cast key variables as float.
x = np.asarray(x, float)
y = np.asarray(y, float)
yp = np.zeros(y.shape, float)
dx = x[1:] - x[:-1]
dy = y[1:] - y[:-1]
dydx = dy/dx
yp[1:-1] = (dydx[:-1] * dx[1:] + dydx[1:] * dx[:-1])/(dx[1:] + dx[:-1])
yp[0] = 2.0 * dy[0]/dx[0] - yp[1]
yp[-1] = 2.0 * dy[-1]/dx[-1] - yp[-2]
return yp
@cbook.deprecated("2.2")
def stineman_interp(xi, x, y, yp=None):
"""
Given data vectors *x* and *y*, the slope vector *yp* and a new
abscissa vector *xi*, the function :func:`stineman_interp` uses
Stineman interpolation to calculate a vector *yi* corresponding to
*xi*.
Here's an example that generates a coarse sine curve, then
interpolates over a finer abscissa::
x = linspace(0,2*pi,20); y = sin(x); yp = cos(x)
xi = linspace(0,2*pi,40);
yi = stineman_interp(xi,x,y,yp);
plot(x,y,'o',xi,yi)
The interpolation method is described in the article A
CONSISTENTLY WELL BEHAVED METHOD OF INTERPOLATION by Russell
W. Stineman. The article appeared in the July 1980 issue of
Creative Computing with a note from the editor stating that while
they were "not an academic journal but once in a while something
serious and original comes in", adding that this was "apparently a
real solution" to a well known problem.
For *yp* = *None*, the routine automatically determines the slopes
using the :func:`slopes` routine.
*x* is assumed to be sorted in increasing order.
For values ``xi[j] < x[0]`` or ``xi[j] > x[-1]``, the routine
tries an extrapolation. The relevance of the data obtained from
this, of course, is questionable...
Original implementation by Halldor Bjornsson, Icelandic
Meteorological Office, March 2006 halldor at vedur.is
Completely reworked and optimized for Python by Norbert Nemec,
Institute of Theoretical Physics, University of Regensburg, April
2006 Norbert.Nemec at physik.uni-regensburg.de
"""
# Cast key variables as float.
x = np.asarray(x, float)
y = np.asarray(y, float)
if x.shape != y.shape:
raise ValueError("'x' and 'y' must be of same shape")
if yp is None:
yp = slopes(x, y)
else:
yp = np.asarray(yp, float)
xi = np.asarray(xi, float)
yi = np.zeros(xi.shape, float)
# calculate linear slopes
dx = x[1:] - x[:-1]
dy = y[1:] - y[:-1]
s = dy/dx # note length of s is N-1 so last element is #N-2
# find the segment each xi is in
# this line actually is the key to the efficiency of this implementation
idx = np.searchsorted(x[1:-1], xi)
# now we have generally: x[idx[j]] <= xi[j] <= x[idx[j]+1]
# except at the boundaries, where it may be that xi[j] < x[0] or
# xi[j] > x[-1]
# the y-values that would come out from a linear interpolation:
sidx = s.take(idx)
xidx = x.take(idx)
yidx = y.take(idx)
xidxp1 = x.take(idx+1)
yo = yidx + sidx * (xi - xidx)
# the difference that comes when using the slopes given in yp
# using the yp slope of the left point
dy1 = (yp.take(idx) - sidx) * (xi - xidx)
# using the yp slope of the right point
dy2 = (yp.take(idx+1)-sidx) * (xi - xidxp1)
dy1dy2 = dy1*dy2
# The following is optimized for Python. The solution actually
# does more calculations than necessary but exploiting the power
# of numpy, this is far more efficient than coding a loop by hand
# in Python
yi = yo + dy1dy2 * np.choose(np.array(np.sign(dy1dy2), np.int32)+1,
((2*xi-xidx-xidxp1)/((dy1-dy2)*(xidxp1-xidx)),
0.0,
1/(dy1+dy2),))
return yi
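# A runnable version of the coarse-sine example given in the stineman_interp
# docstring above; the analytic slopes cos(x) are passed in, but yp=None
# would fall back to slopes().
def _stineman_interp_usage_sketch():
    x = np.linspace(0, 2 * np.pi, 20)
    y = np.sin(x)
    yp = np.cos(x)
    xi = np.linspace(0, 2 * np.pi, 40)
    return xi, stineman_interp(xi, x, y, yp)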
class GaussianKDE(object):
"""
Representation of a kernel-density estimate using Gaussian kernels.
Parameters
----------
dataset : array_like
Datapoints to estimate from. In case of univariate data this is a 1-D
array, otherwise a 2-D array with shape (# of dims, # of data).
bw_method : str, scalar or callable, optional
The method used to calculate the estimator bandwidth. This can be
'scott', 'silverman', a scalar constant or a callable. If a
scalar, this will be used directly as `kde.factor`. If a
callable, it should take a `GaussianKDE` instance as only
parameter and return a scalar. If None (default), 'scott' is used.
Attributes
----------
dataset : ndarray
The dataset with which `gaussian_kde` was initialized.
dim : int
Number of dimensions.
num_dp : int
Number of datapoints.
factor : float
The bandwidth factor, obtained from `kde.covariance_factor`, with which
the covariance matrix is multiplied.
covariance : ndarray
The covariance matrix of `dataset`, scaled by the calculated bandwidth
(`kde.factor`).
inv_cov : ndarray
The inverse of `covariance`.
Methods
-------
kde.evaluate(points) : ndarray
Evaluate the estimated pdf on a provided set of points.
kde(points) : ndarray
Same as kde.evaluate(points)
"""
# This implementation with minor modification was too good to pass up.
# from scipy: https://github.com/scipy/scipy/blob/master/scipy/stats/kde.py
def __init__(self, dataset, bw_method=None):
self.dataset = np.atleast_2d(dataset)
if not np.array(self.dataset).size > 1:
raise ValueError("`dataset` input should have multiple elements.")
self.dim, self.num_dp = np.array(self.dataset).shape
isString = isinstance(bw_method, six.string_types)
if bw_method is None:
pass
elif (isString and bw_method == 'scott'):
self.covariance_factor = self.scotts_factor
elif (isString and bw_method == 'silverman'):
self.covariance_factor = self.silverman_factor
elif (np.isscalar(bw_method) and not isString):
self._bw_method = 'use constant'
self.covariance_factor = lambda: bw_method
elif callable(bw_method):
self._bw_method = bw_method
self.covariance_factor = lambda: self._bw_method(self)
else:
raise ValueError("`bw_method` should be 'scott', 'silverman', a "
"scalar or a callable")
# Computes the covariance matrix for each Gaussian kernel using
# covariance_factor().
self.factor = self.covariance_factor()
# Cache covariance and inverse covariance of the data
if not hasattr(self, '_data_inv_cov'):
self.data_covariance = np.atleast_2d(
np.cov(
self.dataset,
rowvar=1,
bias=False))
self.data_inv_cov = np.linalg.inv(self.data_covariance)
self.covariance = self.data_covariance * self.factor ** 2
self.inv_cov = self.data_inv_cov / self.factor ** 2
self.norm_factor = np.sqrt(
np.linalg.det(
2 * np.pi * self.covariance)) * self.num_dp
def scotts_factor(self):
return np.power(self.num_dp, -1. / (self.dim + 4))
def silverman_factor(self):
return np.power(
self.num_dp * (self.dim + 2.0) / 4.0, -1. / (self.dim + 4))
# Default method to calculate bandwidth, can be overwritten by subclass
covariance_factor = scotts_factor
def evaluate(self, points):
"""Evaluate the estimated pdf on a set of points.
Parameters
----------
points : (# of dimensions, # of points)-array
Alternatively, a (# of dimensions,) vector can be passed in and
treated as a single point.
Returns
-------
values : (# of points,)-array
The values at each point.
Raises
------
ValueError : if the dimensionality of the input points is different
than the dimensionality of the KDE.
"""
points = np.atleast_2d(points)
dim, num_m = np.array(points).shape
if dim != self.dim:
raise ValueError("points have dimension {}, dataset has dimension "
"{}".format(dim, self.dim))
result = np.zeros((num_m,), dtype=float)
if num_m >= self.num_dp:
# there are more points than data, so loop over data
for i in range(self.num_dp):
diff = self.dataset[:, i, np.newaxis] - points
tdiff = np.dot(self.inv_cov, diff)
energy = np.sum(diff * tdiff, axis=0) / 2.0
result = result + np.exp(-energy)
else:
# loop over points
for i in range(num_m):
diff = self.dataset - points[:, i, np.newaxis]
tdiff = np.dot(self.inv_cov, diff)
energy = np.sum(diff * tdiff, axis=0) / 2.0
result[i] = np.sum(np.exp(-energy), axis=0)
result = result / self.norm_factor
return result
__call__ = evaluate
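# A usage sketch for GaussianKDE: a 1-D density estimate evaluated on a grid.
# The data are synthetic and the bandwidth rule is one of the two string
# options documented above.
def _gaussian_kde_usage_sketch():
    rng = np.random.RandomState(0)
    data = rng.normal(loc=0.0, scale=1.0, size=200)
    kde = GaussianKDE(data, bw_method='silverman')
    grid = np.linspace(-4, 4, 101)
    return grid, kde(grid)    # kde(grid) is the same as kde.evaluate(grid)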
##################################################
# Code related to things in and around polygons
##################################################
@cbook.deprecated("2.2")
def inside_poly(points, verts):
"""
*points* is a sequence of *x*, *y* points.
*verts* is a sequence of *x*, *y* vertices of a polygon.
Return value is a sequence of indices into points for the points
that are inside the polygon.
"""
# Make a closed polygon path
poly = Path(verts)
# Check to see which points are contained within the Path
return [idx for idx, p in enumerate(points) if poly.contains_point(p)]
@cbook.deprecated("2.2")
def poly_below(xmin, xs, ys):
"""
Given a sequence of *xs* and *ys*, return the vertices of a
polygon that has a horizontal base at *xmin* and an upper bound at
the *ys*. *xmin* is a scalar.
Intended for use with :meth:`matplotlib.axes.Axes.fill`, e.g.,::
xv, yv = poly_below(0, x, y)
ax.fill(xv, yv)
"""
if any(isinstance(var, np.ma.MaskedArray) for var in [xs, ys]):
numpy = np.ma
else:
numpy = np
xs = numpy.asarray(xs)
ys = numpy.asarray(ys)
Nx = len(xs)
Ny = len(ys)
if Nx != Ny:
raise ValueError("'xs' and 'ys' must have the same length")
x = xmin*numpy.ones(2*Nx)
y = numpy.ones(2*Nx)
x[:Nx] = xs
y[:Nx] = ys
y[Nx:] = ys[::-1]
return x, y
@cbook.deprecated("2.2")
def poly_between(x, ylower, yupper):
"""
Given a sequence of *x*, *ylower* and *yupper*, return the polygon
that fills the regions between them. *ylower* or *yupper* can be
scalar or iterable. If they are iterable, they must be equal in
length to *x*.
Return value is *x*, *y* arrays for use with
:meth:`matplotlib.axes.Axes.fill`.
"""
if any(isinstance(var, np.ma.MaskedArray) for var in [ylower, yupper, x]):
numpy = np.ma
else:
numpy = np
Nx = len(x)
if not cbook.iterable(ylower):
ylower = ylower*numpy.ones(Nx)
if not cbook.iterable(yupper):
yupper = yupper*numpy.ones(Nx)
x = numpy.concatenate((x, x[::-1]))
y = numpy.concatenate((yupper, ylower[::-1]))
return x, y
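# A usage sketch for poly_between: the lower bound may be a scalar, as here,
# and the returned vertices are intended for Axes.fill().
def _poly_between_usage_sketch():
    x = np.linspace(0, 2 * np.pi, 50)
    xv, yv = poly_between(x, 0.0, np.sin(x))   # region between y=0 and sin(x)
    return xv, yv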
@cbook.deprecated('2.2')
def is_closed_polygon(X):
"""
Tests whether the first and last objects in a sequence are the same. These are
presumably coordinates on a polygonal curve, in which case this function
tests if that curve is closed.
"""
return np.all(X[0] == X[-1])
@cbook.deprecated("2.2", message='Moved to matplotlib.cbook')
def contiguous_regions(mask):
"""
return a list of (ind0, ind1) such that mask[ind0:ind1].all() is
True and we cover all such regions
"""
return cbook.contiguous_regions(mask)
@cbook.deprecated("2.2")
def cross_from_below(x, threshold):
"""
return the indices into *x* where *x* crosses some threshold from
below, e.g., the i's where::
x[i-1]<threshold and x[i]>=threshold
Example code::
import matplotlib.pyplot as plt
t = np.arange(0.0, 2.0, 0.1)
s = np.sin(2*np.pi*t)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(t, s, '-o')
ax.axhline(0.5)
ax.axhline(-0.5)
ind = cross_from_below(s, 0.5)
ax.vlines(t[ind], -1, 1)
ind = cross_from_above(s, -0.5)
ax.vlines(t[ind], -1, 1)
plt.show()
See Also
--------
:func:`cross_from_above` and :func:`contiguous_regions`
"""
x = np.asarray(x)
ind = np.nonzero((x[:-1] < threshold) & (x[1:] >= threshold))[0]
if len(ind):
return ind+1
else:
return ind
@cbook.deprecated("2.2")
def cross_from_above(x, threshold):
"""
return the indices into *x* where *x* crosses some threshold from
above, e.g., the i's where::
x[i-1]>threshold and x[i]<=threshold
See Also
--------
:func:`cross_from_below` and :func:`contiguous_regions`
"""
x = np.asarray(x)
ind = np.nonzero((x[:-1] >= threshold) & (x[1:] < threshold))[0]
if len(ind):
return ind+1
else:
return ind
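# A runnable sketch combining cross_from_below and cross_from_above on a sine
# wave, in the spirit of the example in the cross_from_below docstring above.
def _threshold_crossing_sketch():
    t = np.arange(0.0, 2.0, 0.1)
    s = np.sin(2 * np.pi * t)
    up = cross_from_below(s, 0.5)      # indices where s rises through 0.5
    down = cross_from_above(s, -0.5)   # indices where s falls through -0.5
    return t[up], t[down]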
##################################################
# Vector and path length geometry calculations
##################################################
@cbook.deprecated('2.2')
def vector_lengths(X, P=2., axis=None):
"""
Finds the length of a set of vectors in *n* dimensions. This is
like the :func:`numpy.linalg.norm` function for vectors, but has the ability to
work over a particular axis of the supplied array or matrix.
Computes ``(sum((x_i)^P))^(1/P)`` for each ``{x_i}`` being the
elements of *X* along the given axis. If *axis* is *None*,
compute over all elements of *X*.
"""
X = np.asarray(X)
return (np.sum(X**(P), axis=axis))**(1./P)
@cbook.deprecated('2.2')
def distances_along_curve(X):
"""
Computes the distance between a set of successive points in *N* dimensions,
where *X* is an *M* x *N* array or matrix. The distances between
successive rows are computed. Distance is the standard Euclidean
distance.
"""
X = np.diff(X, axis=0)
return vector_lengths(X, axis=1)
@cbook.deprecated('2.2')
def path_length(X):
"""
Computes the distance travelled along a polygonal curve in *N* dimensions,
where *X* is an *M* x *N* array or matrix. Returns an array of
length *M* consisting of the distance along the curve at each point
(i.e., the rows of *X*).
"""
X = distances_along_curve(X)
return np.concatenate((np.zeros(1), np.cumsum(X)))
@cbook.deprecated('2.2')
def quad2cubic(q0x, q0y, q1x, q1y, q2x, q2y):
"""
Converts a quadratic Bezier curve to a cubic approximation.
The inputs are the *x* and *y* coordinates of the three control
points of a quadratic curve, and the output is a tuple of *x* and
*y* coordinates of the four control points of the cubic curve.
"""
# TODO: Candidate for deprecation -- no longer used internally
# c0x, c0y = q0x, q0y
c1x, c1y = q0x + 2./3. * (q1x - q0x), q0y + 2./3. * (q1y - q0y)
c2x, c2y = c1x + 1./3. * (q2x - q0x), c1y + 1./3. * (q2y - q0y)
# c3x, c3y = q2x, q2y
return q0x, q0y, c1x, c1y, c2x, c2y, q2x, q2y
@cbook.deprecated("2.2")
def offset_line(y, yerr):
"""
Offsets an array *y* by +/- an error and returns a tuple
(y - err, y + err).
The error term can be:
* A scalar. In this case, the returned tuple is obvious.
* A vector of the same length as *y*. The quantities y +/- err are computed
component-wise.
* A tuple of length 2. In this case, yerr[0] is the error below *y* and
yerr[1] is the error above *y*. For example::
from pylab import *
x = linspace(0, 2*pi, num=100, endpoint=True)
y = sin(x)
y_minus, y_plus = mlab.offset_line(y, 0.1)
plot(x, y)
fill_between(x, y_minus, y2=y_plus)
show()
"""
if cbook.is_numlike(yerr) or (cbook.iterable(yerr) and
len(yerr) == len(y)):
ymin = y - yerr
ymax = y + yerr
elif len(yerr) == 2:
ymin, ymax = y - yerr[0], y + yerr[1]
else:
raise ValueError("yerr must be scalar, 1xN or 2xN")
return ymin, ymax
repo: cba-pipeline-public
file: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/pyplot.py
# Note: The first part of this file can be modified in place, but the latter
# part is autogenerated by the boilerplate.py script.
"""
`matplotlib.pyplot` is a state-based interface to matplotlib. It provides
a MATLAB-like way of plotting.
pyplot is mainly intended for interactive plots and simple cases of programmatic
plot generation::
import numpy as np
import matplotlib.pyplot as plt
x = np.arange(0, 5, 0.1)
y = np.sin(x)
plt.plot(x, y)
The object-oriented API is recommended for more complex plots.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import sys
import time
import warnings
from cycler import cycler
import matplotlib
import matplotlib.colorbar
from matplotlib import style
from matplotlib import _pylab_helpers, interactive
from matplotlib.cbook import dedent, silent_list, is_numlike
from matplotlib.cbook import _string_to_bool
from matplotlib.cbook import deprecated, warn_deprecated
from matplotlib import docstring
from matplotlib.backend_bases import FigureCanvasBase
from matplotlib.figure import Figure, figaspect
from matplotlib.gridspec import GridSpec
from matplotlib.image import imread as _imread
from matplotlib.image import imsave as _imsave
from matplotlib import rcParams, rcParamsDefault, get_backend
from matplotlib import rc_context
from matplotlib.rcsetup import interactive_bk as _interactive_bk
from matplotlib.artist import getp, get, Artist
from matplotlib.artist import setp as _setp
from matplotlib.axes import Axes, Subplot
from matplotlib.projections import PolarAxes
from matplotlib import mlab # for csv2rec, detrend_none, window_hanning
from matplotlib.scale import get_scale_docs, get_scale_names
from matplotlib import cm
from matplotlib.cm import get_cmap, register_cmap
import numpy as np
# We may not need the following imports here:
from matplotlib.colors import Normalize
from matplotlib.lines import Line2D
from matplotlib.text import Text, Annotation
from matplotlib.patches import Polygon, Rectangle, Circle, Arrow
from matplotlib.widgets import SubplotTool, Button, Slider, Widget
from .ticker import TickHelper, Formatter, FixedFormatter, NullFormatter,\
FuncFormatter, FormatStrFormatter, ScalarFormatter,\
LogFormatter, LogFormatterExponent, LogFormatterMathtext,\
Locator, IndexLocator, FixedLocator, NullLocator,\
LinearLocator, LogLocator, AutoLocator, MultipleLocator,\
MaxNLocator
from matplotlib.backends import pylab_setup
## Backend detection ##
def _backend_selection():
""" If rcParams['backend_fallback'] is true, check to see if the
current backend is compatible with the current running event
loop, and if not switches to a compatible one.
"""
backend = rcParams['backend']
if not rcParams['backend_fallback'] or backend not in _interactive_bk:
return
is_agg_backend = rcParams['backend'].endswith('Agg')
if 'wx' in sys.modules and not backend in ('WX', 'WXAgg'):
import wx
if wx.App.IsMainLoopRunning():
rcParams['backend'] = 'wx' + 'Agg' * is_agg_backend
elif 'PyQt4.QtCore' in sys.modules and not backend == 'Qt4Agg':
import PyQt4.QtGui
if not PyQt4.QtGui.qApp.startingUp():
# The mainloop is running.
rcParams['backend'] = 'qt4Agg'
elif 'PyQt5.QtCore' in sys.modules and not backend == 'Qt5Agg':
import PyQt5.QtWidgets
if not PyQt5.QtWidgets.qApp.startingUp():
# The mainloop is running.
rcParams['backend'] = 'qt5Agg'
elif ('gtk' in sys.modules and
backend not in ('GTK', 'GTKAgg', 'GTKCairo')):
if 'gi' in sys.modules:
from gi.repository import GObject
ml = GObject.MainLoop
else:
import gobject
ml = gobject.MainLoop
if ml().is_running():
rcParams['backend'] = 'gtk' + 'Agg' * is_agg_backend
elif 'Tkinter' in sys.modules and not backend == 'TkAgg':
# import Tkinter
pass # what if anything do we need to do for tkinter?
_backend_selection()
## Global ##
_backend_mod, new_figure_manager, draw_if_interactive, _show = pylab_setup()
_IP_REGISTERED = None
_INSTALL_FIG_OBSERVER = False
def install_repl_displayhook():
"""
Install a repl display hook so that any stale figures are automatically
redrawn when control is returned to the repl.
This works with IPython terminals and kernels,
as well as vanilla python shells.
"""
global _IP_REGISTERED
global _INSTALL_FIG_OBSERVER
class _NotIPython(Exception):
pass
# see if we have IPython hooks around; if so, use them
try:
if 'IPython' in sys.modules:
from IPython import get_ipython
ip = get_ipython()
if ip is None:
raise _NotIPython()
if _IP_REGISTERED:
return
def post_execute():
if matplotlib.is_interactive():
draw_all()
# IPython >= 2
try:
ip.events.register('post_execute', post_execute)
except AttributeError:
# IPython 1.x
ip.register_post_execute(post_execute)
_IP_REGISTERED = post_execute
_INSTALL_FIG_OBSERVER = False
# trigger IPython's eventloop integration, if available
from IPython.core.pylabtools import backend2gui
ipython_gui_name = backend2gui.get(get_backend())
if ipython_gui_name:
ip.enable_gui(ipython_gui_name)
else:
_INSTALL_FIG_OBSERVER = True
# import failed or ipython is not running
except (ImportError, _NotIPython):
_INSTALL_FIG_OBSERVER = True
def uninstall_repl_displayhook():
"""
Uninstalls the matplotlib display hook.
.. warning
This requires IPython >= 2; for IPython < 2 it will raise a
``NotImplementedError``.
.. warning
If you are using vanilla python and have installed another
display hook, this will reset ``sys.displayhook`` to whatever
function was there when matplotlib installed its displayhook,
possibly discarding your changes.
"""
global _IP_REGISTERED
global _INSTALL_FIG_OBSERVER
if _IP_REGISTERED:
from IPython import get_ipython
ip = get_ipython()
try:
ip.events.unregister('post_execute', _IP_REGISTERED)
except AttributeError:
raise NotImplementedError("Can not unregister events "
"in IPython < 2.0")
_IP_REGISTERED = None
if _INSTALL_FIG_OBSERVER:
_INSTALL_FIG_OBSERVER = False
draw_all = _pylab_helpers.Gcf.draw_all
@docstring.copy_dedent(Artist.findobj)
def findobj(o=None, match=None, include_self=True):
if o is None:
o = gcf()
return o.findobj(match, include_self=include_self)
def switch_backend(newbackend):
"""
Switch the default backend. This feature is **experimental**, and
is only expected to work switching to an image backend. e.g., if
you have a bunch of PostScript scripts that you want to run from
an interactive ipython session, you may want to switch to the PS
backend before running them to avoid having a bunch of GUI windows
popup. If you try to interactively switch from one GUI backend to
another, you will explode.
Calling this command will close all open windows.
"""
close('all')
global _backend_mod, new_figure_manager, draw_if_interactive, _show
matplotlib.use(newbackend, warn=False, force=True)
from matplotlib.backends import pylab_setup
_backend_mod, new_figure_manager, draw_if_interactive, _show = pylab_setup()
def show(*args, **kw):
"""
Display a figure.
When running in ipython with its pylab mode, display all
figures and return to the ipython prompt.
In non-interactive mode, display all figures and block until
the figures have been closed; in interactive mode it has no
effect unless figures were created prior to a change from
non-interactive to interactive mode (not recommended). In
that case it displays the figures but does not block.
A single experimental keyword argument, *block*, may be
set to True or False to override the blocking behavior
described above.
"""
global _show
return _show(*args, **kw)
def isinteractive():
"""
Return status of interactive mode.
"""
return matplotlib.is_interactive()
def ioff():
"""Turn interactive mode off."""
matplotlib.interactive(False)
uninstall_repl_displayhook()
def ion():
"""Turn interactive mode on."""
matplotlib.interactive(True)
install_repl_displayhook()
def pause(interval):
"""
Pause for *interval* seconds.
If there is an active figure, it will be updated and displayed before the
pause, and the GUI event loop (if any) will run during the pause.
This can be used for crude animation. For more complex animation, see
:mod:`matplotlib.animation`.
Notes
-----
This function is experimental; its behavior may be changed or extended in a
future release.
"""
manager = _pylab_helpers.Gcf.get_active()
if manager is not None:
canvas = manager.canvas
if canvas.figure.stale:
canvas.draw_idle()
show(block=False)
canvas.start_event_loop(interval)
else:
time.sleep(interval)
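# A sketch of the "crude animation" use of pause() described in the docstring
# above.  It assumes the pyplot plot() wrapper from the autogenerated section
# later in this module; the figure contents are made up.
def _pause_animation_sketch():
    x = np.linspace(0, 2 * np.pi, 200)
    line, = plot(x, np.sin(x))
    for phase in np.linspace(0, 2 * np.pi, 30):
        line.set_ydata(np.sin(x + phase))
        pause(0.05)      # redraw and run the GUI event loop between frames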
@docstring.copy_dedent(matplotlib.rc)
def rc(*args, **kwargs):
matplotlib.rc(*args, **kwargs)
@docstring.copy_dedent(matplotlib.rc_context)
def rc_context(rc=None, fname=None):
return matplotlib.rc_context(rc, fname)
@docstring.copy_dedent(matplotlib.rcdefaults)
def rcdefaults():
matplotlib.rcdefaults()
if matplotlib.is_interactive():
draw_all()
# The current "image" (ScalarMappable) is retrieved or set
# only via the pyplot interface using the following two
# functions:
def gci():
"""
Get the current colorable artist. Specifically, returns the
current :class:`~matplotlib.cm.ScalarMappable` instance (image or
patch collection), or *None* if no images or patch collections
have been defined. The commands :func:`~matplotlib.pyplot.imshow`
and :func:`~matplotlib.pyplot.figimage` create
:class:`~matplotlib.image.Image` instances, and the commands
:func:`~matplotlib.pyplot.pcolor` and
:func:`~matplotlib.pyplot.scatter` create
:class:`~matplotlib.collections.Collection` instances. The
current image is an attribute of the current axes, or the nearest
earlier axes in the current figure that contains an image.
"""
return gcf()._gci()
def sci(im):
"""
Set the current image. This image will be the target of colormap
commands like :func:`~matplotlib.pyplot.jet`,
:func:`~matplotlib.pyplot.hot` or
:func:`~matplotlib.pyplot.clim`. The current image is an
attribute of the current axes.
"""
gca()._sci(im)
## Any Artist ##
# (getp is simply imported)
@docstring.copy(_setp)
def setp(*args, **kwargs):
return _setp(*args, **kwargs)
def xkcd(scale=1, length=100, randomness=2):
"""
Turns on `xkcd <https://xkcd.com/>`_ sketch-style drawing mode.
This will only have effect on things drawn after this function is
called.
For best results, the "Humor Sans" font should be installed: it is
not included with matplotlib.
Parameters
----------
scale : float, optional
The amplitude of the wiggle perpendicular to the source line.
length : float, optional
The length of the wiggle along the line.
randomness : float, optional
The scale factor by which the length is shrunken or expanded.
Notes
-----
This function works by setting a number of rcParams, so it will probably
override others you have set before.
If you want the effects of this function to be temporary, it can
be used as a context manager, for example::
with plt.xkcd():
# This figure will be in XKCD-style
fig1 = plt.figure()
# ...
# This figure will be in regular style
fig2 = plt.figure()
"""
if rcParams['text.usetex']:
raise RuntimeError(
"xkcd mode is not compatible with text.usetex = True")
from matplotlib import patheffects
xkcd_ctx = rc_context({
'font.family': ['xkcd', 'Humor Sans', 'Comic Sans MS'],
'font.size': 14.0,
'path.sketch': (scale, length, randomness),
'path.effects': [patheffects.withStroke(linewidth=4, foreground="w")],
'axes.linewidth': 1.5,
'lines.linewidth': 2.0,
'figure.facecolor': 'white',
'grid.linewidth': 0.0,
'axes.grid': False,
'axes.unicode_minus': False,
'axes.edgecolor': 'black',
'xtick.major.size': 8,
'xtick.major.width': 3,
'ytick.major.size': 8,
'ytick.major.width': 3,
})
xkcd_ctx.__enter__()
# In order to make the call to `xkcd` that does not use a context manager
# (cm) work, we need to enter into the cm ourselves, and return a dummy
# cm that does nothing on entry and cleans up the xkcd context on exit.
# Additionally, we need to keep a reference to the dummy cm because it
# would otherwise be exited when GC'd.
class dummy_ctx(object):
def __enter__(self):
pass
__exit__ = xkcd_ctx.__exit__
return dummy_ctx()
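# A sketch of the context-manager form of xkcd() shown in the docstring
# above, wrapped in a helper so that importing this module creates no figures.
def _xkcd_usage_sketch():
    with xkcd(scale=1, length=100, randomness=2):
        fig_sketchy = figure()    # drawn with the sketch path effects
    fig_regular = figure()        # rcParams restored on exit
    return fig_sketchy, fig_regular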
## Figures ##
def figure(num=None, # autoincrement if None, else integer from 1-N
figsize=None, # defaults to rc figure.figsize
dpi=None, # defaults to rc figure.dpi
facecolor=None, # defaults to rc figure.facecolor
edgecolor=None, # defaults to rc figure.edgecolor
frameon=True,
FigureClass=Figure,
clear=False,
**kwargs
):
"""
Creates a new figure.
Parameters
----------
num : integer or string, optional, default: none
If not provided, a new figure will be created, and the figure number
will be incremented. The figure object holds this number in a `number`
attribute.
If num is provided and a figure with this id already exists, it is
made active and a reference to it is returned. If a figure with this
id does not exist, it is created and returned.
If num is a string, the window title will be set to this figure's
`num`.
figsize : tuple of integers, optional, default: None
width, height in inches. If not provided, defaults to rc
figure.figsize.
dpi : integer, optional, default: None
resolution of the figure. If not provided, defaults to rc figure.dpi.
facecolor :
the background color. If not provided, defaults to rc figure.facecolor.
edgecolor :
the border color. If not provided, defaults to rc figure.edgecolor.
frameon : bool, optional, default: True
If False, suppress drawing the figure frame.
FigureClass : class derived from matplotlib.figure.Figure
Optionally use a custom Figure instance.
clear : bool, optional, default: False
If True and the figure already exists, then it is cleared.
Returns
-------
figure : Figure
The Figure instance returned will also be passed to new_figure_manager
in the backends, which allows hooking custom Figure classes into the
pylab interface. Additional kwargs will be passed to the figure init
function.
Notes
-----
If you are creating many figures, make sure you explicitly call "close"
on the figures you are not using, because this will enable pylab
to properly clean up the memory.
rcParams defines the default values, which can be modified in the
matplotlibrc file
"""
if figsize is None:
figsize = rcParams['figure.figsize']
if dpi is None:
dpi = rcParams['figure.dpi']
if facecolor is None:
facecolor = rcParams['figure.facecolor']
if edgecolor is None:
edgecolor = rcParams['figure.edgecolor']
allnums = get_fignums()
next_num = max(allnums) + 1 if allnums else 1
figLabel = ''
if num is None:
num = next_num
elif isinstance(num, six.string_types):
figLabel = num
allLabels = get_figlabels()
if figLabel not in allLabels:
if figLabel == 'all':
warnings.warn("close('all') closes all existing figures")
num = next_num
else:
inum = allLabels.index(figLabel)
num = allnums[inum]
else:
num = int(num) # crude validation of num argument
figManager = _pylab_helpers.Gcf.get_fig_manager(num)
if figManager is None:
max_open_warning = rcParams['figure.max_open_warning']
if (max_open_warning >= 1 and len(allnums) >= max_open_warning):
warnings.warn(
"More than %d figures have been opened. Figures "
"created through the pyplot interface "
"(`matplotlib.pyplot.figure`) are retained until "
"explicitly closed and may consume too much memory. "
"(To control this warning, see the rcParam "
"`figure.max_open_warning`)." %
max_open_warning, RuntimeWarning)
if get_backend().lower() == 'ps':
dpi = 72
figManager = new_figure_manager(num, figsize=figsize,
dpi=dpi,
facecolor=facecolor,
edgecolor=edgecolor,
frameon=frameon,
FigureClass=FigureClass,
**kwargs)
if figLabel:
figManager.set_window_title(figLabel)
figManager.canvas.figure.set_label(figLabel)
# make this figure current on button press event
def make_active(event):
_pylab_helpers.Gcf.set_active(figManager)
cid = figManager.canvas.mpl_connect('button_press_event', make_active)
figManager._cidgcf = cid
_pylab_helpers.Gcf.set_active(figManager)
fig = figManager.canvas.figure
fig.number = num
# make sure backends (inline) that we don't ship that expect this
# to be called in plotting commands to make the figure call show
# still work. There is probably a better way to do this in the
# FigureManager base class.
if matplotlib.is_interactive():
draw_if_interactive()
if _INSTALL_FIG_OBSERVER:
fig.stale_callback = _auto_draw_if_interactive
if clear:
figManager.canvas.figure.clear()
return figManager.canvas.figure
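# A sketch of numbered and labelled figures with figure(); the label
# 'diagnostics' and the helper name are arbitrary examples.
def _figure_usage_sketch():
    fig1 = figure(1, figsize=(4, 3))    # create (or activate) figure number 1
    fig2 = figure('diagnostics')        # a string num also sets the window title
    fig1_again = figure(1, clear=True)  # same Figure object, now cleared
    assert fig1 is fig1_again
    return fig1, fig2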
def _auto_draw_if_interactive(fig, val):
"""
This is an internal helper function for making sure that auto-redrawing
works as intended in the plain python repl.
Parameters
----------
fig : Figure
A figure object which is assumed to be associated with a canvas
"""
if val and matplotlib.is_interactive() and not fig.canvas.is_saving():
fig.canvas.draw_idle()
def gcf():
"""Get a reference to the current figure."""
figManager = _pylab_helpers.Gcf.get_active()
if figManager is not None:
return figManager.canvas.figure
else:
return figure()
def fignum_exists(num):
return _pylab_helpers.Gcf.has_fignum(num) or num in get_figlabels()
def get_fignums():
"""Return a list of existing figure numbers."""
return sorted(_pylab_helpers.Gcf.figs)
def get_figlabels():
"""Return a list of existing figure labels."""
figManagers = _pylab_helpers.Gcf.get_all_fig_managers()
figManagers.sort(key=lambda m: m.num)
return [m.canvas.figure.get_label() for m in figManagers]
def get_current_fig_manager():
figManager = _pylab_helpers.Gcf.get_active()
if figManager is None:
gcf() # creates an active figure as a side effect
figManager = _pylab_helpers.Gcf.get_active()
return figManager
@docstring.copy_dedent(FigureCanvasBase.mpl_connect)
def connect(s, func):
return get_current_fig_manager().canvas.mpl_connect(s, func)
@docstring.copy_dedent(FigureCanvasBase.mpl_disconnect)
def disconnect(cid):
return get_current_fig_manager().canvas.mpl_disconnect(cid)
def close(*args):
"""
Close a figure window.
``close()`` by itself closes the current figure
``close(fig)`` closes the `.Figure` instance *fig*
``close(num)`` closes the figure number *num*
``close(name)`` where *name* is a string, closes figure with that label
``close('all')`` closes all the figure windows
"""
if len(args) == 0:
figManager = _pylab_helpers.Gcf.get_active()
if figManager is None:
return
else:
_pylab_helpers.Gcf.destroy(figManager.num)
elif len(args) == 1:
arg = args[0]
if arg == 'all':
_pylab_helpers.Gcf.destroy_all()
elif isinstance(arg, six.integer_types):
_pylab_helpers.Gcf.destroy(arg)
elif hasattr(arg, 'int'):
# if we are dealing with a type UUID, we
# can use its integer representation
_pylab_helpers.Gcf.destroy(arg.int)
elif isinstance(arg, six.string_types):
allLabels = get_figlabels()
if arg in allLabels:
num = get_fignums()[allLabels.index(arg)]
_pylab_helpers.Gcf.destroy(num)
elif isinstance(arg, Figure):
_pylab_helpers.Gcf.destroy_fig(arg)
else:
raise TypeError('Unrecognized argument type %s to close' % type(arg))
else:
raise TypeError('close takes 0 or 1 arguments')
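# A sketch of the argument forms accepted by close(), mirroring the docstring
# above; the figure numbers and labels are arbitrary.
def _close_usage_sketch():
    fig = figure('scratch')
    close(fig)          # by Figure instance
    figure(5)
    close(5)            # by number
    figure('scratch')
    close('scratch')    # by label
    close('all')        # everything that is left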
def clf():
"""
Clear the current figure.
"""
gcf().clf()
def draw():
"""Redraw the current figure.
This is used to update a figure that has been altered, but not
automatically re-drawn. If interactive mode is on (:func:`.ion()`), this
should be only rarely needed, but there may be ways to modify the state of
a figure without marking it as `stale`. Please report these cases as
bugs.
A more object-oriented alternative, given any
:class:`~matplotlib.figure.Figure` instance, :attr:`fig`, that
was created using a :mod:`~matplotlib.pyplot` function, is::
fig.canvas.draw_idle()
"""
get_current_fig_manager().canvas.draw_idle()
@docstring.copy_dedent(Figure.savefig)
def savefig(*args, **kwargs):
fig = gcf()
res = fig.savefig(*args, **kwargs)
fig.canvas.draw_idle() # need this if 'transparent=True' to reset colors
return res
@docstring.copy_dedent(Figure.ginput)
def ginput(*args, **kwargs):
"""
Blocking call to interact with the figure.
This will wait for *n* clicks from the user and return a list of the
coordinates of each click.
If *timeout* is negative, does not timeout.
"""
return gcf().ginput(*args, **kwargs)
@docstring.copy_dedent(Figure.waitforbuttonpress)
def waitforbuttonpress(*args, **kwargs):
"""
Blocking call to interact with the figure.
This will wait for *n* key or mouse clicks from the user and
return a list containing True's for keyboard clicks and False's
for mouse clicks.
If *timeout* is negative, does not timeout.
"""
return gcf().waitforbuttonpress(*args, **kwargs)
# Putting things in figures
@docstring.copy_dedent(Figure.text)
def figtext(*args, **kwargs):
return gcf().text(*args, **kwargs)
@docstring.copy_dedent(Figure.suptitle)
def suptitle(*args, **kwargs):
return gcf().suptitle(*args, **kwargs)
@docstring.copy_dedent(Figure.figimage)
def figimage(*args, **kwargs):
return gcf().figimage(*args, **kwargs)
def figlegend(*args, **kwargs):
"""
Place a legend in the figure.
*labels*
a sequence of strings
*handles*
a sequence of :class:`~matplotlib.lines.Line2D` or
:class:`~matplotlib.patches.Patch` instances
*loc*
can be a string or an integer specifying the legend
location
A :class:`matplotlib.legend.Legend` instance is returned.
Examples
--------
To make a legend from existing artists on every axes::
figlegend()
To make a legend for a list of lines and labels::
figlegend( (line1, line2, line3),
('label1', 'label2', 'label3'),
'upper right' )
.. seealso::
:func:`~matplotlib.pyplot.legend`
"""
return gcf().legend(*args, **kwargs)
## Figure and Axes hybrid ##
_hold_msg = """pyplot.hold is deprecated.
Future behavior will be consistent with the long-time default:
plot commands add elements without first clearing the
Axes and/or Figure."""
@deprecated("2.0", message=_hold_msg)
def hold(b=None):
"""
Set the hold state. If *b* is None (default), toggle the
hold state, else set the hold state to boolean value *b*::
hold() # toggle hold
hold(True) # hold is on
hold(False) # hold is off
When *hold* is *True*, subsequent plot commands will add elements to
the current axes. When *hold* is *False*, the current axes and
figure will be cleared on the next plot command.
"""
fig = gcf()
ax = fig.gca()
if b is not None:
b = bool(b)
fig._hold = b
ax._hold = b
# b=None toggles the hold state, so let's get the current hold
# state; but should pyplot hold toggle the rc setting - me thinks
# not
b = ax._hold
# The comment above looks ancient; and probably the line below,
# contrary to the comment, is equally ancient. It will trigger
# a second warning, but "Oh, well...".
rc('axes', hold=b)
@deprecated("2.0", message=_hold_msg)
def ishold():
"""
Return the hold status of the current axes.
"""
return gca()._hold
@deprecated("2.0", message=_hold_msg)
def over(func, *args, **kwargs):
"""
Call a function with hold(True).
Calls::
func(*args, **kwargs)
with ``hold(True)`` and then restores the hold state.
"""
ax = gca()
h = ax._hold
ax._hold = True
func(*args, **kwargs)
ax._hold = h
## Axes ##
def axes(arg=None, **kwargs):
"""
Add an axes to the current figure and make it the current axes.
Parameters
----------
arg : None or 4-tuple or Axes
The exact behavior of this function depends on the type:
- *None*: A new full window axes is added using
``subplot(111, **kwargs)``
- 4-tuple of floats *rect* = ``[left, bottom, width, height]``.
A new axes is added with dimensions *rect* in normalized
(0, 1) units using `~.Figure.add_axes` on the current figure.
- `.Axes`: This is equivalent to `.pyplot.sca`. It sets the current
axes to *arg*. Note: This implicitly changes the current figure to
the parent of *arg*.
.. note:: The use of an Axes as an argument is deprecated and will be
removed in v3.0. Please use `.pyplot.sca` instead.
Other Parameters
----------------
**kwargs :
For allowed keyword arguments see `.pyplot.subplot` and
`.Figure.add_axes` respectively. Some common keyword arguments are
listed below:
========= =========== =================================================
kwarg Accepts Description
========= =========== =================================================
facecolor color the axes background color
frameon bool whether to display the frame
sharex otherax share x-axis with *otherax*
sharey otherax share y-axis with *otherax*
polar bool whether to use polar axes
aspect [str | num] ['equal', 'auto'] or a number. If a number, the
ratio of y-unit/x-unit in screen-space. See also
`~.Axes.set_aspect`.
========= =========== =================================================
Returns
-------
axes : Axes
The created or activated axes.
Examples
--------
Creating a new full window axes::
>>> plt.axes()
Creating a new axes with specified dimensions and some kwargs::
>>> plt.axes((left, bottom, width, height), facecolor='w')
"""
if arg is None:
return subplot(111, **kwargs)
if isinstance(arg, Axes):
warn_deprecated("2.2",
message="Using pyplot.axes(ax) with ax an Axes "
"argument is deprecated. Please use "
"pyplot.sca(ax) instead.")
ax = arg
sca(ax)
return ax
else:
rect = arg
return gcf().add_axes(rect, **kwargs)
def delaxes(ax=None):
"""
Remove the given `Axes` *ax* from the current figure. If *ax* is *None*,
the current axes is removed. A KeyError is raised if the axes doesn't exist.
"""
if ax is None:
ax = gca()
gcf().delaxes(ax)
def sca(ax):
"""
Set the current Axes instance to *ax*.
The current Figure is updated to the parent of *ax*.
"""
managers = _pylab_helpers.Gcf.get_all_fig_managers()
for m in managers:
if ax in m.canvas.figure.axes:
_pylab_helpers.Gcf.set_active(m)
m.canvas.figure.sca(ax)
return
raise ValueError("Axes instance argument was not found in a figure.")
def gca(**kwargs):
"""
Get the current :class:`~matplotlib.axes.Axes` instance on the
current figure matching the given keyword args, or create one.
Examples
--------
To get the current polar axes on the current figure::
plt.gca(projection='polar')
If the current axes doesn't exist, or isn't a polar one, the appropriate
axes will be created and then returned.
See Also
--------
matplotlib.figure.Figure.gca : The figure's gca method.
"""
return gcf().gca(**kwargs)
# More ways of creating axes:
def subplot(*args, **kwargs):
"""
Return a subplot axes at the given grid position.
Call signature::
subplot(nrows, ncols, index, **kwargs)
In the current figure, create and return an `.Axes`, at position *index*
of a (virtual) grid of *nrows* by *ncols* axes. Indexes go from 1 to
``nrows * ncols``, incrementing in row-major order.
If *nrows*, *ncols* and *index* are all less than 10, they can also be
given as a single, concatenated, three-digit number.
For example, ``subplot(2, 3, 3)`` and ``subplot(233)`` both create an
`.Axes` at the top right corner of the current figure, occupying half of
the figure height and a third of the figure width.
.. note::
Creating a subplot will delete any pre-existing subplot that overlaps
with it beyond sharing a boundary::
import matplotlib.pyplot as plt
# plot a line, implicitly creating a subplot(111)
plt.plot([1,2,3])
# now create a subplot which represents the top plot of a grid
# with 2 rows and 1 column. Since this subplot will overlap the
# first, the plot (and its axes) previously created, will be removed
plt.subplot(211)
plt.plot(range(12))
plt.subplot(212, facecolor='y') # creates 2nd subplot with yellow background
If you do not want this behavior, use the
:meth:`~matplotlib.figure.Figure.add_subplot` method or the
:func:`~matplotlib.pyplot.axes` function instead.
Keyword arguments:
*facecolor*:
The background color of the subplot, which can be any valid
color specifier. See :mod:`matplotlib.colors` for more
information.
*polar*:
A boolean flag indicating whether the subplot plot should be
a polar projection. Defaults to *False*.
*projection*:
A string giving the name of a custom projection to be used
for the subplot. This projection must have been previously
registered. See :mod:`matplotlib.projections`.
.. seealso::
:func:`~matplotlib.pyplot.axes`
For additional information on :func:`axes` and
:func:`subplot` keyword arguments.
:file:`gallery/pie_and_polar_charts/polar_scatter.py`
For an example
**Example:**
.. plot:: gallery/subplots_axes_and_figures/subplot.py
"""
# if subplot called without arguments, create subplot(1,1,1)
    if len(args) == 0:
        args = (1, 1, 1)
# This check was added because it is very easy to type
# subplot(1, 2, False) when subplots(1, 2, False) was intended
# (sharex=False, that is). In most cases, no error will
# ever occur, but mysterious behavior can result because what was
# intended to be the sharex argument is instead treated as a
# subplot index for subplot()
    if len(args) >= 3 and isinstance(args[2], bool):
warnings.warn("The subplot index argument to subplot() appears"
" to be a boolean. Did you intend to use subplots()?")
fig = gcf()
a = fig.add_subplot(*args, **kwargs)
bbox = a.bbox
byebye = []
    for other in fig.axes:
        if other == a:
            continue
        if bbox.fully_overlaps(other.bbox):
            byebye.append(other)
    for ax in byebye:
        delaxes(ax)
return a
def subplots(nrows=1, ncols=1, sharex=False, sharey=False, squeeze=True,
subplot_kw=None, gridspec_kw=None, **fig_kw):
"""
Create a figure and a set of subplots
This utility wrapper makes it convenient to create common layouts of
subplots, including the enclosing figure object, in a single call.
Parameters
----------
nrows, ncols : int, optional, default: 1
Number of rows/columns of the subplot grid.
sharex, sharey : bool or {'none', 'all', 'row', 'col'}, default: False
Controls sharing of properties among x (`sharex`) or y (`sharey`)
axes:
- True or 'all': x- or y-axis will be shared among all
subplots.
- False or 'none': each subplot x- or y-axis will be
independent.
- 'row': each subplot row will share an x- or y-axis.
- 'col': each subplot column will share an x- or y-axis.
When subplots have a shared x-axis along a column, only the x tick
labels of the bottom subplot are visible. Similarly, when subplots
have a shared y-axis along a row, only the y tick labels of the first
column subplot are visible.
squeeze : bool, optional, default: True
- If True, extra dimensions are squeezed out from the returned Axes
object:
- if only one subplot is constructed (nrows=ncols=1), the
resulting single Axes object is returned as a scalar.
            - for Nx1 or 1xN subplots, the returned object is a 1D numpy
              object array of Axes objects.
            - for NxM subplots with N>1 and M>1, the returned object is a
              2D array of Axes objects.
- If False, no squeezing at all is done: the returned Axes object is
always a 2D array containing Axes instances, even if it ends up
being 1x1.
subplot_kw : dict, optional
Dict with keywords passed to the
:meth:`~matplotlib.figure.Figure.add_subplot` call used to create each
subplot.
gridspec_kw : dict, optional
Dict with keywords passed to the
:class:`~matplotlib.gridspec.GridSpec` constructor used to create the
grid the subplots are placed on.
**fig_kw :
All additional keyword arguments are passed to the :func:`figure` call.
Returns
-------
fig : :class:`matplotlib.figure.Figure` object
ax : Axes object or array of Axes objects.
ax can be either a single :class:`matplotlib.axes.Axes` object or an
array of Axes objects if more than one subplot was created. The
dimensions of the resulting array can be controlled with the squeeze
keyword, see above.
Examples
--------
First create some toy data:
>>> x = np.linspace(0, 2*np.pi, 400)
>>> y = np.sin(x**2)
Creates just a figure and only one subplot
>>> fig, ax = plt.subplots()
>>> ax.plot(x, y)
>>> ax.set_title('Simple plot')
Creates two subplots and unpacks the output array immediately
>>> f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
>>> ax1.plot(x, y)
>>> ax1.set_title('Sharing Y axis')
>>> ax2.scatter(x, y)
Creates four polar axes, and accesses them through the returned array
>>> fig, axes = plt.subplots(2, 2, subplot_kw=dict(polar=True))
>>> axes[0, 0].plot(x, y)
>>> axes[1, 1].scatter(x, y)
    Share an X axis with each column of subplots
>>> plt.subplots(2, 2, sharex='col')
Share a Y axis with each row of subplots
>>> plt.subplots(2, 2, sharey='row')
Share both X and Y axes with all subplots
>>> plt.subplots(2, 2, sharex='all', sharey='all')
Note that this is the same as
>>> plt.subplots(2, 2, sharex=True, sharey=True)
See Also
--------
figure
subplot
"""
fig = figure(**fig_kw)
axs = fig.subplots(nrows=nrows, ncols=ncols, sharex=sharex, sharey=sharey,
squeeze=squeeze, subplot_kw=subplot_kw,
gridspec_kw=gridspec_kw)
return fig, axs
def subplot2grid(shape, loc, rowspan=1, colspan=1, fig=None, **kwargs):
"""
    Create an axes at a specific location inside a regular grid.
Parameters
----------
shape : sequence of 2 ints
Shape of grid in which to place axis.
First entry is number of rows, second entry is number of columns.
loc : sequence of 2 ints
Location to place axis within grid.
First entry is row number, second entry is column number.
    rowspan : int
        Number of rows for the axis to span downwards.
    colspan : int
        Number of columns for the axis to span to the right.
fig : `Figure`, optional
Figure to place axis in. Defaults to current figure.
**kwargs
Additional keyword arguments are handed to `add_subplot`.
Notes
-----
The following call ::
subplot2grid(shape, loc, rowspan=1, colspan=1)
is identical to ::
gridspec=GridSpec(shape[0], shape[1])
subplotspec=gridspec.new_subplotspec(loc, rowspan, colspan)
subplot(subplotspec)
"""
if fig is None:
fig = gcf()
s1, s2 = shape
subplotspec = GridSpec(s1, s2).new_subplotspec(loc,
rowspan=rowspan,
colspan=colspan)
a = fig.add_subplot(subplotspec, **kwargs)
bbox = a.bbox
byebye = []
for other in fig.axes:
if other == a:
continue
if bbox.fully_overlaps(other.bbox):
byebye.append(other)
for ax in byebye:
delaxes(ax)
return a
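# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): building an uneven layout by spanning rows and columns of
# a 3x3 grid with subplot2grid. The grid shape and spans are arbitrary
# example values.
#
#     import matplotlib.pyplot as plt
#
#     ax1 = plt.subplot2grid((3, 3), (0, 0), colspan=3)   # full-width top row
#     ax2 = plt.subplot2grid((3, 3), (1, 0), colspan=2)   # left 2/3 of the middle row
#     ax3 = plt.subplot2grid((3, 3), (1, 2), rowspan=2)   # right column, lower two rows
#     ax4 = plt.subplot2grid((3, 3), (2, 0), colspan=2)   # left 2/3 of the bottom row
#     plt.show()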
def twinx(ax=None):
"""
Make a second axes that shares the *x*-axis. The new axes will
overlay *ax* (or the current axes if *ax* is *None*). The ticks
for *ax2* will be placed on the right, and the *ax2* instance is
returned.
.. seealso::
:file:`examples/api_examples/two_scales.py`
For an example
"""
if ax is None:
ax=gca()
ax1 = ax.twinx()
return ax1
def twiny(ax=None):
"""
    Make a second axes that shares the *y*-axis. The new axes will
overlay *ax* (or the current axes if *ax* is *None*). The ticks
for *ax2* will be placed on the top, and the *ax2* instance is
returned.
"""
if ax is None:
ax=gca()
ax1 = ax.twiny()
return ax1
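# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): plotting two quantities with different y-scales on the
# same axes via twinx(). The arrays are arbitrary example data.
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#
#     t = np.linspace(0.0, 10.0, 200)
#     fig, ax1 = plt.subplots()
#     ax1.plot(t, np.exp(t / 3.0), 'b-')
#     ax1.set_ylabel('exp(t/3)', color='b')
#     ax2 = plt.twinx()                 # second y-axis sharing the same x-axis
#     ax2.plot(t, np.sin(t), 'r-')
#     ax2.set_ylabel('sin(t)', color='r')
#     plt.show()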
def subplots_adjust(*args, **kwargs):
"""
Tune the subplot layout.
call signature::
subplots_adjust(left=None, bottom=None, right=None, top=None,
wspace=None, hspace=None)
The parameter meanings (and suggested defaults) are::
left = 0.125 # the left side of the subplots of the figure
right = 0.9 # the right side of the subplots of the figure
bottom = 0.1 # the bottom of the subplots of the figure
top = 0.9 # the top of the subplots of the figure
wspace = 0.2 # the amount of width reserved for space between subplots,
# expressed as a fraction of the average axis width
hspace = 0.2 # the amount of height reserved for space between subplots,
# expressed as a fraction of the average axis height
The actual defaults are controlled by the rc file
"""
fig = gcf()
fig.subplots_adjust(*args, **kwargs)
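# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): widening the gaps between subplots with subplots_adjust.
# The fractions below are arbitrary example values.
#
#     import matplotlib.pyplot as plt
#
#     fig, axs = plt.subplots(2, 2)
#     plt.subplots_adjust(left=0.1, right=0.95, wspace=0.4, hspace=0.4)
#     plt.show()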
def subplot_tool(targetfig=None):
"""
Launch a subplot tool window for a figure.
A :class:`matplotlib.widgets.SubplotTool` instance is returned.
"""
tbar = rcParams['toolbar'] # turn off the navigation toolbar for the toolfig
rcParams['toolbar'] = 'None'
if targetfig is None:
manager = get_current_fig_manager()
targetfig = manager.canvas.figure
else:
# find the manager for this figure
        for manager in _pylab_helpers.Gcf._activeQue:
            if manager.canvas.figure == targetfig:
                break
        else:
            raise RuntimeError('Could not find manager for targetfig')
toolfig = figure(figsize=(6,3))
toolfig.subplots_adjust(top=0.9)
ret = SubplotTool(targetfig, toolfig)
rcParams['toolbar'] = tbar
_pylab_helpers.Gcf.set_active(manager) # restore the current figure
return ret
def tight_layout(pad=1.08, h_pad=None, w_pad=None, rect=None):
"""
Automatically adjust subplot parameters to give specified padding.
Parameters
----------
pad : float
padding between the figure edge and the edges of subplots, as a fraction of the font-size.
h_pad, w_pad : float
padding (height/width) between edges of adjacent subplots.
        Defaults to *pad*.
    rect : tuple (left, bottom, right, top), optional
        A rectangle in normalized figure coordinates into which the whole
        subplots area (including labels) will fit. Default is (0, 0, 1, 1).
"""
fig = gcf()
fig.tight_layout(pad=pad, h_pad=h_pad, w_pad=w_pad, rect=rect)
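# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): calling tight_layout() so that long axis labels do not
# overlap neighbouring subplots. Labels and padding are arbitrary examples.
#
#     import matplotlib.pyplot as plt
#
#     fig, axs = plt.subplots(2, 2)
#     for ax in axs.flat:
#         ax.set_xlabel('a fairly long x label')
#         ax.set_ylabel('a fairly long y label')
#     plt.tight_layout(pad=0.5)
#     plt.show()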
def box(on=None):
"""
Turn the axes box on or off.
Parameters
----------
on : bool or None
The new axes box state. If ``None``, toggle the state.
"""
ax = gca()
on = _string_to_bool(on)
if on is None:
on = not ax.get_frame_on()
ax.set_frame_on(on)
def title(s, *args, **kwargs):
"""
Set a title of the current axes.
Set one of the three available axes titles. The available titles are
positioned above the axes in the center, flush with the left edge,
and flush with the right edge.
.. seealso::
See :func:`~matplotlib.pyplot.text` for adding text
to the current axes
Parameters
----------
label : str
Text to use for the title
fontdict : dict
A dictionary controlling the appearance of the title text,
the default `fontdict` is:
{'fontsize': rcParams['axes.titlesize'],
'fontweight' : rcParams['axes.titleweight'],
'verticalalignment': 'baseline',
'horizontalalignment': loc}
loc : {'center', 'left', 'right'}, str, optional
Which title to set, defaults to 'center'
Returns
-------
text : :class:`~matplotlib.text.Text`
The matplotlib text instance representing the title
    Other Parameters
----------------
kwargs : text properties
Other keyword arguments are text properties, see
:class:`~matplotlib.text.Text` for a list of valid text
properties.
"""
return gca().set_title(s, *args, **kwargs)
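# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): setting a left-aligned title and passing text properties
# through as keyword arguments.
#
#     import matplotlib.pyplot as plt
#
#     plt.plot([1, 2, 3])
#     plt.title('Example title', loc='left', fontsize=14, fontweight='bold')
#     plt.show()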
## Axis ##
def axis(*v, **kwargs):
"""
Convenience method to get or set axis properties.
Calling with no arguments::
>>> axis()
returns the current axes limits ``[xmin, xmax, ymin, ymax]``.::
>>> axis(v)
sets the min and max of the x and y axes, with
``v = [xmin, xmax, ymin, ymax]``.::
>>> axis('off')
turns off the axis lines and labels.::
>>> axis('equal')
changes limits of *x* or *y* axis so that equal increments of *x*
and *y* have the same length; a circle is circular.::
>>> axis('scaled')
achieves the same result by changing the dimensions of the plot box instead
of the axis data limits.::
>>> axis('tight')
changes *x* and *y* axis limits such that all data is shown. If
all data is already shown, it will move it to the center of the
figure without modifying (*xmax* - *xmin*) or (*ymax* -
*ymin*). Note this is slightly different than in MATLAB.::
>>> axis('image')
is 'scaled' with the axis limits equal to the data limits.::
>>> axis('auto')
and::
>>> axis('normal')
are deprecated. They restore default behavior; axis limits are automatically
scaled to make the data fit comfortably within the plot box.
    If no positional arguments are given, you can pass in *xmin*, *xmax*,
    *ymin*, *ymax* as kwargs selectively to alter just those limits without
    changing the others.
>>> axis('square')
changes the limit ranges (*xmax*-*xmin*) and (*ymax*-*ymin*) of
the *x* and *y* axes to be the same, and have the same scaling,
resulting in a square plot.
The xmin, xmax, ymin, ymax tuple is returned
.. seealso::
:func:`xlim`, :func:`ylim`
For setting the x- and y-limits individually.
"""
return gca().axis(*v, **kwargs)
def xlabel(s, *args, **kwargs):
"""
Set the x-axis label of the current axes.
Call signature::
xlabel(label, fontdict=None, labelpad=None, **kwargs)
This is the pyplot equivalent of calling `.set_xlabel` on the current axes.
See there for a full parameter description.
"""
return gca().set_xlabel(s, *args, **kwargs)
def ylabel(s, *args, **kwargs):
"""
Set the y-axis label of the current axes.
Call signature::
ylabel(label, fontdict=None, labelpad=None, **kwargs)
This is the pyplot equivalent of calling `.set_ylabel` on the current axes.
See there for a full parameter description.
"""
return gca().set_ylabel(s, *args, **kwargs)
def xlim(*args, **kwargs):
"""
Get or set the x limits of the current axes.
Call signatures::
xmin, xmax = xlim() # return the current xlim
xlim((xmin, xmax)) # set the xlim to xmin, xmax
xlim(xmin, xmax) # set the xlim to xmin, xmax
If you do not specify args, you can pass *xmin* or *xmax* as kwargs, i.e.::
xlim(xmax=3) # adjust the max leaving min unchanged
xlim(xmin=1) # adjust the min leaving max unchanged
Setting limits turns autoscaling off for the x-axis.
Returns
-------
xmin, xmax
A tuple of the new x-axis limits.
Notes
-----
Calling this function with no arguments (e.g. ``xlim()``) is the pyplot
equivalent of calling `~.Axes.get_xlim` on the current axes.
Calling this function with arguments is the pyplot equivalent of calling
    `~.Axes.set_xlim` on the current axes. All arguments are passed through.
"""
ax = gca()
if not args and not kwargs:
return ax.get_xlim()
ret = ax.set_xlim(*args, **kwargs)
return ret
def ylim(*args, **kwargs):
"""
Get or set the y-limits of the current axes.
Call signatures::
ymin, ymax = ylim() # return the current ylim
ylim((ymin, ymax)) # set the ylim to ymin, ymax
ylim(ymin, ymax) # set the ylim to ymin, ymax
If you do not specify args, you can alternatively pass *ymin* or *ymax* as
kwargs, i.e.::
ylim(ymax=3) # adjust the max leaving min unchanged
ylim(ymin=1) # adjust the min leaving max unchanged
Setting limits turns autoscaling off for the y-axis.
Returns
-------
ymin, ymax
A tuple of the new y-axis limits.
Notes
-----
Calling this function with no arguments (e.g. ``ylim()``) is the pyplot
equivalent of calling `~.Axes.get_ylim` on the current axes.
Calling this function with arguments is the pyplot equivalent of calling
    `~.Axes.set_ylim` on the current axes. All arguments are passed through.
"""
ax = gca()
if not args and not kwargs:
return ax.get_ylim()
ret = ax.set_ylim(*args, **kwargs)
return ret
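# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): setting, partially adjusting, and querying axis limits
# with xlim()/ylim(). The data are arbitrary example values.
#
#     import matplotlib.pyplot as plt
#
#     plt.plot(range(10))
#     plt.xlim(2, 8)                 # set both x-limits
#     plt.ylim(ymax=6)               # adjust only the upper y-limit
#     xmin, xmax = plt.xlim()        # query the current x-limits
#     plt.show()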
@docstring.dedent_interpd
def xscale(*args, **kwargs):
"""
Set the scaling of the x-axis.
Call signature::
xscale(scale, **kwargs)
Parameters
----------
scale : [%(scale)s]
The scaling type.
**kwargs
Additional parameters depend on *scale*. See Notes.
Notes
-----
This is the pyplot equivalent of calling `~.Axes.set_xscale` on the
current axes.
Different keywords may be accepted, depending on the scale:
%(scale_docs)s
"""
gca().set_xscale(*args, **kwargs)
@docstring.dedent_interpd
def yscale(*args, **kwargs):
"""
Set the scaling of the y-axis.
Call signature::
yscale(scale, **kwargs)
Parameters
----------
scale : [%(scale)s]
The scaling type.
**kwargs
Additional parameters depend on *scale*. See Notes.
Notes
-----
This is the pyplot equivalent of calling `~.Axes.set_yscale` on the
current axes.
Different keywords may be accepted, depending on the scale:
%(scale_docs)s
"""
gca().set_yscale(*args, **kwargs)
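# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): switching both axes of the current plot to logarithmic
# scaling; scale-specific keywords (here the base of the y log axis) are
# forwarded to the scale. The data are arbitrary example values.
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#
#     x = np.logspace(0, 3, 50)
#     plt.plot(x, x ** 2)
#     plt.xscale('log')
#     plt.yscale('log', basey=2)
#     plt.show()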
def xticks(*args, **kwargs):
"""
Get or set the current tick locations and labels of the x-axis.
Call signatures::
locs, labels = xticks() # Get locations and labels
xticks(locs, [labels], **kwargs) # Set locations and labels
Parameters
----------
locs : array_like
A list of positions at which ticks should be placed. You can pass an
empty list to disable xticks.
labels : array_like, optional
A list of explicit labels to place at the given *locs*.
**kwargs
:class:`.Text` properties can be used to control the appearance of
the labels.
Returns
-------
locs
An array of label locations.
labels
A list of `.Text` objects.
Notes
-----
Calling this function with no arguments (e.g. ``xticks()``) is the pyplot
equivalent of calling `~.Axes.get_xticks` and `~.Axes.get_xticklabels` on
the current axes.
Calling this function with arguments is the pyplot equivalent of calling
`~.Axes.set_xticks` and `~.Axes.set_xticklabels` on the current axes.
Examples
--------
Get the current locations and labels:
>>> locs, labels = xticks()
Set label locations:
>>> xticks(np.arange(0, 1, step=0.2))
Set text labels:
>>> xticks(np.arange(5), ('Tom', 'Dick', 'Harry', 'Sally', 'Sue'))
Set text labels and properties:
>>> xticks(np.arange(12), calendar.month_name[1:13], rotation=20)
Disable xticks:
>>> xticks([])
"""
ax = gca()
if len(args)==0:
locs = ax.get_xticks()
labels = ax.get_xticklabels()
elif len(args)==1:
locs = ax.set_xticks(args[0])
labels = ax.get_xticklabels()
elif len(args)==2:
locs = ax.set_xticks(args[0])
labels = ax.set_xticklabels(args[1], **kwargs)
    else:
        raise TypeError('Illegal number of arguments to xticks')
if len(kwargs):
for l in labels:
l.update(kwargs)
return locs, silent_list('Text xticklabel', labels)
def yticks(*args, **kwargs):
"""
Get or set the current tick locations and labels of the y-axis.
Call signatures::
locs, labels = yticks() # Get locations and labels
yticks(locs, [labels], **kwargs) # Set locations and labels
Parameters
----------
locs : array_like
A list of positions at which ticks should be placed. You can pass an
empty list to disable yticks.
labels : array_like, optional
A list of explicit labels to place at the given *locs*.
**kwargs
:class:`.Text` properties can be used to control the appearance of
the labels.
Returns
-------
locs
An array of label locations.
labels
A list of `.Text` objects.
Notes
-----
Calling this function with no arguments (e.g. ``yticks()``) is the pyplot
equivalent of calling `~.Axes.get_yticks` and `~.Axes.get_yticklabels` on
the current axes.
Calling this function with arguments is the pyplot equivalent of calling
`~.Axes.set_yticks` and `~.Axes.set_yticklabels` on the current axes.
Examples
--------
Get the current locations and labels:
>>> locs, labels = yticks()
Set label locations:
>>> yticks(np.arange(0, 1, step=0.2))
Set text labels:
>>> yticks(np.arange(5), ('Tom', 'Dick', 'Harry', 'Sally', 'Sue'))
Set text labels and properties:
>>> yticks(np.arange(12), calendar.month_name[1:13], rotation=45)
Disable yticks:
>>> yticks([])
"""
ax = gca()
if len(args)==0:
locs = ax.get_yticks()
labels = ax.get_yticklabels()
elif len(args)==1:
locs = ax.set_yticks(args[0])
labels = ax.get_yticklabels()
elif len(args)==2:
locs = ax.set_yticks(args[0])
labels = ax.set_yticklabels(args[1], **kwargs)
    else:
        raise TypeError('Illegal number of arguments to yticks')
if len(kwargs):
for l in labels:
l.update(kwargs)
    return locs, silent_list('Text yticklabel', labels)
def minorticks_on():
"""
Display minor ticks on the current plot.
Displaying minor ticks reduces performance; turn them off using
minorticks_off() if drawing speed is a problem.
"""
gca().minorticks_on()
def minorticks_off():
"""
Remove minor ticks from the current plot.
"""
gca().minorticks_off()
def rgrids(*args, **kwargs):
"""
Get or set the radial gridlines on a polar plot.
call signatures::
lines, labels = rgrids()
lines, labels = rgrids(radii, labels=None, angle=22.5, **kwargs)
    When called with no arguments, :func:`rgrids` simply returns the
tuple (*lines*, *labels*), where *lines* is an array of radial
gridlines (:class:`~matplotlib.lines.Line2D` instances) and
*labels* is an array of tick labels
(:class:`~matplotlib.text.Text` instances). When called with
arguments, the labels will appear at the specified radial
distances and angles.
*labels*, if not *None*, is a len(*radii*) list of strings of the
labels to use at each angle.
If *labels* is None, the rformatter will be used
Examples::
# set the locations of the radial gridlines and labels
lines, labels = rgrids( (0.25, 0.5, 1.0) )
# set the locations and labels of the radial gridlines and labels
      lines, labels = rgrids((0.25, 0.5, 1.0), ('Tom', 'Dick', 'Harry'))
"""
ax = gca()
if not isinstance(ax, PolarAxes):
raise RuntimeError('rgrids only defined for polar axes')
if len(args)==0:
lines = ax.yaxis.get_gridlines()
labels = ax.yaxis.get_ticklabels()
else:
lines, labels = ax.set_rgrids(*args, **kwargs)
return ( silent_list('Line2D rgridline', lines),
silent_list('Text rgridlabel', labels) )
def thetagrids(*args, **kwargs):
"""
Get or set the theta locations of the gridlines in a polar plot.
If no arguments are passed, return a tuple (*lines*, *labels*)
where *lines* is an array of radial gridlines
(:class:`~matplotlib.lines.Line2D` instances) and *labels* is an
array of tick labels (:class:`~matplotlib.text.Text` instances)::
lines, labels = thetagrids()
Otherwise the syntax is::
lines, labels = thetagrids(angles, labels=None, fmt='%d', frac = 1.1)
set the angles at which to place the theta grids (these gridlines
are equal along the theta dimension).
*angles* is in degrees.
*labels*, if not *None*, is a len(angles) list of strings of the
labels to use at each angle.
If *labels* is *None*, the labels will be ``fmt%angle``.
*frac* is the fraction of the polar axes radius at which to place
the label (1 is the edge). e.g., 1.05 is outside the axes and 0.95
is inside the axes.
Return value is a list of tuples (*lines*, *labels*):
- *lines* are :class:`~matplotlib.lines.Line2D` instances
- *labels* are :class:`~matplotlib.text.Text` instances.
Note that on input, the *labels* argument is a list of strings,
and on output it is a list of :class:`~matplotlib.text.Text`
instances.
Examples::
      # set the locations of the theta gridlines and labels
      lines, labels = thetagrids(range(45, 360, 90))
      # set the locations and labels of the theta gridlines and labels
      lines, labels = thetagrids(range(45, 360, 90), ('NE', 'NW', 'SW', 'SE'))
"""
ax = gca()
if not isinstance(ax, PolarAxes):
        raise RuntimeError('thetagrids only defined for polar axes')
if len(args)==0:
lines = ax.xaxis.get_ticklines()
labels = ax.xaxis.get_ticklabels()
else:
lines, labels = ax.set_thetagrids(*args, **kwargs)
return (silent_list('Line2D thetagridline', lines),
silent_list('Text thetagridlabel', labels)
)
## Plotting Info ##
def plotting():
pass
def get_plot_commands():
"""
Get a sorted list of all of the plotting commands.
"""
# This works by searching for all functions in this module and
# removing a few hard-coded exclusions, as well as all of the
# colormap-setting functions, and anything marked as private with
# a preceding underscore.
import inspect
exclude = {'colormaps', 'colors', 'connect', 'disconnect',
'get_plot_commands', 'get_current_fig_manager', 'ginput',
'plotting', 'waitforbuttonpress'}
exclude |= set(colormaps())
this_module = inspect.getmodule(get_plot_commands)
commands = set()
for name, obj in list(six.iteritems(globals())):
if name.startswith('_') or name in exclude:
continue
if inspect.isfunction(obj) and inspect.getmodule(obj) is this_module:
commands.add(name)
return sorted(commands)
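# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): listing a few of the available plotting commands.
#
#     import matplotlib.pyplot as plt
#
#     for name in plt.get_plot_commands()[:10]:
#         print(name)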
@deprecated('2.1')
def colors():
"""
This is a do-nothing function to provide you with help on how
matplotlib handles colors.
Commands which take color arguments can use several formats to
specify the colors. For the basic built-in colors, you can use a
single letter
===== =======
Alias Color
===== =======
'b' blue
'g' green
'r' red
'c' cyan
'm' magenta
'y' yellow
'k' black
'w' white
===== =======
For a greater range of colors, you have two options. You can
specify the color using an html hex string, as in::
color = '#eeefff'
or you can pass an R,G,B tuple, where each of R,G,B are in the
range [0,1].
You can also use any legal html name for a color, for example::
color = 'red'
color = 'burlywood'
color = 'chartreuse'
The example below creates a subplot with a dark
slate gray background::
subplot(111, facecolor=(0.1843, 0.3098, 0.3098))
Here is an example that creates a pale turquoise title::
title('Is this the best color?', color='#afeeee')
"""
pass
def colormaps():
"""
Matplotlib provides a number of colormaps, and others can be added using
:func:`~matplotlib.cm.register_cmap`. This function documents the built-in
colormaps, and will also return a list of all registered colormaps if called.
You can set the colormap for an image, pcolor, scatter, etc,
using a keyword argument::
imshow(X, cmap=cm.hot)
or using the :func:`set_cmap` function::
imshow(X)
pyplot.set_cmap('hot')
pyplot.set_cmap('jet')
In interactive mode, :func:`set_cmap` will update the colormap post-hoc,
allowing you to see which one works best for your data.
All built-in colormaps can be reversed by appending ``_r``: For instance,
``gray_r`` is the reverse of ``gray``.
There are several common color schemes used in visualization:
Sequential schemes
for unipolar data that progresses from low to high
Diverging schemes
for bipolar data that emphasizes positive or negative deviations from a
central value
Cyclic schemes
meant for plotting values that wrap around at the
endpoints, such as phase angle, wind direction, or time of day
Qualitative schemes
for nominal data that has no inherent ordering, where color is used
only to distinguish categories
Matplotlib ships with 4 perceptually uniform color maps which are
the recommended color maps for sequential data:
========= ===================================================
Colormap Description
========= ===================================================
inferno perceptually uniform shades of black-red-yellow
magma perceptually uniform shades of black-red-white
plasma perceptually uniform shades of blue-red-yellow
viridis perceptually uniform shades of blue-green-yellow
========= ===================================================
The following colormaps are based on the `ColorBrewer
<http://colorbrewer2.org>`_ color specifications and designs developed by
Cynthia Brewer:
ColorBrewer Diverging (luminance is highest at the midpoint, and
decreases towards differently-colored endpoints):
======== ===================================
Colormap Description
======== ===================================
BrBG brown, white, blue-green
PiYG pink, white, yellow-green
PRGn purple, white, green
PuOr orange, white, purple
RdBu red, white, blue
RdGy red, white, gray
RdYlBu red, yellow, blue
RdYlGn red, yellow, green
Spectral red, orange, yellow, green, blue
======== ===================================
ColorBrewer Sequential (luminance decreases monotonically):
======== ====================================
Colormap Description
======== ====================================
Blues white to dark blue
BuGn white, light blue, dark green
BuPu white, light blue, dark purple
GnBu white, light green, dark blue
Greens white to dark green
Greys white to black (not linear)
Oranges white, orange, dark brown
OrRd white, orange, dark red
PuBu white, light purple, dark blue
PuBuGn white, light purple, dark green
PuRd white, light purple, dark red
Purples white to dark purple
RdPu white, pink, dark purple
Reds white to dark red
YlGn light yellow, dark green
YlGnBu light yellow, light green, dark blue
YlOrBr light yellow, orange, dark brown
YlOrRd light yellow, orange, dark red
======== ====================================
ColorBrewer Qualitative:
(For plotting nominal data, :class:`ListedColormap` is used,
not :class:`LinearSegmentedColormap`. Different sets of colors are
recommended for different numbers of categories.)
* Accent
* Dark2
* Paired
* Pastel1
* Pastel2
* Set1
* Set2
* Set3
A set of colormaps derived from those of the same name provided
    with MATLAB are also included:
========= =======================================================
Colormap Description
========= =======================================================
autumn sequential linearly-increasing shades of red-orange-yellow
bone sequential increasing black-white color map with
a tinge of blue, to emulate X-ray film
cool linearly-decreasing shades of cyan-magenta
copper sequential increasing shades of black-copper
flag repetitive red-white-blue-black pattern (not cyclic at
endpoints)
gray sequential linearly-increasing black-to-white
grayscale
hot sequential black-red-yellow-white, to emulate blackbody
radiation from an object at increasing temperatures
hsv cyclic red-yellow-green-cyan-blue-magenta-red, formed
by changing the hue component in the HSV color space
jet a spectral map with dark endpoints, blue-cyan-yellow-red;
based on a fluid-jet simulation by NCSA [#]_
pink sequential increasing pastel black-pink-white, meant
for sepia tone colorization of photographs
prism repetitive red-yellow-green-blue-purple-...-green pattern
(not cyclic at endpoints)
spring linearly-increasing shades of magenta-yellow
summer sequential linearly-increasing shades of green-yellow
winter linearly-increasing shades of blue-green
========= =======================================================
A set of palettes from the `Yorick scientific visualisation
package <https://dhmunro.github.io/yorick-doc/>`_, an evolution of
the GIST package, both by David H. Munro are included:
============ =======================================================
Colormap Description
============ =======================================================
gist_earth mapmaker's colors from dark blue deep ocean to green
lowlands to brown highlands to white mountains
gist_heat sequential increasing black-red-orange-white, to emulate
blackbody radiation from an iron bar as it grows hotter
gist_ncar pseudo-spectral black-blue-green-yellow-red-purple-white
colormap from National Center for Atmospheric
Research [#]_
gist_rainbow runs through the colors in spectral order from red to
violet at full saturation (like *hsv* but not cyclic)
gist_stern "Stern special" color table from Interactive Data
Language software
============ =======================================================
Other miscellaneous schemes:
============= =======================================================
Colormap Description
============= =======================================================
afmhot sequential black-orange-yellow-white blackbody
spectrum, commonly used in atomic force microscopy
brg blue-red-green
bwr diverging blue-white-red
coolwarm diverging blue-gray-red, meant to avoid issues with 3D
shading, color blindness, and ordering of colors [#]_
CMRmap "Default colormaps on color images often reproduce to
confusing grayscale images. The proposed colormap
maintains an aesthetically pleasing color image that
automatically reproduces to a monotonic grayscale with
discrete, quantifiable saturation levels." [#]_
cubehelix Unlike most other color schemes cubehelix was designed
by D.A. Green to be monotonically increasing in terms
of perceived brightness. Also, when printed on a black
and white postscript printer, the scheme results in a
greyscale with monotonically increasing brightness.
This color scheme is named cubehelix because the r,g,b
values produced can be visualised as a squashed helix
around the diagonal in the r,g,b color cube.
gnuplot gnuplot's traditional pm3d scheme
(black-blue-red-yellow)
gnuplot2 sequential color printable as gray
(black-blue-violet-yellow-white)
ocean green-blue-white
rainbow spectral purple-blue-green-yellow-orange-red colormap
with diverging luminance
seismic diverging blue-white-red
nipy_spectral black-purple-blue-green-yellow-red-white spectrum,
originally from the Neuroimaging in Python project
terrain mapmaker's colors, blue-green-yellow-brown-white,
originally from IGOR Pro
============= =======================================================
The following colormaps are redundant and may be removed in future
versions. It's recommended to use the names in the descriptions
instead, which produce identical output:
========= =======================================================
Colormap Description
========= =======================================================
gist_gray identical to *gray*
gist_yarg identical to *gray_r*
binary identical to *gray_r*
spectral identical to *nipy_spectral* [#]_
========= =======================================================
.. rubric:: Footnotes
.. [#] Rainbow colormaps, ``jet`` in particular, are considered a poor
choice for scientific visualization by many researchers: `Rainbow Color
Map (Still) Considered Harmful
<http://ieeexplore.ieee.org/document/4118486/?arnumber=4118486>`_
.. [#] Resembles "BkBlAqGrYeOrReViWh200" from NCAR Command
Language. See `Color Table Gallery
<https://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml>`_
.. [#] See `Diverging Color Maps for Scientific Visualization
<http://www.kennethmoreland.com/color-maps/>`_ by Kenneth Moreland.
.. [#] See `A Color Map for Effective Black-and-White Rendering of
Color-Scale Images
<https://www.mathworks.com/matlabcentral/fileexchange/2662-cmrmap-m>`_
by Carey Rappaport
.. [#] Changed to distinguish from ColorBrewer's *Spectral* map.
:func:`spectral` still works, but
``set_cmap('nipy_spectral')`` is recommended for clarity.
"""
return sorted(cm.cmap_d)
def _setup_pyplot_info_docstrings():
"""
    Generate the plotting docstring.
These must be done after the entire module is imported, so it is
called from the end of this module, which is generated by
boilerplate.py.
"""
# Generate the plotting docstring
import re
def pad(s, l):
"""Pad string *s* to length *l*."""
if l < len(s):
return s[:l]
return s + ' ' * (l - len(s))
commands = get_plot_commands()
first_sentence = re.compile(r"(?:\s*).+?\.(?:\s+|$)", flags=re.DOTALL)
# Collect the first sentence of the docstring for all of the
# plotting commands.
rows = []
max_name = 0
max_summary = 0
for name in commands:
doc = globals()[name].__doc__
summary = ''
if doc is not None:
match = first_sentence.match(doc)
if match is not None:
summary = match.group(0).strip().replace('\n', ' ')
name = '`%s`' % name
rows.append([name, summary])
max_name = max(max_name, len(name))
max_summary = max(max_summary, len(summary))
lines = []
sep = '=' * max_name + ' ' + '=' * max_summary
lines.append(sep)
lines.append(' '.join([pad("Function", max_name),
pad("Description", max_summary)]))
lines.append(sep)
for name, summary in rows:
lines.append(' '.join([pad(name, max_name),
pad(summary, max_summary)]))
lines.append(sep)
plotting.__doc__ = '\n'.join(lines)
## Plotting part 1: manually generated functions and wrappers ##
def colorbar(mappable=None, cax=None, ax=None, **kw):
if mappable is None:
mappable = gci()
if mappable is None:
raise RuntimeError('No mappable was found to use for colorbar '
'creation. First define a mappable such as '
'an image (with imshow) or a contour set ('
'with contourf).')
if ax is None:
ax = gca()
ret = gcf().colorbar(mappable, cax = cax, ax=ax, **kw)
return ret
colorbar.__doc__ = matplotlib.colorbar.colorbar_doc
def clim(vmin=None, vmax=None):
"""
Set the color limits of the current image.
To apply clim to all axes images do::
clim(0, 0.5)
If either *vmin* or *vmax* is None, the image min/max respectively
will be used for color scaling.
If you want to set the clim of multiple images,
use, for example::
for im in gca().get_images():
im.set_clim(0, 0.05)
"""
im = gci()
if im is None:
raise RuntimeError('You must first define an image, e.g., with imshow')
im.set_clim(vmin, vmax)
def set_cmap(cmap):
"""
Set the default colormap. Applies to the current image if any.
See help(colormaps) for more information.
*cmap* must be a :class:`~matplotlib.colors.Colormap` instance, or
the name of a registered colormap.
See :func:`matplotlib.cm.register_cmap` and
:func:`matplotlib.cm.get_cmap`.
"""
cmap = cm.get_cmap(cmap)
rc('image', cmap=cmap.name)
im = gci()
if im is not None:
im.set_cmap(cmap)
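# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): changing the colormap of an already-drawn image with
# set_cmap(). The data are arbitrary example values.
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#
#     data = np.random.rand(10, 10)
#     plt.imshow(data)
#     plt.set_cmap('viridis')        # updates the current image and the default
#     plt.colorbar()
#     plt.show()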
@docstring.copy_dedent(_imread)
def imread(*args, **kwargs):
return _imread(*args, **kwargs)
@docstring.copy_dedent(_imsave)
def imsave(*args, **kwargs):
return _imsave(*args, **kwargs)
def matshow(A, fignum=None, **kw):
"""
Display an array as a matrix in a new figure window.
The origin is set at the upper left hand corner and rows (first
dimension of the array) are displayed horizontally. The aspect
ratio of the figure window is that of the array, unless this would
make an excessively short or narrow figure.
Tick labels for the xaxis are placed on top.
With the exception of *fignum*, keyword arguments are passed to
:func:`~matplotlib.pyplot.imshow`. You may set the *origin*
kwarg to "lower" if you want the first row in the array to be
at the bottom instead of the top.
*fignum*: [ None | integer | False ]
By default, :func:`matshow` creates a new figure window with
automatic numbering. If *fignum* is given as an integer, the
created figure will use this figure number. Because of how
:func:`matshow` tries to set the figure aspect ratio to be the
one of the array, if you provide the number of an already
existing figure, strange things may happen.
If *fignum* is *False* or 0, a new figure window will **NOT** be created.
"""
A = np.asanyarray(A)
    if fignum is False or fignum == 0:
ax = gca()
else:
# Extract actual aspect ratio of array and make appropriately sized figure
fig = figure(fignum, figsize=figaspect(A))
ax = fig.add_axes([0.15, 0.09, 0.775, 0.775])
im = ax.matshow(A, **kw)
sci(im)
return im
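# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): displaying a small matrix with matshow(); extra keyword
# arguments such as *cmap* are forwarded to imshow(). The matrix is an
# arbitrary example.
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#
#     A = np.random.rand(5, 5)
#     plt.matshow(A, cmap='gray')
#     plt.colorbar()
#     plt.show()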
def polar(*args, **kwargs):
"""
Make a polar plot.
call signature::
polar(theta, r, **kwargs)
Multiple *theta*, *r* arguments are supported, with format
strings, as in :func:`~matplotlib.pyplot.plot`.
"""
# If an axis already exists, check if it has a polar projection
if gcf().get_axes():
if not isinstance(gca(), PolarAxes):
warnings.warn('Trying to create polar plot on an axis that does '
'not have a polar projection.')
ax = gca(polar=True)
ret = ax.plot(*args, **kwargs)
return ret
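# Illustrative usage sketch (editor's addition, not part of the original
# pyplot source): a simple polar line plot. The curve is an arbitrary
# example (a three-petalled rose).
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#
#     theta = np.linspace(0, 2 * np.pi, 400)
#     r = np.abs(np.sin(3 * theta))
#     plt.polar(theta, r, 'g-')
#     plt.show()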
def plotfile(fname, cols=(0,), plotfuncs=None,
comments='#', skiprows=0, checkrows=5, delimiter=',',
names=None, subplots=True, newfig=True, **kwargs):
"""
Plot the data in a file.
*cols* is a sequence of column identifiers to plot. An identifier
is either an int or a string. If it is an int, it indicates the
column number. If it is a string, it indicates the column header.
matplotlib will make column headers lower case, replace spaces with
underscores, and remove all illegal characters; so ``'Adj Close*'``
will have name ``'adj_close'``.
- If len(*cols*) == 1, only that column will be plotted on the *y* axis.
- If len(*cols*) > 1, the first element will be an identifier for
data for the *x* axis and the remaining elements will be the
column indexes for multiple subplots if *subplots* is *True*
(the default), or for lines in a single subplot if *subplots*
is *False*.
*plotfuncs*, if not *None*, is a dictionary mapping identifier to
an :class:`~matplotlib.axes.Axes` plotting function as a string.
Default is 'plot', other choices are 'semilogy', 'fill', 'bar',
etc. You must use the same type of identifier in the *cols*
vector as you use in the *plotfuncs* dictionary, e.g., integer
column numbers in both or column names in both. If *subplots*
is *False*, then including any function such as 'semilogy'
that changes the axis scaling will set the scaling for all
columns.
*comments*, *skiprows*, *checkrows*, *delimiter*, and *names*
    are all passed on to :func:`matplotlib.mlab.csv2rec` to
load the data into a record array.
If *newfig* is *True*, the plot always will be made in a new figure;
if *False*, it will be made in the current figure if one exists,
else in a new figure.
kwargs are passed on to plotting functions.
Example usage::
# plot the 2nd and 4th column against the 1st in two subplots
plotfile(fname, (0,1,3))
# plot using column names; specify an alternate plot type for volume
plotfile(fname, ('date', 'volume', 'adj_close'),
plotfuncs={'volume': 'semilogy'})
Note: plotfile is intended as a convenience for quickly plotting
data from flat files; it is not intended as an alternative
interface to general plotting with pyplot or matplotlib.
"""
if newfig:
fig = figure()
else:
fig = gcf()
if len(cols)<1:
raise ValueError('must have at least one column of data')
if plotfuncs is None:
plotfuncs = dict()
from matplotlib.cbook import mplDeprecation
with warnings.catch_warnings():
warnings.simplefilter('ignore', mplDeprecation)
r = mlab.csv2rec(fname, comments=comments, skiprows=skiprows,
checkrows=checkrows, delimiter=delimiter, names=names)
def getname_val(identifier):
'return the name and column data for identifier'
if isinstance(identifier, six.string_types):
return identifier, r[identifier]
elif is_numlike(identifier):
name = r.dtype.names[int(identifier)]
return name, r[name]
else:
raise TypeError('identifier must be a string or integer')
xname, x = getname_val(cols[0])
ynamelist = []
if len(cols)==1:
ax1 = fig.add_subplot(1,1,1)
funcname = plotfuncs.get(cols[0], 'plot')
func = getattr(ax1, funcname)
func(x, **kwargs)
ax1.set_ylabel(xname)
else:
N = len(cols)
for i in range(1,N):
if subplots:
if i==1:
ax = ax1 = fig.add_subplot(N-1,1,i)
else:
ax = fig.add_subplot(N-1,1,i, sharex=ax1)
elif i==1:
ax = fig.add_subplot(1,1,1)
yname, y = getname_val(cols[i])
ynamelist.append(yname)
funcname = plotfuncs.get(cols[i], 'plot')
func = getattr(ax, funcname)
func(x, y, **kwargs)
if subplots:
ax.set_ylabel(yname)
if ax.is_last_row():
ax.set_xlabel(xname)
else:
ax.set_xlabel('')
if not subplots:
ax.legend(ynamelist, loc='best')
if xname=='date':
fig.autofmt_xdate()
def _autogen_docstring(base):
"""Autogenerated wrappers will get their docstring from a base function
with an addendum."""
#msg = "\n\nAdditional kwargs: hold = [True|False] overrides default hold state"
msg = ''
addendum = docstring.Appender(msg, '\n\n')
return lambda func: addendum(docstring.copy_dedent(base)(func))
# This function cannot be generated by boilerplate.py because it may
# return an image or a line.
@_autogen_docstring(Axes.spy)
def spy(Z, precision=0, marker=None, markersize=None, aspect='equal', **kwargs):
ax = gca()
hold = kwargs.pop('hold', None)
# allow callers to override the hold state by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.spy(Z, precision, marker, markersize, aspect, **kwargs)
finally:
ax._hold = washold
if isinstance(ret, cm.ScalarMappable):
sci(ret)
return ret
# just to be safe. Interactive mode can be turned on without
# calling `plt.ion()` so register it again here.
# This is safe because multiple calls to `install_repl_displayhook`
# are no-ops and the registered function respect `mpl.is_interactive()`
# to determine if they should trigger a draw.
install_repl_displayhook()
################# REMAINING CONTENT GENERATED BY boilerplate.py ##############
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.acorr)
def acorr(x, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.acorr(x, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.angle_spectrum)
def angle_spectrum(x, Fs=None, Fc=None, window=None, pad_to=None, sides=None,
hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.angle_spectrum(x, Fs=Fs, Fc=Fc, window=window, pad_to=pad_to,
sides=sides, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.arrow)
def arrow(x, y, dx, dy, hold=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.arrow(x, y, dx, dy, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.axhline)
def axhline(y=0, xmin=0, xmax=1, hold=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.axhline(y=y, xmin=xmin, xmax=xmax, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.axhspan)
def axhspan(ymin, ymax, xmin=0, xmax=1, hold=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.axhspan(ymin, ymax, xmin=xmin, xmax=xmax, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.axvline)
def axvline(x=0, ymin=0, ymax=1, hold=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.axvline(x=x, ymin=ymin, ymax=ymax, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.axvspan)
def axvspan(xmin, xmax, ymin=0, ymax=1, hold=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.axvspan(xmin, xmax, ymin=ymin, ymax=ymax, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.bar)
def bar(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.bar(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.barh)
def barh(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.barh(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.broken_barh)
def broken_barh(xranges, yrange, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.broken_barh(xranges, yrange, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.boxplot)
def boxplot(x, notch=None, sym=None, vert=None, whis=None, positions=None,
widths=None, patch_artist=None, bootstrap=None, usermedians=None,
conf_intervals=None, meanline=None, showmeans=None, showcaps=None,
showbox=None, showfliers=None, boxprops=None, labels=None,
flierprops=None, medianprops=None, meanprops=None, capprops=None,
whiskerprops=None, manage_xticks=True, autorange=False, zorder=None,
hold=None, data=None):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.boxplot(x, notch=notch, sym=sym, vert=vert, whis=whis,
positions=positions, widths=widths,
patch_artist=patch_artist, bootstrap=bootstrap,
usermedians=usermedians,
conf_intervals=conf_intervals, meanline=meanline,
showmeans=showmeans, showcaps=showcaps,
showbox=showbox, showfliers=showfliers,
boxprops=boxprops, labels=labels,
flierprops=flierprops, medianprops=medianprops,
meanprops=meanprops, capprops=capprops,
whiskerprops=whiskerprops,
manage_xticks=manage_xticks, autorange=autorange,
zorder=zorder, data=data)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.cohere)
def cohere(x, y, NFFT=256, Fs=2, Fc=0, detrend=mlab.detrend_none,
window=mlab.window_hanning, noverlap=0, pad_to=None, sides='default',
scale_by_freq=None, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.cohere(x, y, NFFT=NFFT, Fs=Fs, Fc=Fc, detrend=detrend,
window=window, noverlap=noverlap, pad_to=pad_to,
sides=sides, scale_by_freq=scale_by_freq, data=data,
**kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.clabel)
def clabel(CS, *args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.clabel(CS, *args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.contour)
def contour(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.contour(*args, **kwargs)
finally:
ax._hold = washold
if ret._A is not None: sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.contourf)
def contourf(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.contourf(*args, **kwargs)
finally:
ax._hold = washold
if ret._A is not None: sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.csd)
def csd(x, y, NFFT=None, Fs=None, Fc=None, detrend=None, window=None,
noverlap=None, pad_to=None, sides=None, scale_by_freq=None,
return_line=None, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.csd(x, y, NFFT=NFFT, Fs=Fs, Fc=Fc, detrend=detrend,
window=window, noverlap=noverlap, pad_to=pad_to,
sides=sides, scale_by_freq=scale_by_freq,
return_line=return_line, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.errorbar)
def errorbar(x, y, yerr=None, xerr=None, fmt='', ecolor=None, elinewidth=None,
capsize=None, barsabove=False, lolims=False, uplims=False,
xlolims=False, xuplims=False, errorevery=1, capthick=None,
hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.errorbar(x, y, yerr=yerr, xerr=xerr, fmt=fmt, ecolor=ecolor,
elinewidth=elinewidth, capsize=capsize,
barsabove=barsabove, lolims=lolims, uplims=uplims,
xlolims=xlolims, xuplims=xuplims,
errorevery=errorevery, capthick=capthick, data=data,
**kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.eventplot)
def eventplot(positions, orientation='horizontal', lineoffsets=1, linelengths=1,
linewidths=None, colors=None, linestyles='solid', hold=None,
data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.eventplot(positions, orientation=orientation,
lineoffsets=lineoffsets, linelengths=linelengths,
linewidths=linewidths, colors=colors,
linestyles=linestyles, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.fill)
def fill(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.fill(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.fill_between)
def fill_between(x, y1, y2=0, where=None, interpolate=False, step=None,
hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.fill_between(x, y1, y2=y2, where=where,
interpolate=interpolate, step=step, data=data,
**kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.fill_betweenx)
def fill_betweenx(y, x1, x2=0, where=None, step=None, interpolate=False,
hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.fill_betweenx(y, x1, x2=x2, where=where, step=step,
interpolate=interpolate, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.hexbin)
def hexbin(x, y, C=None, gridsize=100, bins=None, xscale='linear',
yscale='linear', extent=None, cmap=None, norm=None, vmin=None,
vmax=None, alpha=None, linewidths=None, edgecolors='face',
reduce_C_function=np.mean, mincnt=None, marginals=False, hold=None,
data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.hexbin(x, y, C=C, gridsize=gridsize, bins=bins, xscale=xscale,
yscale=yscale, extent=extent, cmap=cmap, norm=norm,
vmin=vmin, vmax=vmax, alpha=alpha,
linewidths=linewidths, edgecolors=edgecolors,
reduce_C_function=reduce_C_function, mincnt=mincnt,
marginals=marginals, data=data, **kwargs)
finally:
ax._hold = washold
sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.hist)
def hist(x, bins=None, range=None, density=None, weights=None, cumulative=False,
bottom=None, histtype='bar', align='mid', orientation='vertical',
rwidth=None, log=False, color=None, label=None, stacked=False,
normed=None, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.hist(x, bins=bins, range=range, density=density,
weights=weights, cumulative=cumulative, bottom=bottom,
histtype=histtype, align=align, orientation=orientation,
rwidth=rwidth, log=log, color=color, label=label,
stacked=stacked, normed=normed, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.hist2d)
def hist2d(x, y, bins=10, range=None, normed=False, weights=None, cmin=None,
cmax=None, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.hist2d(x, y, bins=bins, range=range, normed=normed,
weights=weights, cmin=cmin, cmax=cmax, data=data,
**kwargs)
finally:
ax._hold = washold
sci(ret[-1])
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.hlines)
def hlines(y, xmin, xmax, colors='k', linestyles='solid', label='', hold=None,
data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.hlines(y, xmin, xmax, colors=colors, linestyles=linestyles,
label=label, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.imshow)
def imshow(X, cmap=None, norm=None, aspect=None, interpolation=None, alpha=None,
vmin=None, vmax=None, origin=None, extent=None, shape=None,
filternorm=1, filterrad=4.0, imlim=None, resample=None, url=None,
hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.imshow(X, cmap=cmap, norm=norm, aspect=aspect,
interpolation=interpolation, alpha=alpha, vmin=vmin,
vmax=vmax, origin=origin, extent=extent, shape=shape,
filternorm=filternorm, filterrad=filterrad,
imlim=imlim, resample=resample, url=url, data=data,
**kwargs)
finally:
ax._hold = washold
sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.loglog)
def loglog(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.loglog(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.magnitude_spectrum)
def magnitude_spectrum(x, Fs=None, Fc=None, window=None, pad_to=None,
sides=None, scale=None, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.magnitude_spectrum(x, Fs=Fs, Fc=Fc, window=window,
pad_to=pad_to, sides=sides, scale=scale,
data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.pcolor)
def pcolor(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.pcolor(*args, **kwargs)
finally:
ax._hold = washold
sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.pcolormesh)
def pcolormesh(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.pcolormesh(*args, **kwargs)
finally:
ax._hold = washold
sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.phase_spectrum)
def phase_spectrum(x, Fs=None, Fc=None, window=None, pad_to=None, sides=None,
hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.phase_spectrum(x, Fs=Fs, Fc=Fc, window=window, pad_to=pad_to,
sides=sides, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.pie)
def pie(x, explode=None, labels=None, colors=None, autopct=None,
pctdistance=0.6, shadow=False, labeldistance=1.1, startangle=None,
radius=None, counterclock=True, wedgeprops=None, textprops=None,
center=(0, 0), frame=False, rotatelabels=False, hold=None, data=None):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.pie(x, explode=explode, labels=labels, colors=colors,
autopct=autopct, pctdistance=pctdistance, shadow=shadow,
labeldistance=labeldistance, startangle=startangle,
radius=radius, counterclock=counterclock,
wedgeprops=wedgeprops, textprops=textprops, center=center,
frame=frame, rotatelabels=rotatelabels, data=data)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.plot)
def plot(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.plot(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.plot_date)
def plot_date(x, y, fmt='o', tz=None, xdate=True, ydate=False, hold=None,
data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.plot_date(x, y, fmt=fmt, tz=tz, xdate=xdate, ydate=ydate,
data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.psd)
def psd(x, NFFT=None, Fs=None, Fc=None, detrend=None, window=None,
noverlap=None, pad_to=None, sides=None, scale_by_freq=None,
return_line=None, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.psd(x, NFFT=NFFT, Fs=Fs, Fc=Fc, detrend=detrend,
window=window, noverlap=noverlap, pad_to=pad_to,
sides=sides, scale_by_freq=scale_by_freq,
return_line=return_line, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.quiver)
def quiver(*args, **kw):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kw.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.quiver(*args, **kw)
finally:
ax._hold = washold
sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.quiverkey)
def quiverkey(*args, **kw):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kw.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.quiverkey(*args, **kw)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.scatter)
def scatter(x, y, s=None, c=None, marker=None, cmap=None, norm=None, vmin=None,
vmax=None, alpha=None, linewidths=None, verts=None, edgecolors=None,
hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.scatter(x, y, s=s, c=c, marker=marker, cmap=cmap, norm=norm,
vmin=vmin, vmax=vmax, alpha=alpha,
linewidths=linewidths, verts=verts,
edgecolors=edgecolors, data=data, **kwargs)
finally:
ax._hold = washold
sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.semilogx)
def semilogx(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.semilogx(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.semilogy)
def semilogy(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.semilogy(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.specgram)
def specgram(x, NFFT=None, Fs=None, Fc=None, detrend=None, window=None,
noverlap=None, cmap=None, xextent=None, pad_to=None, sides=None,
scale_by_freq=None, mode=None, scale=None, vmin=None, vmax=None,
hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.specgram(x, NFFT=NFFT, Fs=Fs, Fc=Fc, detrend=detrend,
window=window, noverlap=noverlap, cmap=cmap,
xextent=xextent, pad_to=pad_to, sides=sides,
scale_by_freq=scale_by_freq, mode=mode, scale=scale,
vmin=vmin, vmax=vmax, data=data, **kwargs)
finally:
ax._hold = washold
sci(ret[-1])
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.stackplot)
def stackplot(x, *args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.stackplot(x, *args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.stem)
def stem(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.stem(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.step)
def step(x, y, *args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.step(x, y, *args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.streamplot)
def streamplot(x, y, u, v, density=1, linewidth=None, color=None, cmap=None,
norm=None, arrowsize=1, arrowstyle='-|>', minlength=0.1,
transform=None, zorder=None, start_points=None, maxlength=4.0,
integration_direction='both', hold=None, data=None):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.streamplot(x, y, u, v, density=density, linewidth=linewidth,
color=color, cmap=cmap, norm=norm,
arrowsize=arrowsize, arrowstyle=arrowstyle,
minlength=minlength, transform=transform,
zorder=zorder, start_points=start_points,
maxlength=maxlength,
integration_direction=integration_direction,
data=data)
finally:
ax._hold = washold
sci(ret.lines)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.tricontour)
def tricontour(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.tricontour(*args, **kwargs)
finally:
ax._hold = washold
if ret._A is not None: sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.tricontourf)
def tricontourf(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.tricontourf(*args, **kwargs)
finally:
ax._hold = washold
if ret._A is not None: sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.tripcolor)
def tripcolor(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.tripcolor(*args, **kwargs)
finally:
ax._hold = washold
sci(ret)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.triplot)
def triplot(*args, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kwargs.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.triplot(*args, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.violinplot)
def violinplot(dataset, positions=None, vert=True, widths=0.5, showmeans=False,
showextrema=True, showmedians=False, points=100, bw_method=None,
hold=None, data=None):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.violinplot(dataset, positions=positions, vert=vert,
widths=widths, showmeans=showmeans,
showextrema=showextrema, showmedians=showmedians,
points=points, bw_method=bw_method, data=data)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.vlines)
def vlines(x, ymin, ymax, colors='k', linestyles='solid', label='', hold=None,
data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.vlines(x, ymin, ymax, colors=colors, linestyles=linestyles,
label=label, data=data, **kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.xcorr)
def xcorr(x, y, normed=True, detrend=mlab.detrend_none, usevlines=True,
maxlags=10, hold=None, data=None, **kwargs):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.xcorr(x, y, normed=normed, detrend=detrend,
usevlines=usevlines, maxlags=maxlags, data=data,
**kwargs)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_autogen_docstring(Axes.barbs)
def barbs(*args, **kw):
ax = gca()
# Deprecated: allow callers to override the hold state
# by passing hold=True|False
washold = ax._hold
hold = kw.pop('hold', None)
if hold is not None:
ax._hold = hold
from matplotlib.cbook import mplDeprecation
warnings.warn("The 'hold' keyword argument is deprecated since 2.0.",
mplDeprecation)
try:
ret = ax.barbs(*args, **kw)
finally:
ax._hold = washold
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.cla)
def cla():
ret = gca().cla()
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.grid)
def grid(b=None, which='major', axis='both', **kwargs):
ret = gca().grid(b=b, which=which, axis=axis, **kwargs)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.legend)
def legend(*args, **kwargs):
ret = gca().legend(*args, **kwargs)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.table)
def table(**kwargs):
ret = gca().table(**kwargs)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.text)
def text(x, y, s, fontdict=None, withdash=False, **kwargs):
ret = gca().text(x, y, s, fontdict=fontdict, withdash=withdash, **kwargs)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.annotate)
def annotate(*args, **kwargs):
ret = gca().annotate(*args, **kwargs)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.ticklabel_format)
def ticklabel_format(**kwargs):
ret = gca().ticklabel_format(**kwargs)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.locator_params)
def locator_params(axis='both', tight=None, **kwargs):
ret = gca().locator_params(axis=axis, tight=tight, **kwargs)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.tick_params)
def tick_params(axis='both', **kwargs):
ret = gca().tick_params(axis=axis, **kwargs)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.margins)
def margins(*args, **kw):
ret = gca().margins(*args, **kw)
return ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@docstring.copy_dedent(Axes.autoscale)
def autoscale(enable=True, axis='both', tight=None):
ret = gca().autoscale(enable=enable, axis=axis, tight=tight)
return ret
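# Editorial note (not generated by boilerplate.py): the wrappers above that
# use @docstring.copy_dedent carry no hold handling at all; they simply
# forward to the corresponding method of the current Axes.  A hypothetical
# reduction of that pattern:
#
#     def legend(*args, **kwargs):
#         return gca().legend(*args, **kwargs)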
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def autumn():
"""
Set the colormap to "autumn".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("autumn")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def bone():
"""
Set the colormap to "bone".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("bone")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def cool():
"""
Set the colormap to "cool".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("cool")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def copper():
"""
Set the colormap to "copper".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("copper")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def flag():
"""
Set the colormap to "flag".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("flag")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def gray():
"""
Set the colormap to "gray".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("gray")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def hot():
"""
Set the colormap to "hot".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("hot")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def hsv():
"""
Set the colormap to "hsv".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("hsv")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def jet():
"""
Set the colormap to "jet".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("jet")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def pink():
"""
Set the colormap to "pink".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("pink")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def prism():
"""
Set the colormap to "prism".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("prism")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def spring():
"""
Set the colormap to "spring".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("spring")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def summer():
"""
Set the colormap to "summer".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("summer")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def winter():
"""
Set the colormap to "winter".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("winter")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def magma():
"""
Set the colormap to "magma".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("magma")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def inferno():
"""
Set the colormap to "inferno".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("inferno")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def plasma():
"""
Set the colormap to "plasma".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("plasma")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def viridis():
"""
Set the colormap to "viridis".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("viridis")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def nipy_spectral():
"""
Set the colormap to "nipy_spectral".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("nipy_spectral")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def spectral():
"""
Set the colormap to "spectral".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
from matplotlib.cbook import warn_deprecated
warn_deprecated(
"2.0",
name="spectral",
obj_type="colormap"
)
set_cmap("spectral")
_setup_pyplot_info_docstrings()
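# Editorial usage sketch for the colormap setters above (not part of the
# autogenerated file; ``data`` is a placeholder array):
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#
#     data = np.random.rand(10, 10)
#     plt.imshow(data)       # current image uses the default colormap
#     plt.viridis()          # same effect as plt.set_cmap("viridis"): changes
#                            # the default colormap and the current image
#     plt.colorbar()
#     plt.show()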
| 134,492 | 31.843223 | 98 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_cm_listed.py
|
from .colors import ListedColormap
_magma_data = [[0.001462, 0.000466, 0.013866],
[0.002258, 0.001295, 0.018331],
[0.003279, 0.002305, 0.023708],
[0.004512, 0.003490, 0.029965],
[0.005950, 0.004843, 0.037130],
[0.007588, 0.006356, 0.044973],
[0.009426, 0.008022, 0.052844],
[0.011465, 0.009828, 0.060750],
[0.013708, 0.011771, 0.068667],
[0.016156, 0.013840, 0.076603],
[0.018815, 0.016026, 0.084584],
[0.021692, 0.018320, 0.092610],
[0.024792, 0.020715, 0.100676],
[0.028123, 0.023201, 0.108787],
[0.031696, 0.025765, 0.116965],
[0.035520, 0.028397, 0.125209],
[0.039608, 0.031090, 0.133515],
[0.043830, 0.033830, 0.141886],
[0.048062, 0.036607, 0.150327],
[0.052320, 0.039407, 0.158841],
[0.056615, 0.042160, 0.167446],
[0.060949, 0.044794, 0.176129],
[0.065330, 0.047318, 0.184892],
[0.069764, 0.049726, 0.193735],
[0.074257, 0.052017, 0.202660],
[0.078815, 0.054184, 0.211667],
[0.083446, 0.056225, 0.220755],
[0.088155, 0.058133, 0.229922],
[0.092949, 0.059904, 0.239164],
[0.097833, 0.061531, 0.248477],
[0.102815, 0.063010, 0.257854],
[0.107899, 0.064335, 0.267289],
[0.113094, 0.065492, 0.276784],
[0.118405, 0.066479, 0.286321],
[0.123833, 0.067295, 0.295879],
[0.129380, 0.067935, 0.305443],
[0.135053, 0.068391, 0.315000],
[0.140858, 0.068654, 0.324538],
[0.146785, 0.068738, 0.334011],
[0.152839, 0.068637, 0.343404],
[0.159018, 0.068354, 0.352688],
[0.165308, 0.067911, 0.361816],
[0.171713, 0.067305, 0.370771],
[0.178212, 0.066576, 0.379497],
[0.184801, 0.065732, 0.387973],
[0.191460, 0.064818, 0.396152],
[0.198177, 0.063862, 0.404009],
[0.204935, 0.062907, 0.411514],
[0.211718, 0.061992, 0.418647],
[0.218512, 0.061158, 0.425392],
[0.225302, 0.060445, 0.431742],
[0.232077, 0.059889, 0.437695],
[0.238826, 0.059517, 0.443256],
[0.245543, 0.059352, 0.448436],
[0.252220, 0.059415, 0.453248],
[0.258857, 0.059706, 0.457710],
[0.265447, 0.060237, 0.461840],
[0.271994, 0.060994, 0.465660],
[0.278493, 0.061978, 0.469190],
[0.284951, 0.063168, 0.472451],
[0.291366, 0.064553, 0.475462],
[0.297740, 0.066117, 0.478243],
[0.304081, 0.067835, 0.480812],
[0.310382, 0.069702, 0.483186],
[0.316654, 0.071690, 0.485380],
[0.322899, 0.073782, 0.487408],
[0.329114, 0.075972, 0.489287],
[0.335308, 0.078236, 0.491024],
[0.341482, 0.080564, 0.492631],
[0.347636, 0.082946, 0.494121],
[0.353773, 0.085373, 0.495501],
[0.359898, 0.087831, 0.496778],
[0.366012, 0.090314, 0.497960],
[0.372116, 0.092816, 0.499053],
[0.378211, 0.095332, 0.500067],
[0.384299, 0.097855, 0.501002],
[0.390384, 0.100379, 0.501864],
[0.396467, 0.102902, 0.502658],
[0.402548, 0.105420, 0.503386],
[0.408629, 0.107930, 0.504052],
[0.414709, 0.110431, 0.504662],
[0.420791, 0.112920, 0.505215],
[0.426877, 0.115395, 0.505714],
[0.432967, 0.117855, 0.506160],
[0.439062, 0.120298, 0.506555],
[0.445163, 0.122724, 0.506901],
[0.451271, 0.125132, 0.507198],
[0.457386, 0.127522, 0.507448],
[0.463508, 0.129893, 0.507652],
[0.469640, 0.132245, 0.507809],
[0.475780, 0.134577, 0.507921],
[0.481929, 0.136891, 0.507989],
[0.488088, 0.139186, 0.508011],
[0.494258, 0.141462, 0.507988],
[0.500438, 0.143719, 0.507920],
[0.506629, 0.145958, 0.507806],
[0.512831, 0.148179, 0.507648],
[0.519045, 0.150383, 0.507443],
[0.525270, 0.152569, 0.507192],
[0.531507, 0.154739, 0.506895],
[0.537755, 0.156894, 0.506551],
[0.544015, 0.159033, 0.506159],
[0.550287, 0.161158, 0.505719],
[0.556571, 0.163269, 0.505230],
[0.562866, 0.165368, 0.504692],
[0.569172, 0.167454, 0.504105],
[0.575490, 0.169530, 0.503466],
[0.581819, 0.171596, 0.502777],
[0.588158, 0.173652, 0.502035],
[0.594508, 0.175701, 0.501241],
[0.600868, 0.177743, 0.500394],
[0.607238, 0.179779, 0.499492],
[0.613617, 0.181811, 0.498536],
[0.620005, 0.183840, 0.497524],
[0.626401, 0.185867, 0.496456],
[0.632805, 0.187893, 0.495332],
[0.639216, 0.189921, 0.494150],
[0.645633, 0.191952, 0.492910],
[0.652056, 0.193986, 0.491611],
[0.658483, 0.196027, 0.490253],
[0.664915, 0.198075, 0.488836],
[0.671349, 0.200133, 0.487358],
[0.677786, 0.202203, 0.485819],
[0.684224, 0.204286, 0.484219],
[0.690661, 0.206384, 0.482558],
[0.697098, 0.208501, 0.480835],
[0.703532, 0.210638, 0.479049],
[0.709962, 0.212797, 0.477201],
[0.716387, 0.214982, 0.475290],
[0.722805, 0.217194, 0.473316],
[0.729216, 0.219437, 0.471279],
[0.735616, 0.221713, 0.469180],
[0.742004, 0.224025, 0.467018],
[0.748378, 0.226377, 0.464794],
[0.754737, 0.228772, 0.462509],
[0.761077, 0.231214, 0.460162],
[0.767398, 0.233705, 0.457755],
[0.773695, 0.236249, 0.455289],
[0.779968, 0.238851, 0.452765],
[0.786212, 0.241514, 0.450184],
[0.792427, 0.244242, 0.447543],
[0.798608, 0.247040, 0.444848],
[0.804752, 0.249911, 0.442102],
[0.810855, 0.252861, 0.439305],
[0.816914, 0.255895, 0.436461],
[0.822926, 0.259016, 0.433573],
[0.828886, 0.262229, 0.430644],
[0.834791, 0.265540, 0.427671],
[0.840636, 0.268953, 0.424666],
[0.846416, 0.272473, 0.421631],
[0.852126, 0.276106, 0.418573],
[0.857763, 0.279857, 0.415496],
[0.863320, 0.283729, 0.412403],
[0.868793, 0.287728, 0.409303],
[0.874176, 0.291859, 0.406205],
[0.879464, 0.296125, 0.403118],
[0.884651, 0.300530, 0.400047],
[0.889731, 0.305079, 0.397002],
[0.894700, 0.309773, 0.393995],
[0.899552, 0.314616, 0.391037],
[0.904281, 0.319610, 0.388137],
[0.908884, 0.324755, 0.385308],
[0.913354, 0.330052, 0.382563],
[0.917689, 0.335500, 0.379915],
[0.921884, 0.341098, 0.377376],
[0.925937, 0.346844, 0.374959],
[0.929845, 0.352734, 0.372677],
[0.933606, 0.358764, 0.370541],
[0.937221, 0.364929, 0.368567],
[0.940687, 0.371224, 0.366762],
[0.944006, 0.377643, 0.365136],
[0.947180, 0.384178, 0.363701],
[0.950210, 0.390820, 0.362468],
[0.953099, 0.397563, 0.361438],
[0.955849, 0.404400, 0.360619],
[0.958464, 0.411324, 0.360014],
[0.960949, 0.418323, 0.359630],
[0.963310, 0.425390, 0.359469],
[0.965549, 0.432519, 0.359529],
[0.967671, 0.439703, 0.359810],
[0.969680, 0.446936, 0.360311],
[0.971582, 0.454210, 0.361030],
[0.973381, 0.461520, 0.361965],
[0.975082, 0.468861, 0.363111],
[0.976690, 0.476226, 0.364466],
[0.978210, 0.483612, 0.366025],
[0.979645, 0.491014, 0.367783],
[0.981000, 0.498428, 0.369734],
[0.982279, 0.505851, 0.371874],
[0.983485, 0.513280, 0.374198],
[0.984622, 0.520713, 0.376698],
[0.985693, 0.528148, 0.379371],
[0.986700, 0.535582, 0.382210],
[0.987646, 0.543015, 0.385210],
[0.988533, 0.550446, 0.388365],
[0.989363, 0.557873, 0.391671],
[0.990138, 0.565296, 0.395122],
[0.990871, 0.572706, 0.398714],
[0.991558, 0.580107, 0.402441],
[0.992196, 0.587502, 0.406299],
[0.992785, 0.594891, 0.410283],
[0.993326, 0.602275, 0.414390],
[0.993834, 0.609644, 0.418613],
[0.994309, 0.616999, 0.422950],
[0.994738, 0.624350, 0.427397],
[0.995122, 0.631696, 0.431951],
[0.995480, 0.639027, 0.436607],
[0.995810, 0.646344, 0.441361],
[0.996096, 0.653659, 0.446213],
[0.996341, 0.660969, 0.451160],
[0.996580, 0.668256, 0.456192],
[0.996775, 0.675541, 0.461314],
[0.996925, 0.682828, 0.466526],
[0.997077, 0.690088, 0.471811],
[0.997186, 0.697349, 0.477182],
[0.997254, 0.704611, 0.482635],
[0.997325, 0.711848, 0.488154],
[0.997351, 0.719089, 0.493755],
[0.997351, 0.726324, 0.499428],
[0.997341, 0.733545, 0.505167],
[0.997285, 0.740772, 0.510983],
[0.997228, 0.747981, 0.516859],
[0.997138, 0.755190, 0.522806],
[0.997019, 0.762398, 0.528821],
[0.996898, 0.769591, 0.534892],
[0.996727, 0.776795, 0.541039],
[0.996571, 0.783977, 0.547233],
[0.996369, 0.791167, 0.553499],
[0.996162, 0.798348, 0.559820],
[0.995932, 0.805527, 0.566202],
[0.995680, 0.812706, 0.572645],
[0.995424, 0.819875, 0.579140],
[0.995131, 0.827052, 0.585701],
[0.994851, 0.834213, 0.592307],
[0.994524, 0.841387, 0.598983],
[0.994222, 0.848540, 0.605696],
[0.993866, 0.855711, 0.612482],
[0.993545, 0.862859, 0.619299],
[0.993170, 0.870024, 0.626189],
[0.992831, 0.877168, 0.633109],
[0.992440, 0.884330, 0.640099],
[0.992089, 0.891470, 0.647116],
[0.991688, 0.898627, 0.654202],
[0.991332, 0.905763, 0.661309],
[0.990930, 0.912915, 0.668481],
[0.990570, 0.920049, 0.675675],
[0.990175, 0.927196, 0.682926],
[0.989815, 0.934329, 0.690198],
[0.989434, 0.941470, 0.697519],
[0.989077, 0.948604, 0.704863],
[0.988717, 0.955742, 0.712242],
[0.988367, 0.962878, 0.719649],
[0.988033, 0.970012, 0.727077],
[0.987691, 0.977154, 0.734536],
[0.987387, 0.984288, 0.742002],
[0.987053, 0.991438, 0.749504]]
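# Editorial note (not part of the original module): each table in this file is
# a 256-row list of [r, g, b] floats in [0, 1].  The lists are pure data; the
# ListedColormap import at the top of the file is used further down to wrap
# them into colormap objects.  A minimal sketch of that step (``magma_cmap``
# is a placeholder name):
#
#     from matplotlib.colors import ListedColormap
#     magma_cmap = ListedColormap(_magma_data, name="magma")
#     rgba = magma_cmap(0.5)   # RGBA colour at the midpoint of the table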
_inferno_data = [[0.001462, 0.000466, 0.013866],
[0.002267, 0.001270, 0.018570],
[0.003299, 0.002249, 0.024239],
[0.004547, 0.003392, 0.030909],
[0.006006, 0.004692, 0.038558],
[0.007676, 0.006136, 0.046836],
[0.009561, 0.007713, 0.055143],
[0.011663, 0.009417, 0.063460],
[0.013995, 0.011225, 0.071862],
[0.016561, 0.013136, 0.080282],
[0.019373, 0.015133, 0.088767],
[0.022447, 0.017199, 0.097327],
[0.025793, 0.019331, 0.105930],
[0.029432, 0.021503, 0.114621],
[0.033385, 0.023702, 0.123397],
[0.037668, 0.025921, 0.132232],
[0.042253, 0.028139, 0.141141],
[0.046915, 0.030324, 0.150164],
[0.051644, 0.032474, 0.159254],
[0.056449, 0.034569, 0.168414],
[0.061340, 0.036590, 0.177642],
[0.066331, 0.038504, 0.186962],
[0.071429, 0.040294, 0.196354],
[0.076637, 0.041905, 0.205799],
[0.081962, 0.043328, 0.215289],
[0.087411, 0.044556, 0.224813],
[0.092990, 0.045583, 0.234358],
[0.098702, 0.046402, 0.243904],
[0.104551, 0.047008, 0.253430],
[0.110536, 0.047399, 0.262912],
[0.116656, 0.047574, 0.272321],
[0.122908, 0.047536, 0.281624],
[0.129285, 0.047293, 0.290788],
[0.135778, 0.046856, 0.299776],
[0.142378, 0.046242, 0.308553],
[0.149073, 0.045468, 0.317085],
[0.155850, 0.044559, 0.325338],
[0.162689, 0.043554, 0.333277],
[0.169575, 0.042489, 0.340874],
[0.176493, 0.041402, 0.348111],
[0.183429, 0.040329, 0.354971],
[0.190367, 0.039309, 0.361447],
[0.197297, 0.038400, 0.367535],
[0.204209, 0.037632, 0.373238],
[0.211095, 0.037030, 0.378563],
[0.217949, 0.036615, 0.383522],
[0.224763, 0.036405, 0.388129],
[0.231538, 0.036405, 0.392400],
[0.238273, 0.036621, 0.396353],
[0.244967, 0.037055, 0.400007],
[0.251620, 0.037705, 0.403378],
[0.258234, 0.038571, 0.406485],
[0.264810, 0.039647, 0.409345],
[0.271347, 0.040922, 0.411976],
[0.277850, 0.042353, 0.414392],
[0.284321, 0.043933, 0.416608],
[0.290763, 0.045644, 0.418637],
[0.297178, 0.047470, 0.420491],
[0.303568, 0.049396, 0.422182],
[0.309935, 0.051407, 0.423721],
[0.316282, 0.053490, 0.425116],
[0.322610, 0.055634, 0.426377],
[0.328921, 0.057827, 0.427511],
[0.335217, 0.060060, 0.428524],
[0.341500, 0.062325, 0.429425],
[0.347771, 0.064616, 0.430217],
[0.354032, 0.066925, 0.430906],
[0.360284, 0.069247, 0.431497],
[0.366529, 0.071579, 0.431994],
[0.372768, 0.073915, 0.432400],
[0.379001, 0.076253, 0.432719],
[0.385228, 0.078591, 0.432955],
[0.391453, 0.080927, 0.433109],
[0.397674, 0.083257, 0.433183],
[0.403894, 0.085580, 0.433179],
[0.410113, 0.087896, 0.433098],
[0.416331, 0.090203, 0.432943],
[0.422549, 0.092501, 0.432714],
[0.428768, 0.094790, 0.432412],
[0.434987, 0.097069, 0.432039],
[0.441207, 0.099338, 0.431594],
[0.447428, 0.101597, 0.431080],
[0.453651, 0.103848, 0.430498],
[0.459875, 0.106089, 0.429846],
[0.466100, 0.108322, 0.429125],
[0.472328, 0.110547, 0.428334],
[0.478558, 0.112764, 0.427475],
[0.484789, 0.114974, 0.426548],
[0.491022, 0.117179, 0.425552],
[0.497257, 0.119379, 0.424488],
[0.503493, 0.121575, 0.423356],
[0.509730, 0.123769, 0.422156],
[0.515967, 0.125960, 0.420887],
[0.522206, 0.128150, 0.419549],
[0.528444, 0.130341, 0.418142],
[0.534683, 0.132534, 0.416667],
[0.540920, 0.134729, 0.415123],
[0.547157, 0.136929, 0.413511],
[0.553392, 0.139134, 0.411829],
[0.559624, 0.141346, 0.410078],
[0.565854, 0.143567, 0.408258],
[0.572081, 0.145797, 0.406369],
[0.578304, 0.148039, 0.404411],
[0.584521, 0.150294, 0.402385],
[0.590734, 0.152563, 0.400290],
[0.596940, 0.154848, 0.398125],
[0.603139, 0.157151, 0.395891],
[0.609330, 0.159474, 0.393589],
[0.615513, 0.161817, 0.391219],
[0.621685, 0.164184, 0.388781],
[0.627847, 0.166575, 0.386276],
[0.633998, 0.168992, 0.383704],
[0.640135, 0.171438, 0.381065],
[0.646260, 0.173914, 0.378359],
[0.652369, 0.176421, 0.375586],
[0.658463, 0.178962, 0.372748],
[0.664540, 0.181539, 0.369846],
[0.670599, 0.184153, 0.366879],
[0.676638, 0.186807, 0.363849],
[0.682656, 0.189501, 0.360757],
[0.688653, 0.192239, 0.357603],
[0.694627, 0.195021, 0.354388],
[0.700576, 0.197851, 0.351113],
[0.706500, 0.200728, 0.347777],
[0.712396, 0.203656, 0.344383],
[0.718264, 0.206636, 0.340931],
[0.724103, 0.209670, 0.337424],
[0.729909, 0.212759, 0.333861],
[0.735683, 0.215906, 0.330245],
[0.741423, 0.219112, 0.326576],
[0.747127, 0.222378, 0.322856],
[0.752794, 0.225706, 0.319085],
[0.758422, 0.229097, 0.315266],
[0.764010, 0.232554, 0.311399],
[0.769556, 0.236077, 0.307485],
[0.775059, 0.239667, 0.303526],
[0.780517, 0.243327, 0.299523],
[0.785929, 0.247056, 0.295477],
[0.791293, 0.250856, 0.291390],
[0.796607, 0.254728, 0.287264],
[0.801871, 0.258674, 0.283099],
[0.807082, 0.262692, 0.278898],
[0.812239, 0.266786, 0.274661],
[0.817341, 0.270954, 0.270390],
[0.822386, 0.275197, 0.266085],
[0.827372, 0.279517, 0.261750],
[0.832299, 0.283913, 0.257383],
[0.837165, 0.288385, 0.252988],
[0.841969, 0.292933, 0.248564],
[0.846709, 0.297559, 0.244113],
[0.851384, 0.302260, 0.239636],
[0.855992, 0.307038, 0.235133],
[0.860533, 0.311892, 0.230606],
[0.865006, 0.316822, 0.226055],
[0.869409, 0.321827, 0.221482],
[0.873741, 0.326906, 0.216886],
[0.878001, 0.332060, 0.212268],
[0.882188, 0.337287, 0.207628],
[0.886302, 0.342586, 0.202968],
[0.890341, 0.347957, 0.198286],
[0.894305, 0.353399, 0.193584],
[0.898192, 0.358911, 0.188860],
[0.902003, 0.364492, 0.184116],
[0.905735, 0.370140, 0.179350],
[0.909390, 0.375856, 0.174563],
[0.912966, 0.381636, 0.169755],
[0.916462, 0.387481, 0.164924],
[0.919879, 0.393389, 0.160070],
[0.923215, 0.399359, 0.155193],
[0.926470, 0.405389, 0.150292],
[0.929644, 0.411479, 0.145367],
[0.932737, 0.417627, 0.140417],
[0.935747, 0.423831, 0.135440],
[0.938675, 0.430091, 0.130438],
[0.941521, 0.436405, 0.125409],
[0.944285, 0.442772, 0.120354],
[0.946965, 0.449191, 0.115272],
[0.949562, 0.455660, 0.110164],
[0.952075, 0.462178, 0.105031],
[0.954506, 0.468744, 0.099874],
[0.956852, 0.475356, 0.094695],
[0.959114, 0.482014, 0.089499],
[0.961293, 0.488716, 0.084289],
[0.963387, 0.495462, 0.079073],
[0.965397, 0.502249, 0.073859],
[0.967322, 0.509078, 0.068659],
[0.969163, 0.515946, 0.063488],
[0.970919, 0.522853, 0.058367],
[0.972590, 0.529798, 0.053324],
[0.974176, 0.536780, 0.048392],
[0.975677, 0.543798, 0.043618],
[0.977092, 0.550850, 0.039050],
[0.978422, 0.557937, 0.034931],
[0.979666, 0.565057, 0.031409],
[0.980824, 0.572209, 0.028508],
[0.981895, 0.579392, 0.026250],
[0.982881, 0.586606, 0.024661],
[0.983779, 0.593849, 0.023770],
[0.984591, 0.601122, 0.023606],
[0.985315, 0.608422, 0.024202],
[0.985952, 0.615750, 0.025592],
[0.986502, 0.623105, 0.027814],
[0.986964, 0.630485, 0.030908],
[0.987337, 0.637890, 0.034916],
[0.987622, 0.645320, 0.039886],
[0.987819, 0.652773, 0.045581],
[0.987926, 0.660250, 0.051750],
[0.987945, 0.667748, 0.058329],
[0.987874, 0.675267, 0.065257],
[0.987714, 0.682807, 0.072489],
[0.987464, 0.690366, 0.079990],
[0.987124, 0.697944, 0.087731],
[0.986694, 0.705540, 0.095694],
[0.986175, 0.713153, 0.103863],
[0.985566, 0.720782, 0.112229],
[0.984865, 0.728427, 0.120785],
[0.984075, 0.736087, 0.129527],
[0.983196, 0.743758, 0.138453],
[0.982228, 0.751442, 0.147565],
[0.981173, 0.759135, 0.156863],
[0.980032, 0.766837, 0.166353],
[0.978806, 0.774545, 0.176037],
[0.977497, 0.782258, 0.185923],
[0.976108, 0.789974, 0.196018],
[0.974638, 0.797692, 0.206332],
[0.973088, 0.805409, 0.216877],
[0.971468, 0.813122, 0.227658],
[0.969783, 0.820825, 0.238686],
[0.968041, 0.828515, 0.249972],
[0.966243, 0.836191, 0.261534],
[0.964394, 0.843848, 0.273391],
[0.962517, 0.851476, 0.285546],
[0.960626, 0.859069, 0.298010],
[0.958720, 0.866624, 0.310820],
[0.956834, 0.874129, 0.323974],
[0.954997, 0.881569, 0.337475],
[0.953215, 0.888942, 0.351369],
[0.951546, 0.896226, 0.365627],
[0.950018, 0.903409, 0.380271],
[0.948683, 0.910473, 0.395289],
[0.947594, 0.917399, 0.410665],
[0.946809, 0.924168, 0.426373],
[0.946392, 0.930761, 0.442367],
[0.946403, 0.937159, 0.458592],
[0.946903, 0.943348, 0.474970],
[0.947937, 0.949318, 0.491426],
[0.949545, 0.955063, 0.507860],
[0.951740, 0.960587, 0.524203],
[0.954529, 0.965896, 0.540361],
[0.957896, 0.971003, 0.556275],
[0.961812, 0.975924, 0.571925],
[0.966249, 0.980678, 0.587206],
[0.971162, 0.985282, 0.602154],
[0.976511, 0.989753, 0.616760],
[0.982257, 0.994109, 0.631017],
[0.988362, 0.998364, 0.644924]]
_plasma_data = [[0.050383, 0.029803, 0.527975],
[0.063536, 0.028426, 0.533124],
[0.075353, 0.027206, 0.538007],
[0.086222, 0.026125, 0.542658],
[0.096379, 0.025165, 0.547103],
[0.105980, 0.024309, 0.551368],
[0.115124, 0.023556, 0.555468],
[0.123903, 0.022878, 0.559423],
[0.132381, 0.022258, 0.563250],
[0.140603, 0.021687, 0.566959],
[0.148607, 0.021154, 0.570562],
[0.156421, 0.020651, 0.574065],
[0.164070, 0.020171, 0.577478],
[0.171574, 0.019706, 0.580806],
[0.178950, 0.019252, 0.584054],
[0.186213, 0.018803, 0.587228],
[0.193374, 0.018354, 0.590330],
[0.200445, 0.017902, 0.593364],
[0.207435, 0.017442, 0.596333],
[0.214350, 0.016973, 0.599239],
[0.221197, 0.016497, 0.602083],
[0.227983, 0.016007, 0.604867],
[0.234715, 0.015502, 0.607592],
[0.241396, 0.014979, 0.610259],
[0.248032, 0.014439, 0.612868],
[0.254627, 0.013882, 0.615419],
[0.261183, 0.013308, 0.617911],
[0.267703, 0.012716, 0.620346],
[0.274191, 0.012109, 0.622722],
[0.280648, 0.011488, 0.625038],
[0.287076, 0.010855, 0.627295],
[0.293478, 0.010213, 0.629490],
[0.299855, 0.009561, 0.631624],
[0.306210, 0.008902, 0.633694],
[0.312543, 0.008239, 0.635700],
[0.318856, 0.007576, 0.637640],
[0.325150, 0.006915, 0.639512],
[0.331426, 0.006261, 0.641316],
[0.337683, 0.005618, 0.643049],
[0.343925, 0.004991, 0.644710],
[0.350150, 0.004382, 0.646298],
[0.356359, 0.003798, 0.647810],
[0.362553, 0.003243, 0.649245],
[0.368733, 0.002724, 0.650601],
[0.374897, 0.002245, 0.651876],
[0.381047, 0.001814, 0.653068],
[0.387183, 0.001434, 0.654177],
[0.393304, 0.001114, 0.655199],
[0.399411, 0.000859, 0.656133],
[0.405503, 0.000678, 0.656977],
[0.411580, 0.000577, 0.657730],
[0.417642, 0.000564, 0.658390],
[0.423689, 0.000646, 0.658956],
[0.429719, 0.000831, 0.659425],
[0.435734, 0.001127, 0.659797],
[0.441732, 0.001540, 0.660069],
[0.447714, 0.002080, 0.660240],
[0.453677, 0.002755, 0.660310],
[0.459623, 0.003574, 0.660277],
[0.465550, 0.004545, 0.660139],
[0.471457, 0.005678, 0.659897],
[0.477344, 0.006980, 0.659549],
[0.483210, 0.008460, 0.659095],
[0.489055, 0.010127, 0.658534],
[0.494877, 0.011990, 0.657865],
[0.500678, 0.014055, 0.657088],
[0.506454, 0.016333, 0.656202],
[0.512206, 0.018833, 0.655209],
[0.517933, 0.021563, 0.654109],
[0.523633, 0.024532, 0.652901],
[0.529306, 0.027747, 0.651586],
[0.534952, 0.031217, 0.650165],
[0.540570, 0.034950, 0.648640],
[0.546157, 0.038954, 0.647010],
[0.551715, 0.043136, 0.645277],
[0.557243, 0.047331, 0.643443],
[0.562738, 0.051545, 0.641509],
[0.568201, 0.055778, 0.639477],
[0.573632, 0.060028, 0.637349],
[0.579029, 0.064296, 0.635126],
[0.584391, 0.068579, 0.632812],
[0.589719, 0.072878, 0.630408],
[0.595011, 0.077190, 0.627917],
[0.600266, 0.081516, 0.625342],
[0.605485, 0.085854, 0.622686],
[0.610667, 0.090204, 0.619951],
[0.615812, 0.094564, 0.617140],
[0.620919, 0.098934, 0.614257],
[0.625987, 0.103312, 0.611305],
[0.631017, 0.107699, 0.608287],
[0.636008, 0.112092, 0.605205],
[0.640959, 0.116492, 0.602065],
[0.645872, 0.120898, 0.598867],
[0.650746, 0.125309, 0.595617],
[0.655580, 0.129725, 0.592317],
[0.660374, 0.134144, 0.588971],
[0.665129, 0.138566, 0.585582],
[0.669845, 0.142992, 0.582154],
[0.674522, 0.147419, 0.578688],
[0.679160, 0.151848, 0.575189],
[0.683758, 0.156278, 0.571660],
[0.688318, 0.160709, 0.568103],
[0.692840, 0.165141, 0.564522],
[0.697324, 0.169573, 0.560919],
[0.701769, 0.174005, 0.557296],
[0.706178, 0.178437, 0.553657],
[0.710549, 0.182868, 0.550004],
[0.714883, 0.187299, 0.546338],
[0.719181, 0.191729, 0.542663],
[0.723444, 0.196158, 0.538981],
[0.727670, 0.200586, 0.535293],
[0.731862, 0.205013, 0.531601],
[0.736019, 0.209439, 0.527908],
[0.740143, 0.213864, 0.524216],
[0.744232, 0.218288, 0.520524],
[0.748289, 0.222711, 0.516834],
[0.752312, 0.227133, 0.513149],
[0.756304, 0.231555, 0.509468],
[0.760264, 0.235976, 0.505794],
[0.764193, 0.240396, 0.502126],
[0.768090, 0.244817, 0.498465],
[0.771958, 0.249237, 0.494813],
[0.775796, 0.253658, 0.491171],
[0.779604, 0.258078, 0.487539],
[0.783383, 0.262500, 0.483918],
[0.787133, 0.266922, 0.480307],
[0.790855, 0.271345, 0.476706],
[0.794549, 0.275770, 0.473117],
[0.798216, 0.280197, 0.469538],
[0.801855, 0.284626, 0.465971],
[0.805467, 0.289057, 0.462415],
[0.809052, 0.293491, 0.458870],
[0.812612, 0.297928, 0.455338],
[0.816144, 0.302368, 0.451816],
[0.819651, 0.306812, 0.448306],
[0.823132, 0.311261, 0.444806],
[0.826588, 0.315714, 0.441316],
[0.830018, 0.320172, 0.437836],
[0.833422, 0.324635, 0.434366],
[0.836801, 0.329105, 0.430905],
[0.840155, 0.333580, 0.427455],
[0.843484, 0.338062, 0.424013],
[0.846788, 0.342551, 0.420579],
[0.850066, 0.347048, 0.417153],
[0.853319, 0.351553, 0.413734],
[0.856547, 0.356066, 0.410322],
[0.859750, 0.360588, 0.406917],
[0.862927, 0.365119, 0.403519],
[0.866078, 0.369660, 0.400126],
[0.869203, 0.374212, 0.396738],
[0.872303, 0.378774, 0.393355],
[0.875376, 0.383347, 0.389976],
[0.878423, 0.387932, 0.386600],
[0.881443, 0.392529, 0.383229],
[0.884436, 0.397139, 0.379860],
[0.887402, 0.401762, 0.376494],
[0.890340, 0.406398, 0.373130],
[0.893250, 0.411048, 0.369768],
[0.896131, 0.415712, 0.366407],
[0.898984, 0.420392, 0.363047],
[0.901807, 0.425087, 0.359688],
[0.904601, 0.429797, 0.356329],
[0.907365, 0.434524, 0.352970],
[0.910098, 0.439268, 0.349610],
[0.912800, 0.444029, 0.346251],
[0.915471, 0.448807, 0.342890],
[0.918109, 0.453603, 0.339529],
[0.920714, 0.458417, 0.336166],
[0.923287, 0.463251, 0.332801],
[0.925825, 0.468103, 0.329435],
[0.928329, 0.472975, 0.326067],
[0.930798, 0.477867, 0.322697],
[0.933232, 0.482780, 0.319325],
[0.935630, 0.487712, 0.315952],
[0.937990, 0.492667, 0.312575],
[0.940313, 0.497642, 0.309197],
[0.942598, 0.502639, 0.305816],
[0.944844, 0.507658, 0.302433],
[0.947051, 0.512699, 0.299049],
[0.949217, 0.517763, 0.295662],
[0.951344, 0.522850, 0.292275],
[0.953428, 0.527960, 0.288883],
[0.955470, 0.533093, 0.285490],
[0.957469, 0.538250, 0.282096],
[0.959424, 0.543431, 0.278701],
[0.961336, 0.548636, 0.275305],
[0.963203, 0.553865, 0.271909],
[0.965024, 0.559118, 0.268513],
[0.966798, 0.564396, 0.265118],
[0.968526, 0.569700, 0.261721],
[0.970205, 0.575028, 0.258325],
[0.971835, 0.580382, 0.254931],
[0.973416, 0.585761, 0.251540],
[0.974947, 0.591165, 0.248151],
[0.976428, 0.596595, 0.244767],
[0.977856, 0.602051, 0.241387],
[0.979233, 0.607532, 0.238013],
[0.980556, 0.613039, 0.234646],
[0.981826, 0.618572, 0.231287],
[0.983041, 0.624131, 0.227937],
[0.984199, 0.629718, 0.224595],
[0.985301, 0.635330, 0.221265],
[0.986345, 0.640969, 0.217948],
[0.987332, 0.646633, 0.214648],
[0.988260, 0.652325, 0.211364],
[0.989128, 0.658043, 0.208100],
[0.989935, 0.663787, 0.204859],
[0.990681, 0.669558, 0.201642],
[0.991365, 0.675355, 0.198453],
[0.991985, 0.681179, 0.195295],
[0.992541, 0.687030, 0.192170],
[0.993032, 0.692907, 0.189084],
[0.993456, 0.698810, 0.186041],
[0.993814, 0.704741, 0.183043],
[0.994103, 0.710698, 0.180097],
[0.994324, 0.716681, 0.177208],
[0.994474, 0.722691, 0.174381],
[0.994553, 0.728728, 0.171622],
[0.994561, 0.734791, 0.168938],
[0.994495, 0.740880, 0.166335],
[0.994355, 0.746995, 0.163821],
[0.994141, 0.753137, 0.161404],
[0.993851, 0.759304, 0.159092],
[0.993482, 0.765499, 0.156891],
[0.993033, 0.771720, 0.154808],
[0.992505, 0.777967, 0.152855],
[0.991897, 0.784239, 0.151042],
[0.991209, 0.790537, 0.149377],
[0.990439, 0.796859, 0.147870],
[0.989587, 0.803205, 0.146529],
[0.988648, 0.809579, 0.145357],
[0.987621, 0.815978, 0.144363],
[0.986509, 0.822401, 0.143557],
[0.985314, 0.828846, 0.142945],
[0.984031, 0.835315, 0.142528],
[0.982653, 0.841812, 0.142303],
[0.981190, 0.848329, 0.142279],
[0.979644, 0.854866, 0.142453],
[0.977995, 0.861432, 0.142808],
[0.976265, 0.868016, 0.143351],
[0.974443, 0.874622, 0.144061],
[0.972530, 0.881250, 0.144923],
[0.970533, 0.887896, 0.145919],
[0.968443, 0.894564, 0.147014],
[0.966271, 0.901249, 0.148180],
[0.964021, 0.907950, 0.149370],
[0.961681, 0.914672, 0.150520],
[0.959276, 0.921407, 0.151566],
[0.956808, 0.928152, 0.152409],
[0.954287, 0.934908, 0.152921],
[0.951726, 0.941671, 0.152925],
[0.949151, 0.948435, 0.152178],
[0.946602, 0.955190, 0.150328],
[0.944152, 0.961916, 0.146861],
[0.941896, 0.968590, 0.140956],
[0.940015, 0.975158, 0.131326]]
_viridis_data = [[0.267004, 0.004874, 0.329415],
[0.268510, 0.009605, 0.335427],
[0.269944, 0.014625, 0.341379],
[0.271305, 0.019942, 0.347269],
[0.272594, 0.025563, 0.353093],
[0.273809, 0.031497, 0.358853],
[0.274952, 0.037752, 0.364543],
[0.276022, 0.044167, 0.370164],
[0.277018, 0.050344, 0.375715],
[0.277941, 0.056324, 0.381191],
[0.278791, 0.062145, 0.386592],
[0.279566, 0.067836, 0.391917],
[0.280267, 0.073417, 0.397163],
[0.280894, 0.078907, 0.402329],
[0.281446, 0.084320, 0.407414],
[0.281924, 0.089666, 0.412415],
[0.282327, 0.094955, 0.417331],
[0.282656, 0.100196, 0.422160],
[0.282910, 0.105393, 0.426902],
[0.283091, 0.110553, 0.431554],
[0.283197, 0.115680, 0.436115],
[0.283229, 0.120777, 0.440584],
[0.283187, 0.125848, 0.444960],
[0.283072, 0.130895, 0.449241],
[0.282884, 0.135920, 0.453427],
[0.282623, 0.140926, 0.457517],
[0.282290, 0.145912, 0.461510],
[0.281887, 0.150881, 0.465405],
[0.281412, 0.155834, 0.469201],
[0.280868, 0.160771, 0.472899],
[0.280255, 0.165693, 0.476498],
[0.279574, 0.170599, 0.479997],
[0.278826, 0.175490, 0.483397],
[0.278012, 0.180367, 0.486697],
[0.277134, 0.185228, 0.489898],
[0.276194, 0.190074, 0.493001],
[0.275191, 0.194905, 0.496005],
[0.274128, 0.199721, 0.498911],
[0.273006, 0.204520, 0.501721],
[0.271828, 0.209303, 0.504434],
[0.270595, 0.214069, 0.507052],
[0.269308, 0.218818, 0.509577],
[0.267968, 0.223549, 0.512008],
[0.266580, 0.228262, 0.514349],
[0.265145, 0.232956, 0.516599],
[0.263663, 0.237631, 0.518762],
[0.262138, 0.242286, 0.520837],
[0.260571, 0.246922, 0.522828],
[0.258965, 0.251537, 0.524736],
[0.257322, 0.256130, 0.526563],
[0.255645, 0.260703, 0.528312],
[0.253935, 0.265254, 0.529983],
[0.252194, 0.269783, 0.531579],
[0.250425, 0.274290, 0.533103],
[0.248629, 0.278775, 0.534556],
[0.246811, 0.283237, 0.535941],
[0.244972, 0.287675, 0.537260],
[0.243113, 0.292092, 0.538516],
[0.241237, 0.296485, 0.539709],
[0.239346, 0.300855, 0.540844],
[0.237441, 0.305202, 0.541921],
[0.235526, 0.309527, 0.542944],
[0.233603, 0.313828, 0.543914],
[0.231674, 0.318106, 0.544834],
[0.229739, 0.322361, 0.545706],
[0.227802, 0.326594, 0.546532],
[0.225863, 0.330805, 0.547314],
[0.223925, 0.334994, 0.548053],
[0.221989, 0.339161, 0.548752],
[0.220057, 0.343307, 0.549413],
[0.218130, 0.347432, 0.550038],
[0.216210, 0.351535, 0.550627],
[0.214298, 0.355619, 0.551184],
[0.212395, 0.359683, 0.551710],
[0.210503, 0.363727, 0.552206],
[0.208623, 0.367752, 0.552675],
[0.206756, 0.371758, 0.553117],
[0.204903, 0.375746, 0.553533],
[0.203063, 0.379716, 0.553925],
[0.201239, 0.383670, 0.554294],
[0.199430, 0.387607, 0.554642],
[0.197636, 0.391528, 0.554969],
[0.195860, 0.395433, 0.555276],
[0.194100, 0.399323, 0.555565],
[0.192357, 0.403199, 0.555836],
[0.190631, 0.407061, 0.556089],
[0.188923, 0.410910, 0.556326],
[0.187231, 0.414746, 0.556547],
[0.185556, 0.418570, 0.556753],
[0.183898, 0.422383, 0.556944],
[0.182256, 0.426184, 0.557120],
[0.180629, 0.429975, 0.557282],
[0.179019, 0.433756, 0.557430],
[0.177423, 0.437527, 0.557565],
[0.175841, 0.441290, 0.557685],
[0.174274, 0.445044, 0.557792],
[0.172719, 0.448791, 0.557885],
[0.171176, 0.452530, 0.557965],
[0.169646, 0.456262, 0.558030],
[0.168126, 0.459988, 0.558082],
[0.166617, 0.463708, 0.558119],
[0.165117, 0.467423, 0.558141],
[0.163625, 0.471133, 0.558148],
[0.162142, 0.474838, 0.558140],
[0.160665, 0.478540, 0.558115],
[0.159194, 0.482237, 0.558073],
[0.157729, 0.485932, 0.558013],
[0.156270, 0.489624, 0.557936],
[0.154815, 0.493313, 0.557840],
[0.153364, 0.497000, 0.557724],
[0.151918, 0.500685, 0.557587],
[0.150476, 0.504369, 0.557430],
[0.149039, 0.508051, 0.557250],
[0.147607, 0.511733, 0.557049],
[0.146180, 0.515413, 0.556823],
[0.144759, 0.519093, 0.556572],
[0.143343, 0.522773, 0.556295],
[0.141935, 0.526453, 0.555991],
[0.140536, 0.530132, 0.555659],
[0.139147, 0.533812, 0.555298],
[0.137770, 0.537492, 0.554906],
[0.136408, 0.541173, 0.554483],
[0.135066, 0.544853, 0.554029],
[0.133743, 0.548535, 0.553541],
[0.132444, 0.552216, 0.553018],
[0.131172, 0.555899, 0.552459],
[0.129933, 0.559582, 0.551864],
[0.128729, 0.563265, 0.551229],
[0.127568, 0.566949, 0.550556],
[0.126453, 0.570633, 0.549841],
[0.125394, 0.574318, 0.549086],
[0.124395, 0.578002, 0.548287],
[0.123463, 0.581687, 0.547445],
[0.122606, 0.585371, 0.546557],
[0.121831, 0.589055, 0.545623],
[0.121148, 0.592739, 0.544641],
[0.120565, 0.596422, 0.543611],
[0.120092, 0.600104, 0.542530],
[0.119738, 0.603785, 0.541400],
[0.119512, 0.607464, 0.540218],
[0.119423, 0.611141, 0.538982],
[0.119483, 0.614817, 0.537692],
[0.119699, 0.618490, 0.536347],
[0.120081, 0.622161, 0.534946],
[0.120638, 0.625828, 0.533488],
[0.121380, 0.629492, 0.531973],
[0.122312, 0.633153, 0.530398],
[0.123444, 0.636809, 0.528763],
[0.124780, 0.640461, 0.527068],
[0.126326, 0.644107, 0.525311],
[0.128087, 0.647749, 0.523491],
[0.130067, 0.651384, 0.521608],
[0.132268, 0.655014, 0.519661],
[0.134692, 0.658636, 0.517649],
[0.137339, 0.662252, 0.515571],
[0.140210, 0.665859, 0.513427],
[0.143303, 0.669459, 0.511215],
[0.146616, 0.673050, 0.508936],
[0.150148, 0.676631, 0.506589],
[0.153894, 0.680203, 0.504172],
[0.157851, 0.683765, 0.501686],
[0.162016, 0.687316, 0.499129],
[0.166383, 0.690856, 0.496502],
[0.170948, 0.694384, 0.493803],
[0.175707, 0.697900, 0.491033],
[0.180653, 0.701402, 0.488189],
[0.185783, 0.704891, 0.485273],
[0.191090, 0.708366, 0.482284],
[0.196571, 0.711827, 0.479221],
[0.202219, 0.715272, 0.476084],
[0.208030, 0.718701, 0.472873],
[0.214000, 0.722114, 0.469588],
[0.220124, 0.725509, 0.466226],
[0.226397, 0.728888, 0.462789],
[0.232815, 0.732247, 0.459277],
[0.239374, 0.735588, 0.455688],
[0.246070, 0.738910, 0.452024],
[0.252899, 0.742211, 0.448284],
[0.259857, 0.745492, 0.444467],
[0.266941, 0.748751, 0.440573],
[0.274149, 0.751988, 0.436601],
[0.281477, 0.755203, 0.432552],
[0.288921, 0.758394, 0.428426],
[0.296479, 0.761561, 0.424223],
[0.304148, 0.764704, 0.419943],
[0.311925, 0.767822, 0.415586],
[0.319809, 0.770914, 0.411152],
[0.327796, 0.773980, 0.406640],
[0.335885, 0.777018, 0.402049],
[0.344074, 0.780029, 0.397381],
[0.352360, 0.783011, 0.392636],
[0.360741, 0.785964, 0.387814],
[0.369214, 0.788888, 0.382914],
[0.377779, 0.791781, 0.377939],
[0.386433, 0.794644, 0.372886],
[0.395174, 0.797475, 0.367757],
[0.404001, 0.800275, 0.362552],
[0.412913, 0.803041, 0.357269],
[0.421908, 0.805774, 0.351910],
[0.430983, 0.808473, 0.346476],
[0.440137, 0.811138, 0.340967],
[0.449368, 0.813768, 0.335384],
[0.458674, 0.816363, 0.329727],
[0.468053, 0.818921, 0.323998],
[0.477504, 0.821444, 0.318195],
[0.487026, 0.823929, 0.312321],
[0.496615, 0.826376, 0.306377],
[0.506271, 0.828786, 0.300362],
[0.515992, 0.831158, 0.294279],
[0.525776, 0.833491, 0.288127],
[0.535621, 0.835785, 0.281908],
[0.545524, 0.838039, 0.275626],
[0.555484, 0.840254, 0.269281],
[0.565498, 0.842430, 0.262877],
[0.575563, 0.844566, 0.256415],
[0.585678, 0.846661, 0.249897],
[0.595839, 0.848717, 0.243329],
[0.606045, 0.850733, 0.236712],
[0.616293, 0.852709, 0.230052],
[0.626579, 0.854645, 0.223353],
[0.636902, 0.856542, 0.216620],
[0.647257, 0.858400, 0.209861],
[0.657642, 0.860219, 0.203082],
[0.668054, 0.861999, 0.196293],
[0.678489, 0.863742, 0.189503],
[0.688944, 0.865448, 0.182725],
[0.699415, 0.867117, 0.175971],
[0.709898, 0.868751, 0.169257],
[0.720391, 0.870350, 0.162603],
[0.730889, 0.871916, 0.156029],
[0.741388, 0.873449, 0.149561],
[0.751884, 0.874951, 0.143228],
[0.762373, 0.876424, 0.137064],
[0.772852, 0.877868, 0.131109],
[0.783315, 0.879285, 0.125405],
[0.793760, 0.880678, 0.120005],
[0.804182, 0.882046, 0.114965],
[0.814576, 0.883393, 0.110347],
[0.824940, 0.884720, 0.106217],
[0.835270, 0.886029, 0.102646],
[0.845561, 0.887322, 0.099702],
[0.855810, 0.888601, 0.097452],
[0.866013, 0.889868, 0.095953],
[0.876168, 0.891125, 0.095250],
[0.886271, 0.892374, 0.095374],
[0.896320, 0.893616, 0.096335],
[0.906311, 0.894855, 0.098125],
[0.916242, 0.896091, 0.100717],
[0.926106, 0.897330, 0.104071],
[0.935904, 0.898570, 0.108131],
[0.945636, 0.899815, 0.112838],
[0.955300, 0.901065, 0.118128],
[0.964894, 0.902323, 0.123941],
[0.974417, 0.903590, 0.130215],
[0.983868, 0.904867, 0.136897],
[0.993248, 0.906157, 0.143936]]
_cividis_data = [[0.000000, 0.135112, 0.304751],
[0.000000, 0.138068, 0.311105],
[0.000000, 0.141013, 0.317579],
[0.000000, 0.143951, 0.323982],
[0.000000, 0.146877, 0.330479],
[0.000000, 0.149791, 0.337065],
[0.000000, 0.152673, 0.343704],
[0.000000, 0.155377, 0.350500],
[0.000000, 0.157932, 0.357521],
[0.000000, 0.160495, 0.364534],
[0.000000, 0.163058, 0.371608],
[0.000000, 0.165621, 0.378769],
[0.000000, 0.168204, 0.385902],
[0.000000, 0.170800, 0.393100],
[0.000000, 0.173420, 0.400353],
[0.000000, 0.176082, 0.407577],
[0.000000, 0.178802, 0.414764],
[0.000000, 0.181610, 0.421859],
[0.000000, 0.184550, 0.428802],
[0.000000, 0.186915, 0.435532],
[0.000000, 0.188769, 0.439563],
[0.000000, 0.190950, 0.441085],
[0.000000, 0.193366, 0.441561],
[0.003602, 0.195911, 0.441564],
[0.017852, 0.198528, 0.441248],
[0.032110, 0.201199, 0.440785],
[0.046205, 0.203903, 0.440196],
[0.058378, 0.206629, 0.439531],
[0.068968, 0.209372, 0.438863],
[0.078624, 0.212122, 0.438105],
[0.087465, 0.214879, 0.437342],
[0.095645, 0.217643, 0.436593],
[0.103401, 0.220406, 0.435790],
[0.110658, 0.223170, 0.435067],
[0.117612, 0.225935, 0.434308],
[0.124291, 0.228697, 0.433547],
[0.130669, 0.231458, 0.432840],
[0.136830, 0.234216, 0.432148],
[0.142852, 0.236972, 0.431404],
[0.148638, 0.239724, 0.430752],
[0.154261, 0.242475, 0.430120],
[0.159733, 0.245221, 0.429528],
[0.165113, 0.247965, 0.428908],
[0.170362, 0.250707, 0.428325],
[0.175490, 0.253444, 0.427790],
[0.180503, 0.256180, 0.427299],
[0.185453, 0.258914, 0.426788],
[0.190303, 0.261644, 0.426329],
[0.195057, 0.264372, 0.425924],
[0.199764, 0.267099, 0.425497],
[0.204385, 0.269823, 0.425126],
[0.208926, 0.272546, 0.424809],
[0.213431, 0.275266, 0.424480],
[0.217863, 0.277985, 0.424206],
[0.222264, 0.280702, 0.423914],
[0.226598, 0.283419, 0.423678],
[0.230871, 0.286134, 0.423498],
[0.235120, 0.288848, 0.423304],
[0.239312, 0.291562, 0.423167],
[0.243485, 0.294274, 0.423014],
[0.247605, 0.296986, 0.422917],
[0.251675, 0.299698, 0.422873],
[0.255731, 0.302409, 0.422814],
[0.259740, 0.305120, 0.422810],
[0.263738, 0.307831, 0.422789],
[0.267693, 0.310542, 0.422821],
[0.271639, 0.313253, 0.422837],
[0.275513, 0.315965, 0.422979],
[0.279411, 0.318677, 0.423031],
[0.283240, 0.321390, 0.423211],
[0.287065, 0.324103, 0.423373],
[0.290884, 0.326816, 0.423517],
[0.294669, 0.329531, 0.423716],
[0.298421, 0.332247, 0.423973],
[0.302169, 0.334963, 0.424213],
[0.305886, 0.337681, 0.424512],
[0.309601, 0.340399, 0.424790],
[0.313287, 0.343120, 0.425120],
[0.316941, 0.345842, 0.425512],
[0.320595, 0.348565, 0.425889],
[0.324250, 0.351289, 0.426250],
[0.327875, 0.354016, 0.426670],
[0.331474, 0.356744, 0.427144],
[0.335073, 0.359474, 0.427605],
[0.338673, 0.362206, 0.428053],
[0.342246, 0.364939, 0.428559],
[0.345793, 0.367676, 0.429127],
[0.349341, 0.370414, 0.429685],
[0.352892, 0.373153, 0.430226],
[0.356418, 0.375896, 0.430823],
[0.359916, 0.378641, 0.431501],
[0.363446, 0.381388, 0.432075],
[0.366923, 0.384139, 0.432796],
[0.370430, 0.386890, 0.433428],
[0.373884, 0.389646, 0.434209],
[0.377371, 0.392404, 0.434890],
[0.380830, 0.395164, 0.435653],
[0.384268, 0.397928, 0.436475],
[0.387705, 0.400694, 0.437305],
[0.391151, 0.403464, 0.438096],
[0.394568, 0.406236, 0.438986],
[0.397991, 0.409011, 0.439848],
[0.401418, 0.411790, 0.440708],
[0.404820, 0.414572, 0.441642],
[0.408226, 0.417357, 0.442570],
[0.411607, 0.420145, 0.443577],
[0.414992, 0.422937, 0.444578],
[0.418383, 0.425733, 0.445560],
[0.421748, 0.428531, 0.446640],
[0.425120, 0.431334, 0.447692],
[0.428462, 0.434140, 0.448864],
[0.431817, 0.436950, 0.449982],
[0.435168, 0.439763, 0.451134],
[0.438504, 0.442580, 0.452341],
[0.441810, 0.445402, 0.453659],
[0.445148, 0.448226, 0.454885],
[0.448447, 0.451053, 0.456264],
[0.451759, 0.453887, 0.457582],
[0.455072, 0.456718, 0.458976],
[0.458366, 0.459552, 0.460457],
[0.461616, 0.462405, 0.461969],
[0.464947, 0.465241, 0.463395],
[0.468254, 0.468083, 0.464908],
[0.471501, 0.470960, 0.466357],
[0.474812, 0.473832, 0.467681],
[0.478186, 0.476699, 0.468845],
[0.481622, 0.479573, 0.469767],
[0.485141, 0.482451, 0.470384],
[0.488697, 0.485318, 0.471008],
[0.492278, 0.488198, 0.471453],
[0.495913, 0.491076, 0.471751],
[0.499552, 0.493960, 0.472032],
[0.503185, 0.496851, 0.472305],
[0.506866, 0.499743, 0.472432],
[0.510540, 0.502643, 0.472550],
[0.514226, 0.505546, 0.472640],
[0.517920, 0.508454, 0.472707],
[0.521643, 0.511367, 0.472639],
[0.525348, 0.514285, 0.472660],
[0.529086, 0.517207, 0.472543],
[0.532829, 0.520135, 0.472401],
[0.536553, 0.523067, 0.472352],
[0.540307, 0.526005, 0.472163],
[0.544069, 0.528948, 0.471947],
[0.547840, 0.531895, 0.471704],
[0.551612, 0.534849, 0.471439],
[0.555393, 0.537807, 0.471147],
[0.559181, 0.540771, 0.470829],
[0.562972, 0.543741, 0.470488],
[0.566802, 0.546715, 0.469988],
[0.570607, 0.549695, 0.469593],
[0.574417, 0.552682, 0.469172],
[0.578236, 0.555673, 0.468724],
[0.582087, 0.558670, 0.468118],
[0.585916, 0.561674, 0.467618],
[0.589753, 0.564682, 0.467090],
[0.593622, 0.567697, 0.466401],
[0.597469, 0.570718, 0.465821],
[0.601354, 0.573743, 0.465074],
[0.605211, 0.576777, 0.464441],
[0.609105, 0.579816, 0.463638],
[0.612977, 0.582861, 0.462950],
[0.616852, 0.585913, 0.462237],
[0.620765, 0.588970, 0.461351],
[0.624654, 0.592034, 0.460583],
[0.628576, 0.595104, 0.459641],
[0.632506, 0.598180, 0.458668],
[0.636412, 0.601264, 0.457818],
[0.640352, 0.604354, 0.456791],
[0.644270, 0.607450, 0.455886],
[0.648222, 0.610553, 0.454801],
[0.652178, 0.613664, 0.453689],
[0.656114, 0.616780, 0.452702],
[0.660082, 0.619904, 0.451534],
[0.664055, 0.623034, 0.450338],
[0.668008, 0.626171, 0.449270],
[0.671991, 0.629316, 0.448018],
[0.675981, 0.632468, 0.446736],
[0.679979, 0.635626, 0.445424],
[0.683950, 0.638793, 0.444251],
[0.687957, 0.641966, 0.442886],
[0.691971, 0.645145, 0.441491],
[0.695985, 0.648334, 0.440072],
[0.700008, 0.651529, 0.438624],
[0.704037, 0.654731, 0.437147],
[0.708067, 0.657942, 0.435647],
[0.712105, 0.661160, 0.434117],
[0.716177, 0.664384, 0.432386],
[0.720222, 0.667618, 0.430805],
[0.724274, 0.670859, 0.429194],
[0.728334, 0.674107, 0.427554],
[0.732422, 0.677364, 0.425717],
[0.736488, 0.680629, 0.424028],
[0.740589, 0.683900, 0.422131],
[0.744664, 0.687181, 0.420393],
[0.748772, 0.690470, 0.418448],
[0.752886, 0.693766, 0.416472],
[0.756975, 0.697071, 0.414659],
[0.761096, 0.700384, 0.412638],
[0.765223, 0.703705, 0.410587],
[0.769353, 0.707035, 0.408516],
[0.773486, 0.710373, 0.406422],
[0.777651, 0.713719, 0.404112],
[0.781795, 0.717074, 0.401966],
[0.785965, 0.720438, 0.399613],
[0.790116, 0.723810, 0.397423],
[0.794298, 0.727190, 0.395016],
[0.798480, 0.730580, 0.392597],
[0.802667, 0.733978, 0.390153],
[0.806859, 0.737385, 0.387684],
[0.811054, 0.740801, 0.385198],
[0.815274, 0.744226, 0.382504],
[0.819499, 0.747659, 0.379785],
[0.823729, 0.751101, 0.377043],
[0.827959, 0.754553, 0.374292],
[0.832192, 0.758014, 0.371529],
[0.836429, 0.761483, 0.368747],
[0.840693, 0.764962, 0.365746],
[0.844957, 0.768450, 0.362741],
[0.849223, 0.771947, 0.359729],
[0.853515, 0.775454, 0.356500],
[0.857809, 0.778969, 0.353259],
[0.862105, 0.782494, 0.350011],
[0.866421, 0.786028, 0.346571],
[0.870717, 0.789572, 0.343333],
[0.875057, 0.793125, 0.339685],
[0.879378, 0.796687, 0.336241],
[0.883720, 0.800258, 0.332599],
[0.888081, 0.803839, 0.328770],
[0.892440, 0.807430, 0.324968],
[0.896818, 0.811030, 0.320982],
[0.901195, 0.814639, 0.317021],
[0.905589, 0.818257, 0.312889],
[0.910000, 0.821885, 0.308594],
[0.914407, 0.825522, 0.304348],
[0.918828, 0.829168, 0.299960],
[0.923279, 0.832822, 0.295244],
[0.927724, 0.836486, 0.290611],
[0.932180, 0.840159, 0.285880],
[0.936660, 0.843841, 0.280876],
[0.941147, 0.847530, 0.275815],
[0.945654, 0.851228, 0.270532],
[0.950178, 0.854933, 0.265085],
[0.954725, 0.858646, 0.259365],
[0.959284, 0.862365, 0.253563],
[0.963872, 0.866089, 0.247445],
[0.968469, 0.869819, 0.241310],
[0.973114, 0.873550, 0.234677],
[0.977780, 0.877281, 0.227954],
[0.982497, 0.881008, 0.220878],
[0.987293, 0.884718, 0.213336],
[0.992218, 0.888385, 0.205468],
[0.994847, 0.892954, 0.203445],
[0.995249, 0.898384, 0.207561],
[0.995503, 0.903866, 0.212370],
[0.995737, 0.909344, 0.217772]]
cmaps = {}
for (name, data) in (('magma', _magma_data),
('inferno', _inferno_data),
('plasma', _plasma_data),
('viridis', _viridis_data),
('cividis', _cividis_data)):
cmaps[name] = ListedColormap(data, name=name)
# generate reversed colormap
name = name + '_r'
cmaps[name] = ListedColormap(list(reversed(data)), name=name)
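# --- Illustrative sketch (not part of the original module) ------------------
# How the listed data above ends up being used; assumes matplotlib is
# installed and that ListedColormap is imported at the top of this file, as
# it is in matplotlib's _cm_listed.py.
#
#     from matplotlib.cm import get_cmap
#     viridis = get_cmap('viridis')          # backed by _viridis_data
#     rgba = viridis(0.5)                    # map a scalar in [0, 1] to RGBA
#     reversed_map = get_cmap('viridis_r')   # reversed variant built above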
| 62,417 | 47.050808 | 65 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_pylab_helpers.py |
"""
Manage figures for pyplot interface.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import atexit
import gc
import sys
class Gcf(object):
"""
Singleton to manage a set of integer-numbered figures.
This class is never instantiated; it consists of two class
attributes (a list and a dictionary), and a set of static
methods that operate on those attributes, accessing them
directly as class attributes.
Attributes:
*figs*:
dictionary of the form {*num*: *manager*, ...}
*_activeQue*:
list of *managers*, with active one at the end
"""
_activeQue = []
figs = {}
@classmethod
def get_fig_manager(cls, num):
"""
If figure manager *num* exists, make it the active
figure and return the manager; otherwise return *None*.
"""
manager = cls.figs.get(num, None)
if manager is not None:
cls.set_active(manager)
return manager
@classmethod
def destroy(cls, num):
"""
Try to remove all traces of figure *num*.
In the interactive backends, this is bound to the
window "destroy" and "delete" events.
"""
if not cls.has_fignum(num):
return
manager = cls.figs[num]
manager.canvas.mpl_disconnect(manager._cidgcf)
cls._activeQue.remove(manager)
del cls.figs[num]
manager.destroy()
gc.collect(1)
@classmethod
def destroy_fig(cls, fig):
"*fig* is a Figure instance"
num = next((manager.num for manager in six.itervalues(cls.figs)
if manager.canvas.figure == fig), None)
if num is not None:
cls.destroy(num)
@classmethod
def destroy_all(cls):
        # this is needed to ensure that gc is available in corner cases
# where modules are being torn down after install with easy_install
import gc # noqa
for manager in list(cls.figs.values()):
manager.canvas.mpl_disconnect(manager._cidgcf)
manager.destroy()
cls._activeQue = []
cls.figs.clear()
gc.collect(1)
@classmethod
def has_fignum(cls, num):
"""
Return *True* if figure *num* exists.
"""
return num in cls.figs
@classmethod
def get_all_fig_managers(cls):
"""
Return a list of figure managers.
"""
return list(cls.figs.values())
@classmethod
def get_num_fig_managers(cls):
"""
Return the number of figures being managed.
"""
return len(cls.figs)
@classmethod
def get_active(cls):
"""
Return the manager of the active figure, or *None*.
"""
if len(cls._activeQue) == 0:
return None
else:
return cls._activeQue[-1]
@classmethod
def set_active(cls, manager):
"""
Make the figure corresponding to *manager* the active one.
"""
oldQue = cls._activeQue[:]
cls._activeQue = []
for m in oldQue:
if m != manager:
cls._activeQue.append(m)
cls._activeQue.append(manager)
cls.figs[manager.num] = manager
@classmethod
def draw_all(cls, force=False):
"""
Redraw all figures registered with the pyplot
state machine.
"""
for f_mgr in cls.get_all_fig_managers():
if force or f_mgr.canvas.figure.stale:
f_mgr.canvas.draw_idle()
atexit.register(Gcf.destroy_all)
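# --- Illustrative sketch (not part of the original module) ------------------
# Gcf is normally driven indirectly through pyplot, but the classmethods
# above can be exercised directly; a hedged example assuming pyplot has
# already created a figure:
#
#     import matplotlib.pyplot as plt
#     fig = plt.figure()                         # registers a manager in Gcf
#     manager = Gcf.get_fig_manager(fig.number)  # also makes it active
#     assert Gcf.get_active() is manager
#     Gcf.destroy(fig.number)                    # drops it from Gcf.figs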
| 3,656 | 25.309353 | 75 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/font_manager.py |
"""
A module for finding, managing, and using fonts across platforms.
This module provides a single :class:`FontManager` instance that can
be shared across backends and platforms. The :func:`findfont`
function returns the best TrueType (TTF) font file in the local or
system font path that matches the specified :class:`FontProperties`
instance. The :class:`FontManager` also handles Adobe Font Metrics
(AFM) font files for use by the PostScript backend.
The design is based on the `W3C Cascading Style Sheet, Level 1 (CSS1)
font specification <http://www.w3.org/TR/1998/REC-CSS2-19980512/>`_.
Future versions may implement the Level 2 or 2.1 specifications.
Experimental support is included for using `fontconfig` on Unix
variant platforms (Linux, OS X, Solaris). To enable it, set the
constant ``USE_FONTCONFIG`` in this file to ``True``. Fontconfig has
the advantage that it is the standard way to look up fonts on X11
platforms, so if a font is installed, it is much more likely to be
found.
"""
from __future__ import absolute_import, division, print_function
import six
"""
KNOWN ISSUES
- documentation
- font variant is untested
- font stretch is incomplete
- font size is incomplete
- default font algorithm needs improvement and testing
- setWeights function needs improvement
- 'light' is an invalid weight value, remove it.
- update_fonts not implemented
Authors : John Hunter <jdhunter@ace.bsd.uchicago.edu>
Paul Barrett <Barrett@STScI.Edu>
Michael Droettboom <mdroe@STScI.edu>
Copyright : John Hunter (2004,2005), Paul Barrett (2004,2005)
License : matplotlib license (PSF compatible)
The font directory code is from ttfquery,
see license/LICENSE_TTFQUERY.
"""
from collections import Iterable
import json
import os
import sys
from threading import Timer
import warnings
import logging
from matplotlib import afm, cbook, ft2font, rcParams, get_cachedir
from matplotlib.compat import subprocess
from matplotlib.fontconfig_pattern import (
parse_fontconfig_pattern, generate_fontconfig_pattern)
try:
from functools import lru_cache
except ImportError:
from backports.functools_lru_cache import lru_cache
_log = logging.getLogger(__name__)
USE_FONTCONFIG = False
font_scalings = {
'xx-small' : 0.579,
'x-small' : 0.694,
'small' : 0.833,
'medium' : 1.0,
'large' : 1.200,
'x-large' : 1.440,
'xx-large' : 1.728,
'larger' : 1.2,
'smaller' : 0.833,
None : 1.0}
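# Worked example (illustrative, not in the original source): with the default
# font.size rcParam of 10 points, a relative size of 'large' resolves to
# 1.200 * 10 = 12 pt and 'x-small' to 0.694 * 10 = 6.94 pt; see
# FontProperties.set_size below for where this scaling is applied.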
stretch_dict = {
'ultra-condensed' : 100,
'extra-condensed' : 200,
'condensed' : 300,
'semi-condensed' : 400,
'normal' : 500,
'semi-expanded' : 600,
'expanded' : 700,
'extra-expanded' : 800,
'ultra-expanded' : 900}
weight_dict = {
'ultralight' : 100,
'light' : 200,
'normal' : 400,
'regular' : 400,
'book' : 400,
'medium' : 500,
'roman' : 500,
'semibold' : 600,
'demibold' : 600,
'demi' : 600,
'bold' : 700,
'heavy' : 800,
'extra bold' : 800,
'black' : 900}
font_family_aliases = {
'serif',
'sans-serif',
'sans serif',
'cursive',
'fantasy',
'monospace',
'sans'}
# OS Font paths
MSFolders = \
r'Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders'
MSFontDirectories = [
r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\Fonts',
r'SOFTWARE\Microsoft\Windows\CurrentVersion\Fonts']
X11FontDirectories = [
# an old standard installation point
"/usr/X11R6/lib/X11/fonts/TTF/",
"/usr/X11/lib/X11/fonts",
# here is the new standard location for fonts
"/usr/share/fonts/",
# documented as a good place to install new fonts
"/usr/local/share/fonts/",
# common application, not really useful
"/usr/lib/openoffice/share/fonts/truetype/",
]
OSXFontDirectories = [
"/Library/Fonts/",
"/Network/Library/Fonts/",
"/System/Library/Fonts/",
# fonts installed via MacPorts
"/opt/local/share/fonts"
""
]
if not USE_FONTCONFIG and sys.platform != 'win32':
home = os.environ.get('HOME')
if home is not None:
# user fonts on OSX
path = os.path.join(home, 'Library', 'Fonts')
OSXFontDirectories.append(path)
path = os.path.join(home, '.fonts')
X11FontDirectories.append(path)
def get_fontext_synonyms(fontext):
"""
    Return a list of file extensions that are synonyms for
    the given file extension *fontext*.
"""
return {'ttf': ('ttf', 'otf'),
'otf': ('ttf', 'otf'),
'afm': ('afm',)}[fontext]
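# For example (illustrative note, not in the original source):
# get_fontext_synonyms('ttf') returns ('ttf', 'otf'), so OpenType files are
# picked up whenever TrueType fonts are requested.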
def list_fonts(directory, extensions):
"""
Return a list of all fonts matching any of the extensions,
possibly upper-cased, found recursively under the directory.
"""
pattern = ';'.join(['*.%s;*.%s' % (ext, ext.upper())
for ext in extensions])
return cbook.listFiles(directory, pattern)
def win32FontDirectory():
"""
Return the user-specified font directory for Win32. This is
looked up from the registry key::
\\\\HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders\\Fonts
If the key is not found, $WINDIR/Fonts will be returned.
"""
try:
from six.moves import winreg
except ImportError:
pass # Fall through to default
else:
try:
user = winreg.OpenKey(winreg.HKEY_CURRENT_USER, MSFolders)
try:
try:
return winreg.QueryValueEx(user, 'Fonts')[0]
except OSError:
pass # Fall through to default
finally:
winreg.CloseKey(user)
except OSError:
pass # Fall through to default
return os.path.join(os.environ['WINDIR'], 'Fonts')
def win32InstalledFonts(directory=None, fontext='ttf'):
"""
Search for fonts in the specified font directory, or use the
    system directories if none given. A list of TrueType font
    filenames is returned by default, or AFM font filenames if
    *fontext* == 'afm'.
"""
from six.moves import winreg
if directory is None:
directory = win32FontDirectory()
fontext = get_fontext_synonyms(fontext)
key, items = None, set()
for fontdir in MSFontDirectories:
try:
local = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, fontdir)
except OSError:
continue
if not local:
return list_fonts(directory, fontext)
try:
for j in range(winreg.QueryInfoKey(local)[1]):
try:
key, direc, tp = winreg.EnumValue(local, j)
if not isinstance(direc, six.string_types):
continue
# Work around for https://bugs.python.org/issue25778, which
# is fixed in Py>=3.6.1.
direc = direc.split("\0", 1)[0]
if not os.path.dirname(direc):
direc = os.path.join(directory, direc)
direc = os.path.abspath(direc).lower()
if os.path.splitext(direc)[1][1:] in fontext:
items.add(direc)
except EnvironmentError:
continue
except WindowsError:
continue
except MemoryError:
continue
return list(items)
finally:
winreg.CloseKey(local)
return None
def OSXInstalledFonts(directories=None, fontext='ttf'):
"""
Get list of font files on OS X - ignores font suffix by default.
"""
if directories is None:
directories = OSXFontDirectories
fontext = get_fontext_synonyms(fontext)
files = []
for path in directories:
if fontext is None:
files.extend(cbook.listFiles(path, '*'))
else:
files.extend(list_fonts(path, fontext))
return files
@lru_cache()
def _call_fc_list():
"""Cache and list the font filenames known to `fc-list`.
"""
# Delay the warning by 5s.
timer = Timer(5, lambda: warnings.warn(
'Matplotlib is building the font cache using fc-list. '
'This may take a moment.'))
timer.start()
try:
out = subprocess.check_output([str('fc-list'), '--format=%{file}\\n'])
except (OSError, subprocess.CalledProcessError):
return []
finally:
timer.cancel()
fnames = []
for fname in out.split(b'\n'):
try:
fname = six.text_type(fname, sys.getfilesystemencoding())
except UnicodeDecodeError:
continue
fnames.append(fname)
return fnames
def get_fontconfig_fonts(fontext='ttf'):
"""List the font filenames known to `fc-list` having the given extension.
"""
fontext = get_fontext_synonyms(fontext)
return [fname for fname in _call_fc_list()
if os.path.splitext(fname)[1][1:] in fontext]
def findSystemFonts(fontpaths=None, fontext='ttf'):
"""
Search for fonts in the specified font paths. If no paths are
given, will use a standard set of system paths, as well as the
list of fonts tracked by fontconfig if fontconfig is installed and
    available. A list of TrueType fonts is returned by default, with
    AFM fonts as an option.
"""
fontfiles = set()
fontexts = get_fontext_synonyms(fontext)
if fontpaths is None:
if sys.platform == 'win32':
fontdir = win32FontDirectory()
fontpaths = [fontdir]
# now get all installed fonts directly...
for f in win32InstalledFonts(fontdir):
base, ext = os.path.splitext(f)
if len(ext)>1 and ext[1:].lower() in fontexts:
fontfiles.add(f)
else:
fontpaths = X11FontDirectories
# check for OS X & load its fonts if present
if sys.platform == 'darwin':
for f in OSXInstalledFonts(fontext=fontext):
fontfiles.add(f)
for f in get_fontconfig_fonts(fontext):
fontfiles.add(f)
elif isinstance(fontpaths, six.string_types):
fontpaths = [fontpaths]
for path in fontpaths:
files = list_fonts(path, fontexts)
for fname in files:
fontfiles.add(os.path.abspath(fname))
return [fname for fname in fontfiles if os.path.exists(fname)]
@cbook.deprecated("2.1")
def weight_as_number(weight):
"""
Return the weight property as a numeric value. String values
are converted to their corresponding numeric value.
"""
if isinstance(weight, six.string_types):
try:
weight = weight_dict[weight.lower()]
except KeyError:
weight = 400
elif weight in range(100, 1000, 100):
pass
else:
raise ValueError('weight not a valid integer')
return weight
class FontEntry(object):
"""
A class for storing Font properties. It is used when populating
the font lookup dictionary.
"""
def __init__(self,
fname ='',
name ='',
style ='normal',
variant='normal',
weight ='normal',
stretch='normal',
size ='medium',
):
self.fname = fname
self.name = name
self.style = style
self.variant = variant
self.weight = weight
self.stretch = stretch
try:
self.size = str(float(size))
except ValueError:
self.size = size
def __repr__(self):
return "<Font '%s' (%s) %s %s %s %s>" % (
self.name, os.path.basename(self.fname), self.style, self.variant,
self.weight, self.stretch)
def ttfFontProperty(font):
"""
Extract information from a TrueType font file.
Parameters
----------
font : `.FT2Font`
The TrueType font file from which information will be extracted.
Returns
-------
`FontEntry`
The extracted font properties.
"""
name = font.family_name
# Styles are: italic, oblique, and normal (default)
sfnt = font.get_sfnt()
sfnt2 = sfnt.get((1,0,0,2))
sfnt4 = sfnt.get((1,0,0,4))
if sfnt2:
sfnt2 = sfnt2.decode('mac_roman').lower()
else:
sfnt2 = ''
if sfnt4:
sfnt4 = sfnt4.decode('mac_roman').lower()
else:
sfnt4 = ''
if sfnt4.find('oblique') >= 0:
style = 'oblique'
elif sfnt4.find('italic') >= 0:
style = 'italic'
elif sfnt2.find('regular') >= 0:
style = 'normal'
elif font.style_flags & ft2font.ITALIC:
style = 'italic'
else:
style = 'normal'
# Variants are: small-caps and normal (default)
# !!!! Untested
if name.lower() in ['capitals', 'small-caps']:
variant = 'small-caps'
else:
variant = 'normal'
weight = next((w for w in weight_dict if sfnt4.find(w) >= 0), None)
if not weight:
if font.style_flags & ft2font.BOLD:
weight = 700
else:
weight = 400
# Stretch can be absolute and relative
# Absolute stretches are: ultra-condensed, extra-condensed, condensed,
# semi-condensed, normal, semi-expanded, expanded, extra-expanded,
# and ultra-expanded.
# Relative stretches are: wider, narrower
# Child value is: inherit
if (sfnt4.find('narrow') >= 0 or sfnt4.find('condensed') >= 0 or
sfnt4.find('cond') >= 0):
stretch = 'condensed'
elif sfnt4.find('demi cond') >= 0:
stretch = 'semi-condensed'
elif sfnt4.find('wide') >= 0 or sfnt4.find('expanded') >= 0:
stretch = 'expanded'
else:
stretch = 'normal'
# Sizes can be absolute and relative.
# Absolute sizes are: xx-small, x-small, small, medium, large, x-large,
# and xx-large.
# Relative sizes are: larger, smaller
# Length value is an absolute font size, e.g., 12pt
# Percentage values are in 'em's. Most robust specification.
if not font.scalable:
raise NotImplementedError("Non-scalable fonts are not supported")
size = 'scalable'
return FontEntry(font.fname, name, style, variant, weight, stretch, size)
def afmFontProperty(fontpath, font):
"""
Extract information from an AFM font file.
Parameters
----------
font : `.AFM`
The AFM font file from which information will be extracted.
Returns
-------
`FontEntry`
The extracted font properties.
"""
name = font.get_familyname()
fontname = font.get_fontname().lower()
# Styles are: italic, oblique, and normal (default)
if font.get_angle() != 0 or name.lower().find('italic') >= 0:
style = 'italic'
elif name.lower().find('oblique') >= 0:
style = 'oblique'
else:
style = 'normal'
# Variants are: small-caps and normal (default)
# !!!! Untested
if name.lower() in ['capitals', 'small-caps']:
variant = 'small-caps'
else:
variant = 'normal'
weight = font.get_weight().lower()
# Stretch can be absolute and relative
# Absolute stretches are: ultra-condensed, extra-condensed, condensed,
# semi-condensed, normal, semi-expanded, expanded, extra-expanded,
# and ultra-expanded.
# Relative stretches are: wider, narrower
# Child value is: inherit
if fontname.find('narrow') >= 0 or fontname.find('condensed') >= 0 or \
fontname.find('cond') >= 0:
stretch = 'condensed'
elif fontname.find('demi cond') >= 0:
stretch = 'semi-condensed'
elif fontname.find('wide') >= 0 or fontname.find('expanded') >= 0:
stretch = 'expanded'
else:
stretch = 'normal'
# Sizes can be absolute and relative.
# Absolute sizes are: xx-small, x-small, small, medium, large, x-large,
# and xx-large.
# Relative sizes are: larger, smaller
# Length value is an absolute font size, e.g., 12pt
# Percentage values are in 'em's. Most robust specification.
# All AFM fonts are apparently scalable.
size = 'scalable'
return FontEntry(fontpath, name, style, variant, weight, stretch, size)
def createFontList(fontfiles, fontext='ttf'):
"""
A function to create a font lookup list. The default is to create
a list of TrueType fonts. An AFM font list can optionally be
created.
"""
fontlist = []
# Add fonts from list of known font files.
seen = set()
for fpath in fontfiles:
_log.debug('createFontDict: %s', fpath)
fname = os.path.split(fpath)[1]
if fname in seen:
continue
else:
seen.add(fname)
if fontext == 'afm':
try:
fh = open(fpath, 'rb')
except EnvironmentError:
_log.info("Could not open font file %s", fpath)
continue
try:
font = afm.AFM(fh)
except RuntimeError:
_log.info("Could not parse font file %s", fpath)
continue
finally:
fh.close()
try:
prop = afmFontProperty(fpath, font)
except KeyError:
continue
else:
try:
font = ft2font.FT2Font(fpath)
except RuntimeError:
_log.info("Could not open font file %s", fpath)
continue
except UnicodeError:
_log.info("Cannot handle unicode filenames")
continue
except IOError:
_log.info("IO error - cannot open font file %s", fpath)
continue
try:
prop = ttfFontProperty(font)
except (KeyError, RuntimeError, ValueError, NotImplementedError):
continue
fontlist.append(prop)
return fontlist
class FontProperties(object):
"""
A class for storing and manipulating font properties.
The font properties are those described in the `W3C Cascading
Style Sheet, Level 1
<http://www.w3.org/TR/1998/REC-CSS2-19980512/>`_ font
specification. The six properties are:
- family: A list of font names in decreasing order of priority.
The items may include a generic font family name, either
'serif', 'sans-serif', 'cursive', 'fantasy', or 'monospace'.
In that case, the actual font to be used will be looked up
from the associated rcParam in :file:`matplotlibrc`.
- style: Either 'normal', 'italic' or 'oblique'.
- variant: Either 'normal' or 'small-caps'.
- stretch: A numeric value in the range 0-1000 or one of
'ultra-condensed', 'extra-condensed', 'condensed',
'semi-condensed', 'normal', 'semi-expanded', 'expanded',
'extra-expanded' or 'ultra-expanded'
- weight: A numeric value in the range 0-1000 or one of
'ultralight', 'light', 'normal', 'regular', 'book', 'medium',
'roman', 'semibold', 'demibold', 'demi', 'bold', 'heavy',
'extra bold', 'black'
    - size: Either a relative value of 'xx-small', 'x-small',
'small', 'medium', 'large', 'x-large', 'xx-large' or an
absolute font size, e.g., 12
The default font property for TrueType fonts (as specified in the
default :file:`matplotlibrc` file) is::
sans-serif, normal, normal, normal, normal, scalable.
Alternatively, a font may be specified using an absolute path to a
.ttf file, by using the *fname* kwarg.
The preferred usage of font sizes is to use the relative values,
e.g., 'large', instead of absolute font sizes, e.g., 12. This
approach allows all text sizes to be made larger or smaller based
on the font manager's default font size.
This class will also accept a `fontconfig
<https://www.freedesktop.org/wiki/Software/fontconfig/>`_ pattern, if it is
the only argument provided. See the documentation on `fontconfig patterns
<https://www.freedesktop.org/software/fontconfig/fontconfig-user.html>`_.
This support does not require fontconfig to be installed. We are merely
borrowing its pattern syntax for use here.
Note that matplotlib's internal font manager and fontconfig use a
different algorithm to lookup fonts, so the results of the same pattern
may be different in matplotlib than in other applications that use
fontconfig.
"""
def __init__(self,
family = None,
style = None,
variant= None,
weight = None,
stretch= None,
size = None,
fname = None, # if this is set, it's a hardcoded filename to use
_init = None # used only by copy()
):
self._family = _normalize_font_family(rcParams['font.family'])
self._slant = rcParams['font.style']
self._variant = rcParams['font.variant']
self._weight = rcParams['font.weight']
self._stretch = rcParams['font.stretch']
self._size = rcParams['font.size']
self._file = None
# This is used only by copy()
if _init is not None:
self.__dict__.update(_init.__dict__)
return
if isinstance(family, six.string_types):
# Treat family as a fontconfig pattern if it is the only
# parameter provided.
if (style is None and
variant is None and
weight is None and
stretch is None and
size is None and
fname is None):
self.set_fontconfig_pattern(family)
return
self.set_family(family)
self.set_style(style)
self.set_variant(variant)
self.set_weight(weight)
self.set_stretch(stretch)
self.set_file(fname)
self.set_size(size)
def _parse_fontconfig_pattern(self, pattern):
return parse_fontconfig_pattern(pattern)
def __hash__(self):
l = (tuple(self.get_family()),
self.get_slant(),
self.get_variant(),
self.get_weight(),
self.get_stretch(),
self.get_size_in_points(),
self.get_file())
return hash(l)
def __eq__(self, other):
return hash(self) == hash(other)
def __ne__(self, other):
return hash(self) != hash(other)
def __str__(self):
return self.get_fontconfig_pattern()
def get_family(self):
"""
Return a list of font names that comprise the font family.
"""
return self._family
def get_name(self):
"""
Return the name of the font that best matches the font
properties.
"""
return get_font(findfont(self)).family_name
def get_style(self):
"""
Return the font style. Values are: 'normal', 'italic' or
'oblique'.
"""
return self._slant
get_slant = get_style
def get_variant(self):
"""
Return the font variant. Values are: 'normal' or
'small-caps'.
"""
return self._variant
def get_weight(self):
"""
        Return the font weight. Options are: a numeric value in the
range 0-1000 or one of 'light', 'normal', 'regular', 'book',
'medium', 'roman', 'semibold', 'demibold', 'demi', 'bold',
'heavy', 'extra bold', 'black'
"""
return self._weight
def get_stretch(self):
"""
Return the font stretch or width. Options are: 'ultra-condensed',
'extra-condensed', 'condensed', 'semi-condensed', 'normal',
'semi-expanded', 'expanded', 'extra-expanded', 'ultra-expanded'.
"""
return self._stretch
def get_size(self):
"""
Return the font size.
"""
return self._size
def get_size_in_points(self):
return self._size
def get_file(self):
"""
Return the filename of the associated font.
"""
return self._file
def get_fontconfig_pattern(self):
"""
Get a fontconfig pattern suitable for looking up the font as
specified with fontconfig's ``fc-match`` utility.
See the documentation on `fontconfig patterns
<https://www.freedesktop.org/software/fontconfig/fontconfig-user.html>`_.
This support does not require fontconfig to be installed or
support for it to be enabled. We are merely borrowing its
pattern syntax for use here.
"""
return generate_fontconfig_pattern(self)
def set_family(self, family):
"""
        Change the font family. May be either an alias (generic name
        in CSS parlance), such as: 'serif', 'sans-serif', 'cursive',
'fantasy', or 'monospace', a real font name or a list of real
font names. Real font names are not supported when
`text.usetex` is `True`.
"""
if family is None:
family = rcParams['font.family']
self._family = _normalize_font_family(family)
set_name = set_family
def set_style(self, style):
"""
Set the font style. Values are: 'normal', 'italic' or
'oblique'.
"""
if style is None:
style = rcParams['font.style']
if style not in ('normal', 'italic', 'oblique'):
raise ValueError("style must be normal, italic or oblique")
self._slant = style
set_slant = set_style
def set_variant(self, variant):
"""
Set the font variant. Values are: 'normal' or 'small-caps'.
"""
if variant is None:
variant = rcParams['font.variant']
if variant not in ('normal', 'small-caps'):
raise ValueError("variant must be normal or small-caps")
self._variant = variant
def set_weight(self, weight):
"""
Set the font weight. May be either a numeric value in the
range 0-1000 or one of 'ultralight', 'light', 'normal',
'regular', 'book', 'medium', 'roman', 'semibold', 'demibold',
'demi', 'bold', 'heavy', 'extra bold', 'black'
"""
if weight is None:
weight = rcParams['font.weight']
try:
weight = int(weight)
if weight < 0 or weight > 1000:
raise ValueError()
except ValueError:
if weight not in weight_dict:
raise ValueError("weight is invalid")
self._weight = weight
def set_stretch(self, stretch):
"""
Set the font stretch or width. Options are: 'ultra-condensed',
'extra-condensed', 'condensed', 'semi-condensed', 'normal',
'semi-expanded', 'expanded', 'extra-expanded' or
'ultra-expanded', or a numeric value in the range 0-1000.
"""
if stretch is None:
stretch = rcParams['font.stretch']
try:
stretch = int(stretch)
if stretch < 0 or stretch > 1000:
raise ValueError()
except ValueError:
if stretch not in stretch_dict:
raise ValueError("stretch is invalid")
self._stretch = stretch
def set_size(self, size):
"""
        Set the font size. Either a relative value of 'xx-small',
'x-small', 'small', 'medium', 'large', 'x-large', 'xx-large'
or an absolute font size, e.g., 12.
"""
if size is None:
size = rcParams['font.size']
try:
size = float(size)
except ValueError:
try:
scale = font_scalings[size]
except KeyError:
raise ValueError(
"Size is invalid. Valid font size are "
+ ", ".join(map(str, font_scalings)))
else:
size = scale * FontManager.get_default_size()
if size < 1.0:
_log.info('Fontsize %1.2f < 1.0 pt not allowed by FreeType. '
'Setting fontsize = 1 pt', size)
size = 1.0
self._size = size
def set_file(self, file):
"""
Set the filename of the fontfile to use. In this case, all
other properties will be ignored.
"""
self._file = file
def set_fontconfig_pattern(self, pattern):
"""
Set the properties by parsing a fontconfig *pattern*.
See the documentation on `fontconfig patterns
<https://www.freedesktop.org/software/fontconfig/fontconfig-user.html>`_.
This support does not require fontconfig to be installed or
support for it to be enabled. We are merely borrowing its
pattern syntax for use here.
"""
for key, val in six.iteritems(self._parse_fontconfig_pattern(pattern)):
if type(val) == list:
getattr(self, "set_" + key)(val[0])
else:
getattr(self, "set_" + key)(val)
def copy(self):
"""Return a deep copy of self"""
return FontProperties(_init=self)
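# --- Illustrative sketch (not part of the original module) ------------------
# Two equivalent ways of building a FontProperties, per the class docstring
# above: keyword arguments, or a single fontconfig-style pattern string. The
# exact pattern text shown is an assumption; the real grammar lives in
# matplotlib.fontconfig_pattern.
#
#     prop = FontProperties(family='serif', weight='bold', size=14)
#     pat = prop.get_fontconfig_pattern()   # e.g. 'serif:weight=bold:size=14'
#     same = FontProperties(pat)            # round-trips through the pattern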
@cbook.deprecated("2.1")
def ttfdict_to_fnames(d):
"""
flatten a ttfdict to all the filenames it contains
"""
fnames = []
for named in six.itervalues(d):
for styled in six.itervalues(named):
for variantd in six.itervalues(styled):
for weightd in six.itervalues(variantd):
for stretchd in six.itervalues(weightd):
for fname in six.itervalues(stretchd):
fnames.append(fname)
return fnames
class JSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, FontManager):
return dict(o.__dict__, _class='FontManager')
elif isinstance(o, FontEntry):
return dict(o.__dict__, _class='FontEntry')
else:
return super(JSONEncoder, self).default(o)
def _json_decode(o):
cls = o.pop('_class', None)
if cls is None:
return o
elif cls == 'FontManager':
r = FontManager.__new__(FontManager)
r.__dict__.update(o)
return r
elif cls == 'FontEntry':
r = FontEntry.__new__(FontEntry)
r.__dict__.update(o)
return r
else:
raise ValueError("don't know how to deserialize _class=%s" % cls)
def json_dump(data, filename):
"""Dumps a data structure as JSON in the named file.
Handles FontManager and its fields."""
with open(filename, 'w') as fh:
try:
json.dump(data, fh, cls=JSONEncoder, indent=2)
except IOError as e:
            warnings.warn('Could not save font_manager cache: %s' % e)
def json_load(filename):
"""Loads a data structure as JSON from the named file.
Handles FontManager and its fields."""
with open(filename, 'r') as fh:
return json.load(fh, object_hook=_json_decode)
def _normalize_font_family(family):
if isinstance(family, six.string_types):
family = [six.text_type(family)]
elif isinstance(family, Iterable):
family = [six.text_type(f) for f in family]
return family
class TempCache(object):
"""
A class to store temporary caches that are (a) not saved to disk
and (b) invalidated whenever certain font-related
rcParams---namely the family lookup lists---are changed or the
font cache is reloaded. This avoids the expensive linear search
through all fonts every time a font is looked up.
"""
    # A list of rcParam names that, when changed, invalidate this
# cache.
invalidating_rcparams = (
'font.serif', 'font.sans-serif', 'font.cursive', 'font.fantasy',
'font.monospace')
def __init__(self):
self._lookup_cache = {}
self._last_rcParams = self.make_rcparams_key()
def make_rcparams_key(self):
return [id(fontManager)] + [
rcParams[param] for param in self.invalidating_rcparams]
def get(self, prop):
key = self.make_rcparams_key()
if key != self._last_rcParams:
self._lookup_cache = {}
self._last_rcParams = key
return self._lookup_cache.get(prop)
def set(self, prop, value):
key = self.make_rcparams_key()
if key != self._last_rcParams:
self._lookup_cache = {}
self._last_rcParams = key
self._lookup_cache[prop] = value
class FontManager(object):
"""
On import, the :class:`FontManager` singleton instance creates a
list of TrueType fonts based on the font properties: name, style,
variant, weight, stretch, and size. The :meth:`findfont` method
does a nearest neighbor search to find the font that most closely
matches the specification. If no good enough match is found, a
default font is returned.
"""
# Increment this version number whenever the font cache data
    # format or behavior has changed and requires existing font
# cache files to be rebuilt.
__version__ = 201
def __init__(self, size=None, weight='normal'):
self._version = self.__version__
self.__default_weight = weight
self.default_size = size
paths = [os.path.join(rcParams['datapath'], 'fonts', 'ttf'),
os.path.join(rcParams['datapath'], 'fonts', 'afm'),
os.path.join(rcParams['datapath'], 'fonts', 'pdfcorefonts')]
# Create list of font paths
for pathname in ['TTFPATH', 'AFMPATH']:
if pathname in os.environ:
ttfpath = os.environ[pathname]
if ttfpath.find(';') >= 0: #win32 style
paths.extend(ttfpath.split(';'))
elif ttfpath.find(':') >= 0: # unix style
paths.extend(ttfpath.split(':'))
else:
paths.append(ttfpath)
_log.info('font search path %s', str(paths))
# Load TrueType fonts and create font dictionary.
self.ttffiles = findSystemFonts(paths) + findSystemFonts()
self.defaultFamily = {
'ttf': 'DejaVu Sans',
'afm': 'Helvetica'}
self.defaultFont = {}
for fname in self.ttffiles:
_log.debug('trying fontname %s', fname)
            if fname.lower().find('dejavusans.ttf') >= 0:
self.defaultFont['ttf'] = fname
break
else:
# use anything
self.defaultFont['ttf'] = self.ttffiles[0]
self.ttflist = createFontList(self.ttffiles)
self.afmfiles = (findSystemFonts(paths, fontext='afm')
+ findSystemFonts(fontext='afm'))
self.afmlist = createFontList(self.afmfiles, fontext='afm')
if len(self.afmfiles):
self.defaultFont['afm'] = self.afmfiles[0]
else:
self.defaultFont['afm'] = None
def get_default_weight(self):
"""
Return the default font weight.
"""
return self.__default_weight
@staticmethod
def get_default_size():
"""
Return the default font size.
"""
return rcParams['font.size']
def set_default_weight(self, weight):
"""
Set the default font weight. The initial value is 'normal'.
"""
self.__default_weight = weight
def update_fonts(self, filenames):
"""
Update the font dictionary with new font files.
Currently not implemented.
"""
# !!!! Needs implementing
raise NotImplementedError
# Each of the scoring functions below should return a value between
# 0.0 (perfect match) and 1.0 (terrible match)
def score_family(self, families, family2):
"""
Returns a match score between the list of font families in
*families* and the font family name *family2*.
An exact match at the head of the list returns 0.0.
A match further down the list will return between 0 and 1.
No match will return 1.0.
"""
if not isinstance(families, (list, tuple)):
families = [families]
elif len(families) == 0:
return 1.0
family2 = family2.lower()
step = 1 / len(families)
for i, family1 in enumerate(families):
family1 = family1.lower()
if family1 in font_family_aliases:
if family1 in ('sans', 'sans serif'):
family1 = 'sans-serif'
options = rcParams['font.' + family1]
options = [x.lower() for x in options]
if family2 in options:
idx = options.index(family2)
return (i + (idx / len(options))) * step
elif family1 == family2:
# The score should be weighted by where in the
# list the font was found.
return i * step
return 1.0
def score_style(self, style1, style2):
"""
Returns a match score between *style1* and *style2*.
An exact match returns 0.0.
A match between 'italic' and 'oblique' returns 0.1.
No match returns 1.0.
"""
if style1 == style2:
return 0.0
elif style1 in ('italic', 'oblique') and \
style2 in ('italic', 'oblique'):
return 0.1
return 1.0
def score_variant(self, variant1, variant2):
"""
Returns a match score between *variant1* and *variant2*.
An exact match returns 0.0, otherwise 1.0.
"""
if variant1 == variant2:
return 0.0
else:
return 1.0
def score_stretch(self, stretch1, stretch2):
"""
Returns a match score between *stretch1* and *stretch2*.
The result is the absolute value of the difference between the
CSS numeric values of *stretch1* and *stretch2*, normalized
between 0.0 and 1.0.
"""
try:
stretchval1 = int(stretch1)
except ValueError:
stretchval1 = stretch_dict.get(stretch1, 500)
try:
stretchval2 = int(stretch2)
except ValueError:
stretchval2 = stretch_dict.get(stretch2, 500)
return abs(stretchval1 - stretchval2) / 1000.0
def score_weight(self, weight1, weight2):
"""
Returns a match score between *weight1* and *weight2*.
        The result is 0.0 if both *weight1* and *weight2* are given as strings
and have the same value.
Otherwise, the result is the absolute value of the difference between the
CSS numeric values of *weight1* and *weight2*, normalized
between 0.05 and 1.0.
"""
# exact match of the weight names (e.g. weight1 == weight2 == "regular")
if (isinstance(weight1, six.string_types) and
isinstance(weight2, six.string_types) and
weight1 == weight2):
return 0.0
try:
weightval1 = int(weight1)
except ValueError:
weightval1 = weight_dict.get(weight1, 500)
try:
weightval2 = int(weight2)
except ValueError:
weightval2 = weight_dict.get(weight2, 500)
return 0.95*(abs(weightval1 - weightval2) / 1000.0) + 0.05
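    # Worked example (illustrative, not in the original source): scoring
    # 'bold' (700) against 'normal' (400) with the formula above gives
    # 0.95 * |700 - 400| / 1000 + 0.05 = 0.335, whereas two identical weight
    # strings short-circuit to a perfect score of 0.0.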
def score_size(self, size1, size2):
"""
Returns a match score between *size1* and *size2*.
If *size2* (the size specified in the font file) is 'scalable', this
function always returns 0.0, since any font size can be generated.
Otherwise, the result is the absolute distance between *size1* and
*size2*, normalized so that the usual range of font sizes (6pt -
72pt) will lie between 0.0 and 1.0.
"""
if size2 == 'scalable':
return 0.0
        # The size value should already have been converted to a float in
        # points; fall back to the relative-size scalings just in case.
try:
sizeval1 = float(size1)
except ValueError:
sizeval1 = self.default_size * font_scalings[size1]
try:
sizeval2 = float(size2)
except ValueError:
return 1.0
return abs(sizeval1 - sizeval2) / 72.0
def findfont(self, prop, fontext='ttf', directory=None,
fallback_to_default=True, rebuild_if_missing=True):
"""
Search the font list for the font that most closely matches
the :class:`FontProperties` *prop*.
:meth:`findfont` performs a nearest neighbor search. Each
font is given a similarity score to the target font
properties. The first font with the highest score is
returned. If no matches below a certain threshold are found,
the default font (usually DejaVu Sans) is returned.
        If `directory` is specified, only fonts from the given directory
        (or a subdirectory of that directory) will be returned.
        The result is cached, so subsequent lookups don't have to
        perform the O(n) nearest neighbor search.
        If `fallback_to_default` is True, will fall back to the default
        font family (usually "DejaVu Sans" or "Helvetica") if
        the first lookup hard-fails.
See the `W3C Cascading Style Sheet, Level 1
<http://www.w3.org/TR/1998/REC-CSS2-19980512/>`_ documentation
for a description of the font finding algorithm.
"""
if not isinstance(prop, FontProperties):
prop = FontProperties(prop)
fname = prop.get_file()
if fname is not None:
_log.debug('findfont returning %s', fname)
return fname
if fontext == 'afm':
fontlist = self.afmlist
else:
fontlist = self.ttflist
if directory is None:
cached = _lookup_cache[fontext].get(prop)
if cached is not None:
return cached
else:
directory = os.path.normcase(directory)
best_score = 1e64
best_font = None
for font in fontlist:
if (directory is not None and
os.path.commonprefix([os.path.normcase(font.fname),
directory]) != directory):
continue
# Matching family should have highest priority, so it is multiplied
# by 10.0
score = \
self.score_family(prop.get_family(), font.name) * 10.0 + \
self.score_style(prop.get_style(), font.style) + \
self.score_variant(prop.get_variant(), font.variant) + \
self.score_weight(prop.get_weight(), font.weight) + \
self.score_stretch(prop.get_stretch(), font.stretch) + \
self.score_size(prop.get_size(), font.size)
if score < best_score:
best_score = score
best_font = font
if score == 0:
break
if best_font is None or best_score >= 10.0:
if fallback_to_default:
warnings.warn(
'findfont: Font family %s not found. Falling back to %s' %
(prop.get_family(), self.defaultFamily[fontext]))
default_prop = prop.copy()
default_prop.set_family(self.defaultFamily[fontext])
return self.findfont(default_prop, fontext, directory, False)
else:
# This is a hard fail -- we can't find anything reasonable,
                # so just return the default font (DejaVuSans.ttf).
warnings.warn(
'findfont: Could not match %s. Returning %s' %
(prop, self.defaultFont[fontext]),
UserWarning)
result = self.defaultFont[fontext]
else:
_log.debug(
'findfont: Matching %s to %s (%s) with score of %f' %
(prop, best_font.name, repr(best_font.fname), best_score))
result = best_font.fname
if not os.path.isfile(result):
if rebuild_if_missing:
_log.info(
'findfont: Found a missing font file. Rebuilding cache.')
_rebuild()
return fontManager.findfont(
prop, fontext, directory, True, False)
else:
raise ValueError("No valid font could be found")
if directory is None:
_lookup_cache[fontext].set(prop, result)
return result
_is_opentype_cff_font_cache = {}
def is_opentype_cff_font(filename):
"""
Returns True if the given font is a Postscript Compact Font Format
Font embedded in an OpenType wrapper. Used by the PostScript and
    PDF backends that cannot subset these fonts.
"""
if os.path.splitext(filename)[1].lower() == '.otf':
result = _is_opentype_cff_font_cache.get(filename)
if result is None:
with open(filename, 'rb') as fd:
tag = fd.read(4)
result = (tag == b'OTTO')
_is_opentype_cff_font_cache[filename] = result
return result
return False
fontManager = None
_fmcache = None
_get_font = lru_cache(64)(ft2font.FT2Font)
def get_font(filename, hinting_factor=None):
if hinting_factor is None:
hinting_factor = rcParams['text.hinting_factor']
return _get_font(filename, hinting_factor)
# The experimental fontconfig-based backend.
if USE_FONTCONFIG and sys.platform != 'win32':
def fc_match(pattern, fontext):
fontexts = get_fontext_synonyms(fontext)
ext = "." + fontext
try:
pipe = subprocess.Popen(
['fc-match', '-s', '--format=%{file}\\n', pattern],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output = pipe.communicate()[0]
except (OSError, IOError):
return None
# The bulk of the output from fc-list is ascii, so we keep the
# result in bytes and parse it as bytes, until we extract the
        # filename, which is in sys.getfilesystemencoding().
if pipe.returncode == 0:
for fname in output.split(b'\n'):
try:
fname = six.text_type(fname, sys.getfilesystemencoding())
except UnicodeDecodeError:
continue
if os.path.splitext(fname)[1][1:] in fontexts:
return fname
return None
_fc_match_cache = {}
def findfont(prop, fontext='ttf'):
if not isinstance(prop, six.string_types):
prop = prop.get_fontconfig_pattern()
cached = _fc_match_cache.get(prop)
if cached is not None:
return cached
result = fc_match(prop, fontext)
if result is None:
result = fc_match(':', fontext)
_fc_match_cache[prop] = result
return result
else:
_fmcache = None
cachedir = get_cachedir()
if cachedir is not None:
_fmcache = os.path.join(cachedir, 'fontList.json')
fontManager = None
_lookup_cache = {
'ttf': TempCache(),
'afm': TempCache()
}
def _rebuild():
global fontManager
fontManager = FontManager()
if _fmcache:
with cbook.Locked(cachedir):
json_dump(fontManager, _fmcache)
_log.info("generated new fontManager")
if _fmcache:
try:
fontManager = json_load(_fmcache)
if (not hasattr(fontManager, '_version') or
fontManager._version != FontManager.__version__):
_rebuild()
else:
fontManager.default_size = None
_log.debug("Using fontManager instance from %s", _fmcache)
except cbook.Locked.TimeoutError:
raise
except:
_rebuild()
else:
_rebuild()
def findfont(prop, **kw):
global fontManager
font = fontManager.findfont(prop, **kw)
return font
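# --- Illustrative sketch (not part of the original module) ------------------
# Typical use of the module-level findfont() defined above; the file actually
# returned depends on the installed fonts, so the path in the comment is only
# a hypothetical example.
#
#     from matplotlib.font_manager import FontProperties, findfont
#     prop = FontProperties(family='sans-serif', weight='bold')
#     path = findfont(prop)   # e.g. '.../fonts/ttf/DejaVuSans-Bold.ttf'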
| 48,332 | 31.723764 | 105 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/path.py |
r"""
A module for dealing with the polylines used throughout Matplotlib.
The primary class for polyline handling in Matplotlib is `Path`. Almost all
vector drawing makes use of `Path`\s somewhere in the drawing pipeline.
Whilst a `Path` instance itself cannot be drawn, some `.Artist` subclasses,
such as `.PathPatch` and `.PathCollection`, can be used for convenient `Path`
visualisation.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from weakref import WeakValueDictionary
import numpy as np
from . import _path, rcParams
from .cbook import (_to_unmasked_float_array, simple_linear_interpolation,
maxdict)
class Path(object):
"""
:class:`Path` represents a series of possibly disconnected,
possibly closed, line and curve segments.
The underlying storage is made up of two parallel numpy arrays:
- *vertices*: an Nx2 float array of vertices
- *codes*: an N-length uint8 array of vertex types
These two arrays always have the same length in the first
dimension. For example, to represent a cubic curve, you must
    provide three vertices as well as three codes ``CURVE4``.
The code types are:
- ``STOP`` : 1 vertex (ignored)
A marker for the end of the entire path (currently not
required and ignored)
- ``MOVETO`` : 1 vertex
Pick up the pen and move to the given vertex.
- ``LINETO`` : 1 vertex
Draw a line from the current position to the given vertex.
- ``CURVE3`` : 1 control point, 1 endpoint
Draw a quadratic Bezier curve from the current position,
with the given control point, to the given end point.
- ``CURVE4`` : 2 control points, 1 endpoint
Draw a cubic Bezier curve from the current position, with
the given control points, to the given end point.
- ``CLOSEPOLY`` : 1 vertex (ignored)
Draw a line segment to the start point of the current
polyline.
Users of Path objects should not access the vertices and codes
arrays directly. Instead, they should use :meth:`iter_segments`
or :meth:`cleaned` to get the vertex/code pairs. This is important,
since many :class:`Path` objects, as an optimization, do not store a
*codes* at all, but have a default one provided for them by
:meth:`iter_segments`.
Some behavior of Path objects can be controlled by rcParams. See
the rcParams whose keys contain 'path.'.
.. note::
The vertices and codes arrays should be treated as
immutable -- there are a number of optimizations and assumptions
made up front in the constructor that will not change when the
data changes.
"""
# Path codes
STOP = 0 # 1 vertex
MOVETO = 1 # 1 vertex
LINETO = 2 # 1 vertex
CURVE3 = 3 # 2 vertices
CURVE4 = 4 # 3 vertices
CLOSEPOLY = 79 # 1 vertex
#: A dictionary mapping Path codes to the number of vertices that the
#: code expects.
NUM_VERTICES_FOR_CODE = {STOP: 1,
MOVETO: 1,
LINETO: 1,
CURVE3: 2,
CURVE4: 3,
CLOSEPOLY: 1}
code_type = np.uint8
def __init__(self, vertices, codes=None, _interpolation_steps=1,
closed=False, readonly=False):
"""
Create a new path with the given vertices and codes.
Parameters
----------
vertices : array_like
The ``(n, 2)`` float array, masked array or sequence of pairs
representing the vertices of the path.
If *vertices* contains masked values, they will be converted
to NaNs which are then handled correctly by the Agg
PathIterator and other consumers of path data, such as
:meth:`iter_segments`.
codes : {None, array_like}, optional
n-length array integers representing the codes of the path.
If not None, codes must be the same length as vertices.
If None, *vertices* will be treated as a series of line segments.
_interpolation_steps : int, optional
Used as a hint to certain projections, such as Polar, that this
path should be linearly interpolated immediately before drawing.
This attribute is primarily an implementation detail and is not
intended for public use.
closed : bool, optional
If *codes* is None and closed is True, vertices will be treated as
line segments of a closed polygon.
readonly : bool, optional
Makes the path behave in an immutable way and sets the vertices
and codes as read-only arrays.
"""
vertices = _to_unmasked_float_array(vertices)
if (vertices.ndim != 2) or (vertices.shape[1] != 2):
raise ValueError(
"'vertices' must be a 2D list or array with shape Nx2")
if codes is not None:
codes = np.asarray(codes, self.code_type)
if (codes.ndim != 1) or len(codes) != len(vertices):
                raise ValueError("'codes' must be a 1D list or array with the "
                                 "same length as 'vertices'")
if len(codes) and codes[0] != self.MOVETO:
                raise ValueError("The first element of 'codes' must be equal "
"to 'MOVETO' ({})".format(self.MOVETO))
elif closed:
codes = np.empty(len(vertices), dtype=self.code_type)
codes[0] = self.MOVETO
codes[1:-1] = self.LINETO
codes[-1] = self.CLOSEPOLY
self._vertices = vertices
self._codes = codes
self._interpolation_steps = _interpolation_steps
self._update_values()
if readonly:
self._vertices.flags.writeable = False
if self._codes is not None:
self._codes.flags.writeable = False
self._readonly = True
else:
self._readonly = False
@classmethod
def _fast_from_codes_and_verts(cls, verts, codes, internals=None):
"""
Creates a Path instance without the expense of calling the constructor
Parameters
----------
verts : numpy array
codes : numpy array
internals : dict or None
The attributes that the resulting path should have.
Allowed keys are ``readonly``, ``should_simplify``,
``simplify_threshold``, ``has_nonfinite`` and
``interpolation_steps``.
"""
internals = internals or {}
pth = cls.__new__(cls)
pth._vertices = _to_unmasked_float_array(verts)
pth._codes = codes
pth._readonly = internals.pop('readonly', False)
pth.should_simplify = internals.pop('should_simplify', True)
pth.simplify_threshold = (
internals.pop('simplify_threshold',
rcParams['path.simplify_threshold'])
)
pth._has_nonfinite = internals.pop('has_nonfinite', False)
pth._interpolation_steps = internals.pop('interpolation_steps', 1)
if internals:
raise ValueError('Unexpected internals provided to '
'_fast_from_codes_and_verts: '
'{0}'.format('\n *'.join(internals)))
return pth
def _update_values(self):
self._simplify_threshold = rcParams['path.simplify_threshold']
self._should_simplify = (
self._simplify_threshold > 0 and
rcParams['path.simplify'] and
len(self._vertices) >= 128 and
(self._codes is None or np.all(self._codes <= Path.LINETO))
)
self._has_nonfinite = not np.isfinite(self._vertices).all()
@property
def vertices(self):
"""
The list of vertices in the `Path` as an Nx2 numpy array.
"""
return self._vertices
@vertices.setter
def vertices(self, vertices):
if self._readonly:
raise AttributeError("Can't set vertices on a readonly Path")
self._vertices = vertices
self._update_values()
@property
def codes(self):
"""
The list of codes in the `Path` as a 1-D numpy array. Each
code is one of `STOP`, `MOVETO`, `LINETO`, `CURVE3`, `CURVE4`
or `CLOSEPOLY`. For codes that correspond to more than one
vertex (`CURVE3` and `CURVE4`), that code will be repeated so
that the length of `self.vertices` and `self.codes` is always
the same.
"""
return self._codes
@codes.setter
def codes(self, codes):
if self._readonly:
raise AttributeError("Can't set codes on a readonly Path")
self._codes = codes
self._update_values()
@property
def simplify_threshold(self):
"""
The fraction of a pixel difference below which vertices will
be simplified out.
"""
return self._simplify_threshold
@simplify_threshold.setter
def simplify_threshold(self, threshold):
self._simplify_threshold = threshold
@property
def has_nonfinite(self):
"""
`True` if the vertices array has nonfinite values.
"""
return self._has_nonfinite
@property
def should_simplify(self):
"""
`True` if the vertices array should be simplified.
"""
return self._should_simplify
@should_simplify.setter
def should_simplify(self, should_simplify):
self._should_simplify = should_simplify
@property
def readonly(self):
"""
`True` if the `Path` is read-only.
"""
return self._readonly
def __copy__(self):
"""
Returns a shallow copy of the `Path`, which will share the
vertices and codes with the source `Path`.
"""
import copy
return copy.copy(self)
copy = __copy__
def __deepcopy__(self, memo=None):
"""
Returns a deepcopy of the `Path`. The `Path` will not be
readonly, even if the source `Path` is.
"""
try:
codes = self.codes.copy()
except AttributeError:
codes = None
return self.__class__(
self.vertices.copy(), codes,
_interpolation_steps=self._interpolation_steps)
deepcopy = __deepcopy__
@classmethod
def make_compound_path_from_polys(cls, XY):
"""
        Make a compound path object to draw a number of polygons with
        equal numbers of sides.  *XY* is a (numpolys x numsides x 2)
        numpy array of vertices.  The returned object is a
        :class:`Path`.
.. plot:: gallery/api/histogram_path.py
"""
# for each poly: 1 for the MOVETO, (numsides-1) for the LINETO, 1 for
# the CLOSEPOLY; the vert for the closepoly is ignored but we still
# need it to keep the codes aligned with the vertices
numpolys, numsides, two = XY.shape
if two != 2:
raise ValueError("The third dimension of 'XY' must be 2")
stride = numsides + 1
nverts = numpolys * stride
verts = np.zeros((nverts, 2))
codes = np.ones(nverts, int) * cls.LINETO
codes[0::stride] = cls.MOVETO
codes[numsides::stride] = cls.CLOSEPOLY
for i in range(numsides):
verts[i::stride] = XY[:, i]
return cls(verts, codes)
@classmethod
def make_compound_path(cls, *args):
"""Make a compound path from a list of Path objects."""
# Handle an empty list in args (i.e. no args).
if not args:
return Path(np.empty([0, 2], dtype=np.float32))
lengths = [len(x) for x in args]
total_length = sum(lengths)
vertices = np.vstack([x.vertices for x in args])
vertices.reshape((total_length, 2))
codes = np.empty(total_length, dtype=cls.code_type)
i = 0
for path in args:
if path.codes is None:
codes[i] = cls.MOVETO
codes[i + 1:i + len(path.vertices)] = cls.LINETO
else:
codes[i:i + len(path.codes)] = path.codes
i += len(path.vertices)
return cls(vertices, codes)
def __repr__(self):
return "Path(%r, %r)" % (self.vertices, self.codes)
def __len__(self):
return len(self.vertices)
def iter_segments(self, transform=None, remove_nans=True, clip=None,
snap=False, stroke_width=1.0, simplify=None,
curves=True, sketch=None):
"""
Iterates over all of the curve segments in the path. Each
iteration returns a 2-tuple (*vertices*, *code*), where
*vertices* is a sequence of 1 - 3 coordinate pairs, and *code* is
one of the :class:`Path` codes.
Additionally, this method can provide a number of standard
cleanups and conversions to the path.
Parameters
----------
transform : None or :class:`~matplotlib.transforms.Transform` instance
If not None, the given affine transformation will
be applied to the path.
remove_nans : {False, True}, optional
If True, will remove all NaNs from the path and
insert MOVETO commands to skip over them.
clip : None or sequence, optional
If not None, must be a four-tuple (x1, y1, x2, y2)
defining a rectangle in which to clip the path.
snap : None or bool, optional
If None, auto-snap to pixels, to reduce
fuzziness of rectilinear lines. If True, force snapping, and
if False, don't snap.
stroke_width : float, optional
The width of the stroke being drawn. Needed
as a hint for the snapping algorithm.
simplify : None or bool, optional
If True, perform simplification, to remove
vertices that do not affect the appearance of the path. If
False, perform no simplification. If None, use the
should_simplify member variable. See also the rcParams
path.simplify and path.simplify_threshold.
curves : {True, False}, optional
If True, curve segments will be returned as curve
segments. If False, all curves will be converted to line
segments.
sketch : None or sequence, optional
If not None, must be a 3-tuple of the form
(scale, length, randomness), representing the sketch
parameters.
"""
if not len(self):
return
cleaned = self.cleaned(transform=transform,
remove_nans=remove_nans, clip=clip,
snap=snap, stroke_width=stroke_width,
simplify=simplify, curves=curves,
sketch=sketch)
vertices = cleaned.vertices
codes = cleaned.codes
len_vertices = vertices.shape[0]
# Cache these object lookups for performance in the loop.
NUM_VERTICES_FOR_CODE = self.NUM_VERTICES_FOR_CODE
STOP = self.STOP
i = 0
while i < len_vertices:
code = codes[i]
if code == STOP:
return
else:
num_vertices = NUM_VERTICES_FOR_CODE[code]
curr_vertices = vertices[i:i+num_vertices].flatten()
yield curr_vertices, code
i += num_vertices
def cleaned(self, transform=None, remove_nans=False, clip=None,
quantize=False, simplify=False, curves=False,
stroke_width=1.0, snap=False, sketch=None):
"""
Cleans up the path according to the parameters returning a new
Path instance.
.. seealso::
See :meth:`iter_segments` for details of the keyword arguments.
Returns
-------
Path instance with cleaned up vertices and codes.
"""
vertices, codes = _path.cleanup_path(self, transform,
remove_nans, clip,
snap, stroke_width,
simplify, curves, sketch)
internals = {'should_simplify': self.should_simplify and not simplify,
'has_nonfinite': self.has_nonfinite and not remove_nans,
'simplify_threshold': self.simplify_threshold,
'interpolation_steps': self._interpolation_steps}
return Path._fast_from_codes_and_verts(vertices, codes, internals)
def transformed(self, transform):
"""
Return a transformed copy of the path.
.. seealso::
:class:`matplotlib.transforms.TransformedPath`
A specialized path class that will cache the
transformed result and automatically update when the
transform changes.
"""
return Path(transform.transform(self.vertices), self.codes,
self._interpolation_steps)
def contains_point(self, point, transform=None, radius=0.0):
"""
Returns whether the (closed) path contains the given point.
If *transform* is not ``None``, the path will be transformed before
performing the test.
*radius* allows the path to be made slightly larger or smaller.
"""
if transform is not None:
transform = transform.frozen()
# `point_in_path` does not handle nonlinear transforms, so we
# transform the path ourselves. If `transform` is affine, letting
# `point_in_path` handle the transform avoids allocating an extra
# buffer.
if transform and not transform.is_affine:
self = transform.transform_path(self)
transform = None
return _path.point_in_path(point[0], point[1], radius, self, transform)
def contains_points(self, points, transform=None, radius=0.0):
"""
Returns a bool array which is ``True`` if the (closed) path contains
the corresponding point.
If *transform* is not ``None``, the path will be transformed before
performing the test.
*radius* allows the path to be made slightly larger or smaller.
"""
if transform is not None:
transform = transform.frozen()
result = _path.points_in_path(points, radius, self, transform)
return result.astype('bool')
def contains_path(self, path, transform=None):
"""
Returns whether this (closed) path completely contains the given path.
If *transform* is not ``None``, the path will be transformed before
performing the test.
"""
if transform is not None:
transform = transform.frozen()
return _path.path_in_path(self, None, path, transform)
def get_extents(self, transform=None):
"""
Returns the extents (*xmin*, *ymin*, *xmax*, *ymax*) of the
path.
Unlike computing the extents on the *vertices* alone, this
algorithm will take into account the curves and deal with
control points appropriately.
"""
from .transforms import Bbox
path = self
if transform is not None:
transform = transform.frozen()
if not transform.is_affine:
path = self.transformed(transform)
transform = None
return Bbox(_path.get_path_extents(path, transform))
def intersects_path(self, other, filled=True):
"""
Returns *True* if this path intersects another given path.
*filled*, when True, treats the paths as if they were filled.
That is, if one path completely encloses the other,
:meth:`intersects_path` will return True.
"""
return _path.path_intersects_path(self, other, filled)
def intersects_bbox(self, bbox, filled=True):
"""
Returns *True* if this path intersects a given
:class:`~matplotlib.transforms.Bbox`.
*filled*, when True, treats the path as if it was filled.
That is, if the path completely encloses the bounding box,
:meth:`intersects_bbox` will return True.
The bounding box is always considered filled.
"""
return _path.path_intersects_rectangle(self,
bbox.x0, bbox.y0, bbox.x1, bbox.y1, filled)
def interpolated(self, steps):
"""
Returns a new path resampled to length N x steps. Does not
currently handle interpolating curves.
"""
if steps == 1:
return self
vertices = simple_linear_interpolation(self.vertices, steps)
codes = self.codes
if codes is not None:
new_codes = Path.LINETO * np.ones(((len(codes) - 1) * steps + 1, ))
new_codes[0::steps] = codes
else:
new_codes = None
return Path(vertices, new_codes)
def to_polygons(self, transform=None, width=0, height=0, closed_only=True):
"""
Convert this path to a list of polygons or polylines. Each
polygon/polyline is an Nx2 array of vertices. In other words,
each polygon has no ``MOVETO`` instructions or curves. This
is useful for displaying in backends that do not support
compound paths or Bezier curves, such as GDK.
If *width* and *height* are both non-zero then the lines will
be simplified so that vertices outside of (0, 0), (width,
height) will be clipped.
If *closed_only* is `True` (default), only closed polygons,
with the last point being the same as the first point, will be
returned. Any unclosed polylines in the path will be
explicitly closed. If *closed_only* is `False`, any unclosed
polygons in the path will be returned as unclosed polygons,
and the closed polygons will be returned explicitly closed by
setting the last point to the same as the first point.
"""
if len(self.vertices) == 0:
return []
if transform is not None:
transform = transform.frozen()
if self.codes is None and (width == 0 or height == 0):
vertices = self.vertices
if closed_only:
if len(vertices) < 3:
return []
elif np.any(vertices[0] != vertices[-1]):
vertices = list(vertices) + [vertices[0]]
if transform is None:
return [vertices]
else:
return [transform.transform(vertices)]
# Deal with the case where there are curves and/or multiple
# subpaths (using extension code)
return _path.convert_path_to_polygons(
self, transform, width, height, closed_only)
_unit_rectangle = None
@classmethod
def unit_rectangle(cls):
"""
Return a :class:`Path` instance of the unit rectangle
from (0, 0) to (1, 1).
"""
if cls._unit_rectangle is None:
cls._unit_rectangle = \
cls([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0],
[0.0, 0.0]],
[cls.MOVETO, cls.LINETO, cls.LINETO, cls.LINETO,
cls.CLOSEPOLY],
readonly=True)
return cls._unit_rectangle
_unit_regular_polygons = WeakValueDictionary()
@classmethod
def unit_regular_polygon(cls, numVertices):
"""
Return a :class:`Path` instance for a unit regular
polygon with the given *numVertices* and radius of 1.0,
centered at (0, 0).
"""
if numVertices <= 16:
path = cls._unit_regular_polygons.get(numVertices)
else:
path = None
if path is None:
theta = (2*np.pi/numVertices *
np.arange(numVertices + 1).reshape((numVertices + 1, 1)))
# This initial rotation is to make sure the polygon always
# "points-up"
theta += np.pi / 2.0
verts = np.concatenate((np.cos(theta), np.sin(theta)), 1)
codes = np.empty((numVertices + 1,))
codes[0] = cls.MOVETO
codes[1:-1] = cls.LINETO
codes[-1] = cls.CLOSEPOLY
path = cls(verts, codes, readonly=True)
if numVertices <= 16:
cls._unit_regular_polygons[numVertices] = path
return path
_unit_regular_stars = WeakValueDictionary()
@classmethod
def unit_regular_star(cls, numVertices, innerCircle=0.5):
"""
Return a :class:`Path` for a unit regular star
with the given numVertices and radius of 1.0, centered at (0,
0).
"""
if numVertices <= 16:
path = cls._unit_regular_stars.get((numVertices, innerCircle))
else:
path = None
if path is None:
ns2 = numVertices * 2
theta = (2*np.pi/ns2 * np.arange(ns2 + 1))
# This initial rotation is to make sure the polygon always
# "points-up"
theta += np.pi / 2.0
r = np.ones(ns2 + 1)
r[1::2] = innerCircle
verts = np.vstack((r*np.cos(theta), r*np.sin(theta))).transpose()
codes = np.empty((ns2 + 1,))
codes[0] = cls.MOVETO
codes[1:-1] = cls.LINETO
codes[-1] = cls.CLOSEPOLY
path = cls(verts, codes, readonly=True)
if numVertices <= 16:
cls._unit_regular_stars[(numVertices, innerCircle)] = path
return path
@classmethod
def unit_regular_asterisk(cls, numVertices):
"""
Return a :class:`Path` for a unit regular
asterisk with the given numVertices and radius of 1.0,
centered at (0, 0).
"""
return cls.unit_regular_star(numVertices, 0.0)
_unit_circle = None
@classmethod
def unit_circle(cls):
"""
Return the readonly :class:`Path` of the unit circle.
For most cases, :func:`Path.circle` will be what you want.
"""
if cls._unit_circle is None:
cls._unit_circle = cls.circle(center=(0, 0), radius=1,
readonly=True)
return cls._unit_circle
@classmethod
def circle(cls, center=(0., 0.), radius=1., readonly=False):
"""
Return a Path representing a circle of a given radius and center.
Parameters
----------
center : pair of floats
The center of the circle. Default ``(0, 0)``.
radius : float
The radius of the circle. Default is 1.
readonly : bool
Whether the created path should have the "readonly" argument
set when creating the Path instance.
Notes
-----
The circle is approximated using cubic Bezier curves. This
uses 8 splines around the circle using the approach presented
here:
Lancaster, Don. `Approximating a Circle or an Ellipse Using Four
Bezier Cubic Splines <http://www.tinaja.com/glib/ellipse4.pdf>`_.
"""
MAGIC = 0.2652031
SQRTHALF = np.sqrt(0.5)
MAGIC45 = SQRTHALF * MAGIC
vertices = np.array([[0.0, -1.0],
[MAGIC, -1.0],
[SQRTHALF-MAGIC45, -SQRTHALF-MAGIC45],
[SQRTHALF, -SQRTHALF],
[SQRTHALF+MAGIC45, -SQRTHALF+MAGIC45],
[1.0, -MAGIC],
[1.0, 0.0],
[1.0, MAGIC],
[SQRTHALF+MAGIC45, SQRTHALF-MAGIC45],
[SQRTHALF, SQRTHALF],
[SQRTHALF-MAGIC45, SQRTHALF+MAGIC45],
[MAGIC, 1.0],
[0.0, 1.0],
[-MAGIC, 1.0],
[-SQRTHALF+MAGIC45, SQRTHALF+MAGIC45],
[-SQRTHALF, SQRTHALF],
[-SQRTHALF-MAGIC45, SQRTHALF-MAGIC45],
[-1.0, MAGIC],
[-1.0, 0.0],
[-1.0, -MAGIC],
[-SQRTHALF-MAGIC45, -SQRTHALF+MAGIC45],
[-SQRTHALF, -SQRTHALF],
[-SQRTHALF+MAGIC45, -SQRTHALF-MAGIC45],
[-MAGIC, -1.0],
[0.0, -1.0],
[0.0, -1.0]],
dtype=float)
codes = [cls.CURVE4] * 26
codes[0] = cls.MOVETO
codes[-1] = cls.CLOSEPOLY
return Path(vertices * radius + center, codes, readonly=readonly)
_unit_circle_righthalf = None
@classmethod
def unit_circle_righthalf(cls):
"""
Return a :class:`Path` of the right half
of a unit circle. The circle is approximated using cubic Bezier
curves. This uses 4 splines around the circle using the approach
presented here:
Lancaster, Don. `Approximating a Circle or an Ellipse Using Four
Bezier Cubic Splines <http://www.tinaja.com/glib/ellipse4.pdf>`_.
"""
if cls._unit_circle_righthalf is None:
MAGIC = 0.2652031
SQRTHALF = np.sqrt(0.5)
MAGIC45 = SQRTHALF * MAGIC
vertices = np.array(
[[0.0, -1.0],
[MAGIC, -1.0],
[SQRTHALF-MAGIC45, -SQRTHALF-MAGIC45],
[SQRTHALF, -SQRTHALF],
[SQRTHALF+MAGIC45, -SQRTHALF+MAGIC45],
[1.0, -MAGIC],
[1.0, 0.0],
[1.0, MAGIC],
[SQRTHALF+MAGIC45, SQRTHALF-MAGIC45],
[SQRTHALF, SQRTHALF],
[SQRTHALF-MAGIC45, SQRTHALF+MAGIC45],
[MAGIC, 1.0],
[0.0, 1.0],
[0.0, -1.0]],
float)
codes = cls.CURVE4 * np.ones(14)
codes[0] = cls.MOVETO
codes[-1] = cls.CLOSEPOLY
cls._unit_circle_righthalf = cls(vertices, codes, readonly=True)
return cls._unit_circle_righthalf
@classmethod
def arc(cls, theta1, theta2, n=None, is_wedge=False):
"""
Return an arc on the unit circle from angle
*theta1* to angle *theta2* (in degrees).
*theta2* is unwrapped to produce the shortest arc within 360 degrees.
That is, if *theta2* > *theta1* + 360, the arc will be from *theta1* to
*theta2* - 360 and not a full circle plus some extra overlap.
If *n* is provided, it is the number of spline segments to make.
If *n* is not provided, the number of spline segments is
determined based on the delta between *theta1* and *theta2*.
        Maisonobe, L.  2003.  `Drawing an elliptical arc using
polylines, quadratic or cubic Bezier curves
<http://www.spaceroots.org/documents/ellipse/index.html>`_.
"""
halfpi = np.pi * 0.5
eta1 = theta1
eta2 = theta2 - 360 * np.floor((theta2 - theta1) / 360)
# Ensure 2pi range is not flattened to 0 due to floating-point errors,
# but don't try to expand existing 0 range.
if theta2 != theta1 and eta2 <= eta1:
eta2 += 360
eta1, eta2 = np.deg2rad([eta1, eta2])
# number of curve segments to make
if n is None:
n = int(2 ** np.ceil((eta2 - eta1) / halfpi))
if n < 1:
raise ValueError("n must be >= 1 or None")
deta = (eta2 - eta1) / n
t = np.tan(0.5 * deta)
alpha = np.sin(deta) * (np.sqrt(4.0 + 3.0 * t * t) - 1) / 3.0
steps = np.linspace(eta1, eta2, n + 1, True)
cos_eta = np.cos(steps)
sin_eta = np.sin(steps)
xA = cos_eta[:-1]
yA = sin_eta[:-1]
xA_dot = -yA
yA_dot = xA
xB = cos_eta[1:]
yB = sin_eta[1:]
xB_dot = -yB
yB_dot = xB
if is_wedge:
length = n * 3 + 4
vertices = np.zeros((length, 2), float)
codes = cls.CURVE4 * np.ones((length, ), cls.code_type)
vertices[1] = [xA[0], yA[0]]
codes[0:2] = [cls.MOVETO, cls.LINETO]
codes[-2:] = [cls.LINETO, cls.CLOSEPOLY]
vertex_offset = 2
end = length - 2
else:
length = n * 3 + 1
vertices = np.empty((length, 2), float)
codes = cls.CURVE4 * np.ones((length, ), cls.code_type)
vertices[0] = [xA[0], yA[0]]
codes[0] = cls.MOVETO
vertex_offset = 1
end = length
vertices[vertex_offset:end:3, 0] = xA + alpha * xA_dot
vertices[vertex_offset:end:3, 1] = yA + alpha * yA_dot
vertices[vertex_offset+1:end:3, 0] = xB - alpha * xB_dot
vertices[vertex_offset+1:end:3, 1] = yB - alpha * yB_dot
vertices[vertex_offset+2:end:3, 0] = xB
vertices[vertex_offset+2:end:3, 1] = yB
return cls(vertices, codes, readonly=True)
@classmethod
def wedge(cls, theta1, theta2, n=None):
"""
Return a wedge of the unit circle from angle
*theta1* to angle *theta2* (in degrees).
*theta2* is unwrapped to produce the shortest wedge within 360 degrees.
That is, if *theta2* > *theta1* + 360, the wedge will be from *theta1*
to *theta2* - 360 and not a full circle plus some extra overlap.
If *n* is provided, it is the number of spline segments to make.
If *n* is not provided, the number of spline segments is
determined based on the delta between *theta1* and *theta2*.
"""
return cls.arc(theta1, theta2, n, True)
_hatch_dict = maxdict(8)
@classmethod
def hatch(cls, hatchpattern, density=6):
"""
Given a hatch specifier, *hatchpattern*, generates a Path that
can be used in a repeated hatching pattern. *density* is the
number of lines per unit square.
"""
from matplotlib.hatch import get_path
if hatchpattern is None:
return None
hatch_path = cls._hatch_dict.get((hatchpattern, density))
if hatch_path is not None:
return hatch_path
hatch_path = get_path(hatchpattern, density)
cls._hatch_dict[(hatchpattern, density)] = hatch_path
return hatch_path
def clip_to_bbox(self, bbox, inside=True):
"""
Clip the path to the given bounding box.
The path must be made up of one or more closed polygons. This
algorithm will not behave correctly for unclosed paths.
If *inside* is `True`, clip to the inside of the box, otherwise
to the outside of the box.
"""
# Use make_compound_path_from_polys
verts = _path.clip_path_to_rect(self, bbox, inside)
paths = [Path(poly) for poly in verts]
return self.make_compound_path(*paths)
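# Illustrative sketch (not part of the original module): building a closed
# triangle with explicit codes, querying it, and walking its segments via
# iter_segments() as the class docstring recommends.  The coordinates are
# arbitrary examples.
def _path_example():
    verts = [(0., 0.), (1., 0.), (0.5, 1.), (0., 0.)]
    codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]
    triangle = Path(verts, codes)
    inside = triangle.contains_point((0.5, 0.25))   # True: the point lies inside
    extents = triangle.get_extents()                # Bbox from (0, 0) to (1, 1)
    segments = list(triangle.iter_segments())       # (vertices, code) pairs
    return inside, extents, segments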
def get_path_collection_extents(
master_transform, paths, transforms, offsets, offset_transform):
"""
Given a sequence of :class:`Path` objects,
:class:`~matplotlib.transforms.Transform` objects and offsets, as
found in a :class:`~matplotlib.collections.PathCollection`,
returns the bounding box that encapsulates all of them.
*master_transform* is a global transformation to apply to all paths
*paths* is a sequence of :class:`Path` instances.
*transforms* is a sequence of
:class:`~matplotlib.transforms.Affine2D` instances.
*offsets* is a sequence of (x, y) offsets (or an Nx2 array)
*offset_transform* is a :class:`~matplotlib.transforms.Affine2D`
to apply to the offsets before applying the offset to the path.
The way that *paths*, *transforms* and *offsets* are combined
follows the same method as for collections. Each is iterated over
independently, so if you have 3 paths, 2 transforms and 1 offset,
their combinations are as follows:
(A, A, A), (B, B, A), (C, A, A)
"""
from .transforms import Bbox
if len(paths) == 0:
raise ValueError("No paths provided")
return Bbox.from_extents(*_path.get_path_collection_extents(
master_transform, paths, np.atleast_3d(transforms),
offsets, offset_transform))
def get_paths_extents(paths, transforms=[]):
"""
Given a sequence of :class:`Path` objects and optional
:class:`~matplotlib.transforms.Transform` objects, returns the
bounding box that encapsulates all of them.
*paths* is a sequence of :class:`Path` instances.
*transforms* is an optional sequence of
:class:`~matplotlib.transforms.Affine2D` instances to apply to
each path.
"""
from .transforms import Bbox, Affine2D
if len(paths) == 0:
raise ValueError("No paths provided")
return Bbox.from_extents(*_path.get_path_collection_extents(
Affine2D(), paths, transforms, [], Affine2D()))
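# Illustrative sketch (not part of the original module): combined bounding box
# of two of the predefined unit shapes, with no extra transforms supplied.
def _paths_extents_example():
    paths = [Path.unit_circle(), Path.unit_rectangle()]
    return get_paths_extents(paths)   # expected to span roughly (-1, -1) to (1, 1)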
| 37,566 | 35.50826 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/contour.py |
"""
These are classes to support contour plotting and labelling for the Axes class.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
import warnings
import matplotlib as mpl
import numpy as np
from numpy import ma
import matplotlib._contour as _contour
import matplotlib.path as mpath
import matplotlib.ticker as ticker
import matplotlib.cm as cm
import matplotlib.colors as colors
import matplotlib.collections as mcoll
import matplotlib.font_manager as font_manager
import matplotlib.text as text
import matplotlib.cbook as cbook
import matplotlib.mathtext as mathtext
import matplotlib.patches as mpatches
import matplotlib.texmanager as texmanager
import matplotlib.transforms as mtransforms
# Import needed for adding manual selection capability to clabel
from matplotlib.blocking_input import BlockingContourLabeler
# We can't use a single line collection for contour because a line
# collection can have only a single line style, and we want to be able to have
# dashed negative contours, for example, and solid positive contours.
# We could use a single polygon collection for filled contours, but it
# seems better to keep line and filled contours similar, with one collection
# per level.
class ClabelText(text.Text):
"""
    Unlike ordinary text, get_rotation() returns an angle updated to the
    pixel coordinate system, assuming that the input rotation is an angle
    in the data coordinate system (or whatever transform is set).
"""
def get_rotation(self):
angle = text.Text.get_rotation(self)
trans = self.get_transform()
x, y = self.get_position()
new_angles = trans.transform_angles(np.array([angle]),
np.array([[x, y]]))
return new_angles[0]
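# Illustrative sketch (not part of the original module): placing a ClabelText
# in data coordinates.  The position, rotation and label string are made up,
# and ``ax`` is assumed to be an existing Axes instance.
def _clabeltext_example(ax):
    t = ClabelText(0.5, 0.5, "0.75", rotation=30,
                   horizontalalignment='center', verticalalignment='center')
    t.set_transform(ax.transData)   # get_rotation() re-expresses 30 deg in pixel space
    ax.add_artist(t)
    return t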
class ContourLabeler(object):
"""Mixin to provide labelling capability to ContourSet"""
def clabel(self, *args, **kwargs):
"""
Label a contour plot.
Call signature::
clabel(cs, **kwargs)
Adds labels to line contours in *cs*, where *cs* is a
:class:`~matplotlib.contour.ContourSet` object returned by
contour.
::
clabel(cs, v, **kwargs)
only labels contours listed in *v*.
Parameters
----------
fontsize : string or float, optional
Size in points or relative size e.g., 'smaller', 'x-large'.
See `Text.set_size` for accepted string values.
colors :
Color of each label
- if *None*, the color of each label matches the color of
the corresponding contour
- if one string color, e.g., *colors* = 'r' or *colors* =
'red', all labels will be plotted in this color
- if a tuple of matplotlib color args (string, float, rgb, etc),
different labels will be plotted in different colors in the order
specified
inline : bool, optional
If ``True`` the underlying contour is removed where the label is
placed. Default is ``True``.
inline_spacing : float, optional
Space in pixels to leave on each side of label when
placing inline. Defaults to 5.
This spacing will be exact for labels at locations where the
contour is straight, less so for labels on curved contours.
fmt : string or dict, optional
A format string for the label. Default is '%1.3f'
Alternatively, this can be a dictionary matching contour
levels with arbitrary strings to use for each contour level
(i.e., fmt[level]=string), or it can be any callable, such
as a :class:`~matplotlib.ticker.Formatter` instance, that
returns a string when called with a numeric contour level.
manual : bool or iterable, optional
If ``True``, contour labels will be placed manually using
mouse clicks. Click the first button near a contour to
add a label, click the second button (or potentially both
mouse buttons at once) to finish adding labels. The third
button can be used to remove the last label added, but
only if labels are not inline. Alternatively, the keyboard
can be used to select label locations (enter to end label
placement, delete or backspace act like the third mouse button,
and any other key will select a label location).
            *manual* can also be an iterable object of (x, y) tuples.
            Contour labels will then be created as if the mouse were
            clicked at each of those positions.
rightside_up : bool, optional
If ``True``, label rotations will always be plus
or minus 90 degrees from level. Default is ``True``.
use_clabeltext : bool, optional
If ``True``, `ClabelText` class (instead of `Text`) is used to
            create labels. `ClabelText` recalculates rotation angles
            of the labels at draw time, so it can be used if the aspect
            of the axes changes. Default is ``False``.
"""
"""
NOTES on how this all works:
clabel basically takes the input arguments and uses them to
add a list of "label specific" attributes to the ContourSet
object. These attributes are all of the form label* and names
should be fairly self explanatory.
Once these attributes are set, clabel passes control to the
labels method (case of automatic label placement) or
`BlockingContourLabeler` (case of manual label placement).
"""
fontsize = kwargs.get('fontsize', None)
inline = kwargs.get('inline', 1)
inline_spacing = kwargs.get('inline_spacing', 5)
self.labelFmt = kwargs.get('fmt', '%1.3f')
_colors = kwargs.get('colors', None)
self._use_clabeltext = kwargs.get('use_clabeltext', False)
# Detect if manual selection is desired and remove from argument list
self.labelManual = kwargs.get('manual', False)
self.rightside_up = kwargs.get('rightside_up', True)
if len(args) == 0:
levels = self.levels
indices = list(xrange(len(self.cvalues)))
elif len(args) == 1:
levlabs = list(args[0])
indices, levels = [], []
for i, lev in enumerate(self.levels):
if lev in levlabs:
indices.append(i)
levels.append(lev)
if len(levels) < len(levlabs):
raise ValueError("Specified levels {} don't match available "
"levels {}".format(levlabs, self.levels))
else:
raise TypeError("Illegal arguments to clabel, see help(clabel)")
self.labelLevelList = levels
self.labelIndiceList = indices
self.labelFontProps = font_manager.FontProperties()
self.labelFontProps.set_size(fontsize)
font_size_pts = self.labelFontProps.get_size_in_points()
self.labelFontSizeList = [font_size_pts] * len(levels)
if _colors is None:
self.labelMappable = self
self.labelCValueList = np.take(self.cvalues, self.labelIndiceList)
else:
cmap = colors.ListedColormap(_colors, N=len(self.labelLevelList))
self.labelCValueList = list(xrange(len(self.labelLevelList)))
self.labelMappable = cm.ScalarMappable(cmap=cmap,
norm=colors.NoNorm())
self.labelXYs = []
if cbook.iterable(self.labelManual):
for x, y in self.labelManual:
self.add_label_near(x, y, inline,
inline_spacing)
elif self.labelManual:
print('Select label locations manually using first mouse button.')
print('End manual selection with second mouse button.')
if not inline:
print('Remove last label by clicking third mouse button.')
blocking_contour_labeler = BlockingContourLabeler(self)
blocking_contour_labeler(inline, inline_spacing)
else:
self.labels(inline, inline_spacing)
# Hold on to some old attribute names. These are deprecated and will
# be removed in the near future (sometime after 2008-08-01), but
# keeping for now for backwards compatibility
self.cl = self.labelTexts
self.cl_xy = self.labelXYs
self.cl_cvalues = self.labelCValues
self.labelTextsList = cbook.silent_list('text.Text', self.labelTexts)
return self.labelTextsList
def print_label(self, linecontour, labelwidth):
"Return *False* if contours are too short for a label."
return (len(linecontour) > 10 * labelwidth
or (np.ptp(linecontour, axis=0) > 1.2 * labelwidth).any())
def too_close(self, x, y, lw):
"Return *True* if a label is already near this location."
for loc in self.labelXYs:
d = np.sqrt((x - loc[0]) ** 2 + (y - loc[1]) ** 2)
if d < 1.2 * lw:
return True
return False
def get_label_coords(self, distances, XX, YY, ysize, lw):
"""
Return x, y, and the index of a label location.
Labels are plotted at a location with the smallest
deviation of the contour from a straight line
unless there is another label nearby, in which case
the next best place on the contour is picked up.
If all such candidates are rejected, the beginning
of the contour is chosen.
"""
hysize = int(ysize / 2)
adist = np.argsort(distances)
for ind in adist:
x, y = XX[ind][hysize], YY[ind][hysize]
if self.too_close(x, y, lw):
continue
return x, y, ind
ind = adist[0]
x, y = XX[ind][hysize], YY[ind][hysize]
return x, y, ind
def get_label_width(self, lev, fmt, fsize):
"""
Return the width of the label in points.
"""
if not isinstance(lev, six.string_types):
lev = self.get_text(lev, fmt)
lev, ismath = text.Text.is_math_text(lev)
if ismath == 'TeX':
if not hasattr(self, '_TeX_manager'):
self._TeX_manager = texmanager.TexManager()
lw, _, _ = self._TeX_manager.get_text_width_height_descent(lev,
fsize)
elif ismath:
if not hasattr(self, '_mathtext_parser'):
self._mathtext_parser = mathtext.MathTextParser('bitmap')
img, _ = self._mathtext_parser.parse(lev, dpi=72,
prop=self.labelFontProps)
lw = img.get_width() # at dpi=72, the units are PostScript points
else:
# width is much less than "font size"
lw = (len(lev)) * fsize * 0.6
return lw
@cbook.deprecated("2.2")
def get_real_label_width(self, lev, fmt, fsize):
"""
This computes actual onscreen label width.
This uses some black magic to determine onscreen extent of non-drawn
label. This magic may not be very robust.
This method is not being used, and may be modified or removed.
"""
# Find middle of axes
xx = np.mean(np.asarray(self.ax.axis()).reshape(2, 2), axis=1)
# Temporarily create text object
t = text.Text(xx[0], xx[1])
self.set_label_props(t, self.get_text(lev, fmt), 'k')
# Some black magic to get onscreen extent
# NOTE: This will only work for already drawn figures, as the canvas
# does not have a renderer otherwise. This is the reason this function
# can't be integrated into the rest of the code.
bbox = t.get_window_extent(renderer=self.ax.figure.canvas.renderer)
# difference in pixel extent of image
lw = np.diff(bbox.corners()[0::2, 0])[0]
return lw
def set_label_props(self, label, text, color):
"set the label properties - color, fontsize, text"
label.set_text(text)
label.set_color(color)
label.set_fontproperties(self.labelFontProps)
label.set_clip_box(self.ax.bbox)
def get_text(self, lev, fmt):
"get the text of the label"
if isinstance(lev, six.string_types):
return lev
else:
if isinstance(fmt, dict):
return fmt.get(lev, '%1.3f')
elif callable(fmt):
return fmt(lev)
else:
return fmt % lev
def locate_label(self, linecontour, labelwidth):
"""
        Find a good place to draw a label (a relatively flat part of the contour).
"""
# Number of contour points
nsize = len(linecontour)
if labelwidth > 1:
xsize = int(np.ceil(nsize / labelwidth))
else:
xsize = 1
if xsize == 1:
ysize = nsize
else:
ysize = int(labelwidth)
XX = np.resize(linecontour[:, 0], (xsize, ysize))
YY = np.resize(linecontour[:, 1], (xsize, ysize))
# I might have fouled up the following:
yfirst = YY[:, :1]
ylast = YY[:, -1:]
xfirst = XX[:, :1]
xlast = XX[:, -1:]
s = (yfirst - YY) * (xlast - xfirst) - (xfirst - XX) * (ylast - yfirst)
L = np.hypot(xlast - xfirst, ylast - yfirst)
# Ignore warning that divide by zero throws, as this is a valid option
with np.errstate(divide='ignore', invalid='ignore'):
dist = np.sum(np.abs(s) / L, axis=-1)
x, y, ind = self.get_label_coords(dist, XX, YY, ysize, labelwidth)
# There must be a more efficient way...
lc = [tuple(l) for l in linecontour]
dind = lc.index((x, y))
return x, y, dind
def calc_label_rot_and_inline(self, slc, ind, lw, lc=None, spacing=5):
"""
This function calculates the appropriate label rotation given
the linecontour coordinates in screen units, the index of the
label location and the label width.
It will also break contour and calculate inlining if *lc* is
not empty (lc defaults to the empty list if None). *spacing*
is the space around the label in pixels to leave empty.
Do both of these tasks at once to avoid calculating path lengths
multiple times, which is relatively costly.
The method used here involves calculating the path length
along the contour in pixel coordinates and then looking
approximately label width / 2 away from central point to
determine rotation and then to break contour if desired.
"""
if lc is None:
lc = []
# Half the label width
hlw = lw / 2.0
# Check if closed and, if so, rotate contour so label is at edge
closed = _is_closed_polygon(slc)
if closed:
slc = np.r_[slc[ind:-1], slc[:ind + 1]]
if len(lc): # Rotate lc also if not empty
lc = np.r_[lc[ind:-1], lc[:ind + 1]]
ind = 0
# Calculate path lengths
pl = np.zeros(slc.shape[0], dtype=float)
dx = np.diff(slc, axis=0)
pl[1:] = np.cumsum(np.hypot(dx[:, 0], dx[:, 1]))
pl = pl - pl[ind]
# Use linear interpolation to get points around label
xi = np.array([-hlw, hlw])
if closed: # Look at end also for closed contours
dp = np.array([pl[-1], 0])
else:
dp = np.zeros_like(xi)
# Get angle of vector between the two ends of the label - must be
# calculated in pixel space for text rotation to work correctly.
(dx,), (dy,) = (np.diff(np.interp(dp + xi, pl, slc_col))
for slc_col in slc.T)
rotation = np.rad2deg(np.arctan2(dy, dx))
if self.rightside_up:
# Fix angle so text is never upside-down
rotation = (rotation + 90) % 180 - 90
# Break contour if desired
nlc = []
if len(lc):
# Expand range by spacing
xi = dp + xi + np.array([-spacing, spacing])
# Get (integer) indices near points of interest; use -1 as marker
# for out of bounds.
I = np.interp(xi, pl, np.arange(len(pl)), left=-1, right=-1)
I = [np.floor(I[0]).astype(int), np.ceil(I[1]).astype(int)]
if I[0] != -1:
xy1 = [np.interp(xi[0], pl, lc_col) for lc_col in lc.T]
if I[1] != -1:
xy2 = [np.interp(xi[1], pl, lc_col) for lc_col in lc.T]
# Actually break contours
if closed:
# This will remove contour if shorter than label
if all(i != -1 for i in I):
nlc.append(np.row_stack([xy2, lc[I[1]:I[0]+1], xy1]))
else:
# These will remove pieces of contour if they have length zero
if I[0] != -1:
nlc.append(np.row_stack([lc[:I[0]+1], xy1]))
if I[1] != -1:
nlc.append(np.row_stack([xy2, lc[I[1]:]]))
# The current implementation removes contours completely
# covered by labels. Uncomment line below to keep
# original contour if this is the preferred behavior.
# if not len(nlc): nlc = [ lc ]
return rotation, nlc
def _get_label_text(self, x, y, rotation):
dx, dy = self.ax.transData.inverted().transform_point((x, y))
t = text.Text(dx, dy, rotation=rotation,
horizontalalignment='center',
verticalalignment='center')
return t
def _get_label_clabeltext(self, x, y, rotation):
# x, y, rotation is given in pixel coordinate. Convert them to
# the data coordinate and create a label using ClabelText
# class. This way, the roation of the clabel is along the
# contour line always.
transDataInv = self.ax.transData.inverted()
dx, dy = transDataInv.transform_point((x, y))
drotation = transDataInv.transform_angles(np.array([rotation]),
np.array([[x, y]]))
t = ClabelText(dx, dy, rotation=drotation[0],
horizontalalignment='center',
verticalalignment='center')
return t
def _add_label(self, t, x, y, lev, cvalue):
color = self.labelMappable.to_rgba(cvalue, alpha=self.alpha)
_text = self.get_text(lev, self.labelFmt)
self.set_label_props(t, _text, color)
self.labelTexts.append(t)
self.labelCValues.append(cvalue)
self.labelXYs.append((x, y))
# Add label to plot here - useful for manual mode label selection
self.ax.add_artist(t)
def add_label(self, x, y, rotation, lev, cvalue):
"""
Add contour label using :class:`~matplotlib.text.Text` class.
"""
t = self._get_label_text(x, y, rotation)
self._add_label(t, x, y, lev, cvalue)
def add_label_clabeltext(self, x, y, rotation, lev, cvalue):
"""
Add contour label using :class:`ClabelText` class.
"""
# x, y, rotation is given in pixel coordinate. Convert them to
# the data coordinate and create a label using ClabelText
        # class. This way, the rotation of the clabel is along the
# contour line always.
t = self._get_label_clabeltext(x, y, rotation)
self._add_label(t, x, y, lev, cvalue)
def add_label_near(self, x, y, inline=True, inline_spacing=5,
transform=None):
"""
Add a label near the point (x, y). If transform is None
(default), (x, y) is in data coordinates; if transform is
False, (x, y) is in display coordinates; otherwise, the
specified transform will be used to translate (x, y) into
display coordinates.
*inline*:
controls whether the underlying contour is removed or
not. Default is *True*.
*inline_spacing*:
space in pixels to leave on each side of label when
placing inline. Defaults to 5. This spacing will be
exact for labels at locations where the contour is
straight, less so for labels on curved contours.
"""
if transform is None:
transform = self.ax.transData
if transform:
x, y = transform.transform_point((x, y))
# find the nearest contour _in screen units_
conmin, segmin, imin, xmin, ymin = self.find_nearest_contour(
x, y, self.labelIndiceList)[:5]
# The calc_label_rot_and_inline routine requires that (xmin,ymin)
# be a vertex in the path. So, if it isn't, add a vertex here
# grab the paths from the collections
paths = self.collections[conmin].get_paths()
# grab the correct segment
active_path = paths[segmin]
# grab its vertices
lc = active_path.vertices
# sort out where the new vertex should be added data-units
xcmin = self.ax.transData.inverted().transform_point([xmin, ymin])
# if there isn't a vertex close enough
if not np.allclose(xcmin, lc[imin]):
# insert new data into the vertex list
lc = np.r_[lc[:imin], np.array(xcmin)[None, :], lc[imin:]]
# replace the path with the new one
paths[segmin] = mpath.Path(lc)
# Get index of nearest level in subset of levels used for labeling
lmin = self.labelIndiceList.index(conmin)
# Coordinates of contour
paths = self.collections[conmin].get_paths()
lc = paths[segmin].vertices
# In pixel/screen space
slc = self.ax.transData.transform(lc)
# Get label width for rotating labels and breaking contours
lw = self.get_label_width(self.labelLevelList[lmin],
self.labelFmt, self.labelFontSizeList[lmin])
# lw is in points.
lw *= self.ax.figure.dpi / 72.0 # scale to screen coordinates
# now lw in pixels
# Figure out label rotation.
if inline:
lcarg = lc
else:
lcarg = None
rotation, nlc = self.calc_label_rot_and_inline(
slc, imin, lw, lcarg,
inline_spacing)
self.add_label(xmin, ymin, rotation, self.labelLevelList[lmin],
self.labelCValueList[lmin])
if inline:
# Remove old, not looping over paths so we can do this up front
paths.pop(segmin)
# Add paths if not empty or single point
for n in nlc:
if len(n) > 1:
paths.append(mpath.Path(n))
def pop_label(self, index=-1):
"""Defaults to removing last label, but any index can be supplied"""
self.labelCValues.pop(index)
t = self.labelTexts.pop(index)
t.remove()
def labels(self, inline, inline_spacing):
if self._use_clabeltext:
add_label = self.add_label_clabeltext
else:
add_label = self.add_label
for icon, lev, fsize, cvalue in zip(
self.labelIndiceList, self.labelLevelList,
self.labelFontSizeList, self.labelCValueList):
con = self.collections[icon]
trans = con.get_transform()
lw = self.get_label_width(lev, self.labelFmt, fsize)
lw *= self.ax.figure.dpi / 72.0 # scale to screen coordinates
additions = []
paths = con.get_paths()
for segNum, linepath in enumerate(paths):
lc = linepath.vertices # Line contour
slc0 = trans.transform(lc) # Line contour in screen coords
# For closed polygons, add extra point to avoid division by
# zero in print_label and locate_label. Other than these
# functions, this is not necessary and should probably be
# eventually removed.
if _is_closed_polygon(lc):
slc = np.r_[slc0, slc0[1:2, :]]
else:
slc = slc0
# Check if long enough for a label
if self.print_label(slc, lw):
x, y, ind = self.locate_label(slc, lw)
if inline:
lcarg = lc
else:
lcarg = None
rotation, new = self.calc_label_rot_and_inline(
slc0, ind, lw, lcarg,
inline_spacing)
# Actually add the label
add_label(x, y, rotation, lev, cvalue)
# If inline, add new contours
if inline:
for n in new:
# Add path if not empty or single point
if len(n) > 1:
additions.append(mpath.Path(n))
else: # If not adding label, keep old path
additions.append(linepath)
# After looping over all segments on a contour, remove old
# paths and add new ones if inlining
if inline:
del paths[:]
paths.extend(additions)
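# Illustrative sketch (not part of the original module): the labelling mixin is
# normally driven through the Axes API; the data below are made up.
def _clabel_example():
    import matplotlib.pyplot as plt
    x, y = np.meshgrid(np.linspace(-3, 3, 100), np.linspace(-3, 3, 100))
    z = np.exp(-(x ** 2 + y ** 2))
    fig, ax = plt.subplots()
    cs = ax.contour(x, y, z, levels=[0.2, 0.5, 0.8])
    ax.clabel(cs, inline=True, fmt='%1.2f', fontsize=8)
    return fig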
def _find_closest_point_on_leg(p1, p2, p0):
"""find closest point to p0 on line segment connecting p1 and p2"""
# handle degenerate case
if np.all(p2 == p1):
d = np.sum((p0 - p1)**2)
return d, p1
d21 = p2 - p1
d01 = p0 - p1
# project on to line segment to find closest point
proj = np.dot(d01, d21) / np.dot(d21, d21)
if proj < 0:
proj = 0
if proj > 1:
proj = 1
pc = p1 + proj * d21
# find squared distance
d = np.sum((pc-p0)**2)
return d, pc
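# Illustrative sketch (not part of the original module): the closest point to
# (1, 1) on the segment from (0, 0) to (2, 0) is (1, 0), at squared distance 1.
def _closest_point_on_leg_example():
    d, pc = _find_closest_point_on_leg(np.array([0., 0.]),
                                       np.array([2., 0.]),
                                       np.array([1., 1.]))
    return d, pc   # (1.0, array([1., 0.]))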
def _is_closed_polygon(X):
"""
Tests whether first and last object in a sequence are the same. These are
presumably coordinates on a polygonal curve, in which case this function
tests if that curve is closed.
"""
return np.all(X[0] == X[-1])
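# Illustrative sketch (not part of the original module): a square whose last
# vertex repeats the first is reported as closed.
def _is_closed_polygon_example():
    square = np.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.], [0., 0.]])
    return _is_closed_polygon(square)   # True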
def _find_closest_point_on_path(lc, point):
"""
lc: coordinates of vertices
point: coordinates of test point
"""
# find index of closest vertex for this segment
ds = np.sum((lc - point[None, :])**2, 1)
imin = np.argmin(ds)
dmin = np.inf
xcmin = None
legmin = (None, None)
closed = _is_closed_polygon(lc)
# build list of legs before and after this vertex
legs = []
if imin > 0 or closed:
legs.append(((imin-1) % len(lc), imin))
if imin < len(lc) - 1 or closed:
legs.append((imin, (imin+1) % len(lc)))
for leg in legs:
d, xc = _find_closest_point_on_leg(lc[leg[0]], lc[leg[1]], point)
if d < dmin:
dmin = d
xcmin = xc
legmin = leg
return (dmin, xcmin, legmin)
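# Illustrative sketch (not part of the original module): nearest point on an
# open polyline to a query point; here the projection lands on the first leg.
def _closest_point_on_path_example():
    lc = np.array([[0., 0.], [1., 0.], [1., 1.]])
    dmin, xcmin, legmin = _find_closest_point_on_path(lc, np.array([0.4, 0.3]))
    return dmin, xcmin, legmin   # about (0.09, [0.4, 0.0], (0, 1))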
class ContourSet(cm.ScalarMappable, ContourLabeler):
"""
Store a set of contour lines or filled regions.
User-callable method: clabel
Attributes
----------
ax:
The axes object in which the contours are drawn.
collections:
A silent_list of LineCollections or PolyCollections.
levels:
Contour levels.
layers:
Same as levels for line contours; half-way between
levels for filled contours. See :meth:`_process_colors`.
"""
def __init__(self, ax, *args, **kwargs):
"""
Draw contour lines or filled regions, depending on
whether keyword arg *filled* is ``False`` (default) or ``True``.
The first three arguments must be:
*ax*: axes object.
*levels*: [level0, level1, ..., leveln]
A list of floating point numbers indicating the contour
levels.
*allsegs*: [level0segs, level1segs, ...]
List of all the polygon segments for all the *levels*.
For contour lines ``len(allsegs) == len(levels)``, and for
            filled contour regions ``len(allsegs) == len(levels)-1``. The lists
should look like::
level0segs = [polygon0, polygon1, ...]
polygon0 = array_like [[x0,y0], [x1,y1], ...]
*allkinds*: *None* or [level0kinds, level1kinds, ...]
Optional list of all the polygon vertex kinds (code types), as
described and used in Path. This is used to allow multiply-
connected paths such as holes within filled polygons.
If not ``None``, ``len(allkinds) == len(allsegs)``. The lists
should look like::
level0kinds = [polygon0kinds, ...]
polygon0kinds = [vertexcode0, vertexcode1, ...]
If *allkinds* is not ``None``, usually all polygons for a
particular contour level are grouped together so that
``level0segs = [polygon0]`` and ``level0kinds = [polygon0kinds]``.
Keyword arguments are as described in the docstring of
`~.Axes.contour`.
"""
self.ax = ax
self.levels = kwargs.pop('levels', None)
self.filled = kwargs.pop('filled', False)
self.linewidths = kwargs.pop('linewidths', None)
self.linestyles = kwargs.pop('linestyles', None)
self.hatches = kwargs.pop('hatches', [None])
self.alpha = kwargs.pop('alpha', None)
self.origin = kwargs.pop('origin', None)
self.extent = kwargs.pop('extent', None)
cmap = kwargs.pop('cmap', None)
self.colors = kwargs.pop('colors', None)
norm = kwargs.pop('norm', None)
vmin = kwargs.pop('vmin', None)
vmax = kwargs.pop('vmax', None)
self.extend = kwargs.pop('extend', 'neither')
self.antialiased = kwargs.pop('antialiased', None)
if self.antialiased is None and self.filled:
self.antialiased = False # eliminate artifacts; we are not
# stroking the boundaries.
# The default for line contours will be taken from
# the LineCollection default, which uses the
# rcParams['lines.antialiased']
self.nchunk = kwargs.pop('nchunk', 0)
self.locator = kwargs.pop('locator', None)
if (isinstance(norm, colors.LogNorm)
or isinstance(self.locator, ticker.LogLocator)):
self.logscale = True
if norm is None:
norm = colors.LogNorm()
            if self.extend != 'neither':
raise ValueError('extend kwarg does not work yet with log '
' scale')
else:
self.logscale = False
if self.origin not in [None, 'lower', 'upper', 'image']:
raise ValueError("If given, *origin* must be one of [ 'lower' |"
" 'upper' | 'image']")
if self.extent is not None and len(self.extent) != 4:
raise ValueError("If given, *extent* must be '[ *None* |"
" (x0,x1,y0,y1) ]'")
if self.colors is not None and cmap is not None:
raise ValueError('Either colors or cmap must be None')
if self.origin == 'image':
self.origin = mpl.rcParams['image.origin']
self._transform = kwargs.pop('transform', None)
kwargs = self._process_args(*args, **kwargs)
self._process_levels()
if self.colors is not None:
ncolors = len(self.levels)
if self.filled:
ncolors -= 1
i0 = 0
# Handle the case where colors are given for the extended
# parts of the contour.
extend_min = self.extend in ['min', 'both']
extend_max = self.extend in ['max', 'both']
use_set_under_over = False
# if we are extending the lower end, and we've been given enough
# colors then skip the first color in the resulting cmap. For the
# extend_max case we don't need to worry about passing more colors
# than ncolors as ListedColormap will clip.
total_levels = ncolors + int(extend_min) + int(extend_max)
if (len(self.colors) == total_levels and
any([extend_min, extend_max])):
use_set_under_over = True
if extend_min:
i0 = 1
cmap = colors.ListedColormap(self.colors[i0:None], N=ncolors)
if use_set_under_over:
if extend_min:
cmap.set_under(self.colors[0])
if extend_max:
cmap.set_over(self.colors[-1])
if self.filled:
self.collections = cbook.silent_list('mcoll.PathCollection')
else:
self.collections = cbook.silent_list('mcoll.LineCollection')
# label lists must be initialized here
self.labelTexts = []
self.labelCValues = []
kw = {'cmap': cmap}
if norm is not None:
kw['norm'] = norm
# sets self.cmap, norm if needed;
cm.ScalarMappable.__init__(self, **kw)
if vmin is not None:
self.norm.vmin = vmin
if vmax is not None:
self.norm.vmax = vmax
self._process_colors()
self.allsegs, self.allkinds = self._get_allsegs_and_allkinds()
if self.filled:
if self.linewidths is not None:
warnings.warn('linewidths is ignored by contourf')
# Lower and upper contour levels.
lowers, uppers = self._get_lowers_and_uppers()
# Ensure allkinds can be zipped below.
if self.allkinds is None:
self.allkinds = [None] * len(self.allsegs)
# Default zorder taken from Collection
zorder = kwargs.pop('zorder', 1)
for level, level_upper, segs, kinds in \
zip(lowers, uppers, self.allsegs, self.allkinds):
paths = self._make_paths(segs, kinds)
col = mcoll.PathCollection(
paths,
antialiaseds=(self.antialiased,),
edgecolors='none',
alpha=self.alpha,
transform=self.get_transform(),
zorder=zorder)
self.ax.add_collection(col, autolim=False)
self.collections.append(col)
else:
tlinewidths = self._process_linewidths()
self.tlinewidths = tlinewidths
tlinestyles = self._process_linestyles()
aa = self.antialiased
if aa is not None:
aa = (self.antialiased,)
# Default zorder taken from LineCollection
zorder = kwargs.pop('zorder', 2)
for level, width, lstyle, segs in \
zip(self.levels, tlinewidths, tlinestyles, self.allsegs):
col = mcoll.LineCollection(
segs,
antialiaseds=aa,
linewidths=width,
linestyles=[lstyle],
alpha=self.alpha,
transform=self.get_transform(),
zorder=zorder)
col.set_label('_nolegend_')
self.ax.add_collection(col, autolim=False)
self.collections.append(col)
for col in self.collections:
col.sticky_edges.x[:] = [self._mins[0], self._maxs[0]]
col.sticky_edges.y[:] = [self._mins[1], self._maxs[1]]
self.ax.update_datalim([self._mins, self._maxs])
self.ax.autoscale_view(tight=True)
self.changed() # set the colors
if kwargs:
s = ", ".join(map(repr, kwargs))
warnings.warn('The following kwargs were not used by contour: ' +
s)
def get_transform(self):
"""
Return the :class:`~matplotlib.transforms.Transform`
instance used by this ContourSet.
"""
if self._transform is None:
self._transform = self.ax.transData
elif (not isinstance(self._transform, mtransforms.Transform)
and hasattr(self._transform, '_as_mpl_transform')):
self._transform = self._transform._as_mpl_transform(self.ax)
return self._transform
def __getstate__(self):
state = self.__dict__.copy()
# the C object _contour_generator cannot currently be pickled. This
# isn't a big issue as it is not actually used once the contour has
# been calculated.
state['_contour_generator'] = None
return state
def legend_elements(self, variable_name='x', str_format=str):
"""
Return a list of artist and labels suitable for passing through
to :func:`plt.legend` which represent this ContourSet.
Args:
            *variable_name*: the string used inside the inequality shown
              on the labels
*str_format*: function used to format the numbers in the labels
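        A minimal usage sketch (the axes and array names here are
        illustrative, not part of this method's API)::

            cs = ax.contourf(X, Y, Z)
            artists, labels = cs.legend_elements()
            ax.legend(artists, labels)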
"""
artists = []
labels = []
if self.filled:
lowers, uppers = self._get_lowers_and_uppers()
n_levels = len(self.collections)
for i, (collection, lower, upper) in enumerate(
zip(self.collections, lowers, uppers)):
patch = mpatches.Rectangle(
(0, 0), 1, 1,
facecolor=collection.get_facecolor()[0],
hatch=collection.get_hatch(),
alpha=collection.get_alpha())
artists.append(patch)
lower = str_format(lower)
upper = str_format(upper)
if i == 0 and self.extend in ('min', 'both'):
labels.append(r'$%s \leq %s$' % (variable_name,
lower))
elif i == n_levels - 1 and self.extend in ('max', 'both'):
labels.append(r'$%s > %s$' % (variable_name,
upper))
else:
labels.append(r'$%s < %s \leq %s$' % (lower,
variable_name,
upper))
else:
for collection, level in zip(self.collections, self.levels):
patch = mcoll.LineCollection(None)
patch.update_from(collection)
artists.append(patch)
# format the level for insertion into the labels
level = str_format(level)
labels.append(r'$%s = %s$' % (variable_name, level))
return artists, labels
def _process_args(self, *args, **kwargs):
"""
Process *args* and *kwargs*; override in derived classes.
Must set self.levels, self.zmin and self.zmax, and update axes
limits.
"""
self.levels = args[0]
self.allsegs = args[1]
self.allkinds = len(args) > 2 and args[2] or None
self.zmax = np.max(self.levels)
self.zmin = np.min(self.levels)
self._auto = False
# Check lengths of levels and allsegs.
if self.filled:
if len(self.allsegs) != len(self.levels) - 1:
                raise ValueError('must be one fewer segment list than '
                                 'the number of levels')
else:
if len(self.allsegs) != len(self.levels):
raise ValueError('must be same number of segments as levels')
# Check length of allkinds.
if (self.allkinds is not None and
len(self.allkinds) != len(self.allsegs)):
raise ValueError('allkinds has different length to allsegs')
# Determine x,y bounds and update axes data limits.
flatseglist = [s for seg in self.allsegs for s in seg]
points = np.concatenate(flatseglist, axis=0)
self._mins = points.min(axis=0)
self._maxs = points.max(axis=0)
return kwargs
def _get_allsegs_and_allkinds(self):
"""
Override in derived classes to create and return allsegs and allkinds.
allkinds can be None.
"""
return self.allsegs, self.allkinds
def _get_lowers_and_uppers(self):
"""
Return (lowers,uppers) for filled contours.
"""
lowers = self._levels[:-1]
if self.zmin == lowers[0]:
# Include minimum values in lowest interval
lowers = lowers.copy() # so we don't change self._levels
if self.logscale:
lowers[0] = 0.99 * self.zmin
else:
lowers[0] -= 1
uppers = self._levels[1:]
return (lowers, uppers)
def _make_paths(self, segs, kinds):
if kinds is not None:
return [mpath.Path(seg, codes=kind)
for seg, kind in zip(segs, kinds)]
else:
return [mpath.Path(seg) for seg in segs]
def changed(self):
tcolors = [(tuple(rgba),)
for rgba in self.to_rgba(self.cvalues, alpha=self.alpha)]
self.tcolors = tcolors
hatches = self.hatches * len(tcolors)
for color, hatch, collection in zip(tcolors, hatches,
self.collections):
if self.filled:
collection.set_facecolor(color)
# update the collection's hatch (may be None)
collection.set_hatch(hatch)
else:
collection.set_color(color)
for label, cv in zip(self.labelTexts, self.labelCValues):
label.set_alpha(self.alpha)
label.set_color(self.labelMappable.to_rgba(cv))
# add label colors
cm.ScalarMappable.changed(self)
def _autolev(self, N):
"""
Select contour levels to span the data.
We need two more levels for filled contours than for
line contours, because for the latter we need to specify
the lower and upper boundary of each range. For example,
a single contour boundary, say at z = 0, requires only
one contour line, but two filled regions, and therefore
three levels to provide boundaries for both regions.
"""
if self.locator is None:
if self.logscale:
self.locator = ticker.LogLocator()
else:
self.locator = ticker.MaxNLocator(N + 1, min_n_ticks=1)
lev = self.locator.tick_values(self.zmin, self.zmax)
self._auto = True
return lev
def _contour_level_args(self, z, args):
"""
Determine the contour levels and store in self.levels.
"""
if self.filled:
fn = 'contourf'
else:
fn = 'contour'
self._auto = False
if self.levels is None:
if len(args) == 0:
lev = self._autolev(7)
else:
level_arg = args[0]
try:
if type(level_arg) == int:
lev = self._autolev(level_arg)
else:
lev = np.asarray(level_arg).astype(np.float64)
except:
raise TypeError(
"Last {0} arg must give levels; see help({0})"
.format(fn))
self.levels = lev
else:
self.levels = np.asarray(self.levels).astype(np.float64)
if not self.filled:
inside = (self.levels > self.zmin) & (self.levels < self.zmax)
self.levels = self.levels[inside]
if len(self.levels) == 0:
self.levels = [self.zmin]
warnings.warn("No contour levels were found"
" within the data range.")
if self.filled and len(self.levels) < 2:
raise ValueError("Filled contours require at least 2 levels.")
if len(self.levels) > 1 and np.min(np.diff(self.levels)) <= 0.0:
raise ValueError("Contour levels must be increasing")
def _process_levels(self):
"""
Assign values to :attr:`layers` based on :attr:`levels`,
adding extended layers as needed if contours are filled.
For line contours, layers simply coincide with levels;
a line is a thin layer. No extended levels are needed
with line contours.
"""
# Make a private _levels to include extended regions; we
# want to leave the original levels attribute unchanged.
# (Colorbar needs this even for line contours.)
self._levels = list(self.levels)
if self.extend in ('both', 'min'):
self._levels.insert(0, min(self.levels[0], self.zmin) - 1)
if self.extend in ('both', 'max'):
self._levels.append(max(self.levels[-1], self.zmax) + 1)
self._levels = np.asarray(self._levels)
if not self.filled:
self.layers = self.levels
return
# layer values are mid-way between levels
self.layers = 0.5 * (self._levels[:-1] + self._levels[1:])
# ...except that extended layers must be outside the
# normed range:
if self.extend in ('both', 'min'):
self.layers[0] = -1e150
if self.extend in ('both', 'max'):
self.layers[-1] = 1e150
def _process_colors(self):
"""
Color argument processing for contouring.
Note that we base the color mapping on the contour levels
and layers, not on the actual range of the Z values. This
means we don't have to worry about bad values in Z, and we
always have the full dynamic range available for the selected
levels.
The color is based on the midpoint of the layer, except for
extended end layers. By default, the norm vmin and vmax
are the extreme values of the non-extended levels. Hence,
the layer color extremes are not the extreme values of
the colormap itself, but approach those values as the number
of levels increases. An advantage of this scheme is that
line contours, when added to filled contours, take on
colors that are consistent with those of the filled regions;
for example, a contour line on the boundary between two
regions will have a color intermediate between those
of the regions.
"""
self.monochrome = self.cmap.monochrome
if self.colors is not None:
# Generate integers for direct indexing.
i0, i1 = 0, len(self.levels)
if self.filled:
i1 -= 1
# Out of range indices for over and under:
if self.extend in ('both', 'min'):
i0 -= 1
if self.extend in ('both', 'max'):
i1 += 1
self.cvalues = list(range(i0, i1))
self.set_norm(colors.NoNorm())
else:
self.cvalues = self.layers
self.set_array(self.levels)
self.autoscale_None()
if self.extend in ('both', 'max', 'min'):
self.norm.clip = False
# self.tcolors are set by the "changed" method
def _process_linewidths(self):
linewidths = self.linewidths
Nlev = len(self.levels)
if linewidths is None:
tlinewidths = [(mpl.rcParams['lines.linewidth'],)] * Nlev
else:
if not cbook.iterable(linewidths):
linewidths = [linewidths] * Nlev
else:
linewidths = list(linewidths)
if len(linewidths) < Nlev:
nreps = int(np.ceil(Nlev / len(linewidths)))
linewidths = linewidths * nreps
if len(linewidths) > Nlev:
linewidths = linewidths[:Nlev]
tlinewidths = [(w,) for w in linewidths]
return tlinewidths
def _process_linestyles(self):
linestyles = self.linestyles
Nlev = len(self.levels)
if linestyles is None:
tlinestyles = ['solid'] * Nlev
if self.monochrome:
neg_ls = mpl.rcParams['contour.negative_linestyle']
eps = - (self.zmax - self.zmin) * 1e-15
for i, lev in enumerate(self.levels):
if lev < eps:
tlinestyles[i] = neg_ls
else:
if isinstance(linestyles, six.string_types):
tlinestyles = [linestyles] * Nlev
elif cbook.iterable(linestyles):
tlinestyles = list(linestyles)
if len(tlinestyles) < Nlev:
nreps = int(np.ceil(Nlev / len(linestyles)))
tlinestyles = tlinestyles * nreps
if len(tlinestyles) > Nlev:
tlinestyles = tlinestyles[:Nlev]
else:
raise ValueError("Unrecognized type for linestyles kwarg")
return tlinestyles
def get_alpha(self):
"""returns alpha to be applied to all ContourSet artists"""
return self.alpha
def set_alpha(self, alpha):
"""sets alpha for all ContourSet artists"""
self.alpha = alpha
self.changed()
def find_nearest_contour(self, x, y, indices=None, pixel=True):
"""
        Finds the contour that is closest to a point. Defaults to
measuring distance in pixels (screen space - useful for manual
contour labeling), but this can be controlled via a keyword
argument.
Returns a tuple containing the contour, segment, index of
segment, x & y of segment point and distance to minimum point.
Optional keyword arguments:
*indices*:
Indexes of contour levels to consider when looking for
nearest point. Defaults to using all levels.
*pixel*:
If *True*, measure distance in pixel space, if not, measure
distance in axes space. Defaults to *True*.
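        A hedged example of unpacking the return value (``cs``, ``X``, ``Y``,
        ``Z`` and the query point are illustrative)::

            cs = ax.contour(X, Y, Z)
            con, seg, idx, xc, yc, d = cs.find_nearest_contour(
                1.0, 2.0, pixel=False)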
"""
# This function uses a method that is probably quite
# inefficient based on converting each contour segment to
# pixel coordinates and then comparing the given point to
# those coordinates for each contour. This will probably be
# quite slow for complex contours, but for normal use it works
# sufficiently well that the time is not noticeable.
# Nonetheless, improvements could probably be made.
if indices is None:
indices = list(xrange(len(self.levels)))
dmin = np.inf
conmin = None
segmin = None
xmin = None
ymin = None
point = np.array([x, y])
for icon in indices:
con = self.collections[icon]
trans = con.get_transform()
paths = con.get_paths()
for segNum, linepath in enumerate(paths):
lc = linepath.vertices
# transfer all data points to screen coordinates if desired
if pixel:
lc = trans.transform(lc)
d, xc, leg = _find_closest_point_on_path(lc, point)
if d < dmin:
dmin = d
conmin = icon
segmin = segNum
imin = leg[1]
xmin = xc[0]
ymin = xc[1]
return (conmin, segmin, imin, xmin, ymin, dmin)
class QuadContourSet(ContourSet):
"""
Create and store a set of contour lines or filled regions.
User-callable method: :meth:`clabel`
Attributes
----------
ax:
The axes object in which the contours are drawn.
collections:
A silent_list of LineCollections or PolyCollections.
levels:
Contour levels.
layers:
Same as levels for line contours; half-way between
levels for filled contours. See :meth:`_process_colors` method.
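    A short illustration of accessing these attributes (``ax``, ``X``, ``Y``
    and ``Z`` are assumed to exist)::

        cs = ax.contour(X, Y, Z)
        print(cs.levels)            # the contour levels actually used
        print(len(cs.collections))  # one LineCollection per level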
"""
def _process_args(self, *args, **kwargs):
"""
Process args and kwargs.
"""
if isinstance(args[0], QuadContourSet):
if self.levels is None:
self.levels = args[0].levels
self.zmin = args[0].zmin
self.zmax = args[0].zmax
self._corner_mask = args[0]._corner_mask
contour_generator = args[0]._contour_generator
self._mins = args[0]._mins
self._maxs = args[0]._maxs
else:
self._corner_mask = kwargs.pop('corner_mask', None)
if self._corner_mask is None:
self._corner_mask = mpl.rcParams['contour.corner_mask']
x, y, z = self._contour_args(args, kwargs)
_mask = ma.getmask(z)
if _mask is ma.nomask or not _mask.any():
_mask = None
contour_generator = _contour.QuadContourGenerator(
x, y, z.filled(), _mask, self._corner_mask, self.nchunk)
t = self.get_transform()
# if the transform is not trans data, and some part of it
# contains transData, transform the xs and ys to data coordinates
if (t != self.ax.transData and
any(t.contains_branch_seperately(self.ax.transData))):
trans_to_data = t - self.ax.transData
pts = (np.vstack([x.flat, y.flat]).T)
transformed_pts = trans_to_data.transform(pts)
x = transformed_pts[..., 0]
y = transformed_pts[..., 1]
self._mins = [ma.min(x), ma.min(y)]
self._maxs = [ma.max(x), ma.max(y)]
self._contour_generator = contour_generator
return kwargs
def _get_allsegs_and_allkinds(self):
"""Compute ``allsegs`` and ``allkinds`` using C extension."""
allsegs = []
if self.filled:
lowers, uppers = self._get_lowers_and_uppers()
allkinds = []
for level, level_upper in zip(lowers, uppers):
vertices, kinds = \
self._contour_generator.create_filled_contour(
level, level_upper)
allsegs.append(vertices)
allkinds.append(kinds)
else:
allkinds = None
for level in self.levels:
vertices = self._contour_generator.create_contour(level)
allsegs.append(vertices)
return allsegs, allkinds
def _contour_args(self, args, kwargs):
if self.filled:
fn = 'contourf'
else:
fn = 'contour'
Nargs = len(args)
if Nargs <= 2:
z = ma.asarray(args[0], dtype=np.float64)
x, y = self._initialize_x_y(z)
args = args[1:]
elif Nargs <= 4:
x, y, z = self._check_xyz(args[:3], kwargs)
args = args[3:]
else:
raise TypeError("Too many arguments to %s; see help(%s)" %
(fn, fn))
z = ma.masked_invalid(z, copy=False)
self.zmax = float(z.max())
self.zmin = float(z.min())
if self.logscale and self.zmin <= 0:
z = ma.masked_where(z <= 0, z)
warnings.warn('Log scale: values of z <= 0 have been masked')
self.zmin = float(z.min())
self._contour_level_args(z, args)
return (x, y, z)
def _check_xyz(self, args, kwargs):
"""
For functions like contour, check that the dimensions
of the input arrays match; if x and y are 1D, convert
them to 2D using meshgrid.
Possible change: I think we should make and use an ArgumentError
Exception class (here and elsewhere).
"""
x, y = args[:2]
kwargs = self.ax._process_unit_info(xdata=x, ydata=y, kwargs=kwargs)
x = self.ax.convert_xunits(x)
y = self.ax.convert_yunits(y)
x = np.asarray(x, dtype=np.float64)
y = np.asarray(y, dtype=np.float64)
z = ma.asarray(args[2], dtype=np.float64)
if z.ndim != 2:
raise TypeError("Input z must be a 2D array.")
elif z.shape[0] < 2 or z.shape[1] < 2:
raise TypeError("Input z must be at least a 2x2 array.")
else:
Ny, Nx = z.shape
if x.ndim != y.ndim:
raise TypeError("Number of dimensions of x and y should match.")
if x.ndim == 1:
nx, = x.shape
ny, = y.shape
if nx != Nx:
raise TypeError("Length of x must be number of columns in z.")
if ny != Ny:
raise TypeError("Length of y must be number of rows in z.")
x, y = np.meshgrid(x, y)
elif x.ndim == 2:
if x.shape != z.shape:
raise TypeError("Shape of x does not match that of z: found "
"{0} instead of {1}.".format(x.shape, z.shape))
if y.shape != z.shape:
raise TypeError("Shape of y does not match that of z: found "
"{0} instead of {1}.".format(y.shape, z.shape))
else:
raise TypeError("Inputs x and y must be 1D or 2D.")
return x, y, z
def _initialize_x_y(self, z):
"""
Return X, Y arrays such that contour(Z) will match imshow(Z)
if origin is not None.
The center of pixel Z[i,j] depends on origin:
if origin is None, x = j, y = i;
if origin is 'lower', x = j + 0.5, y = i + 0.5;
if origin is 'upper', x = j + 0.5, y = Nrows - i - 0.5
If extent is not None, x and y will be scaled to match,
as in imshow.
If origin is None and extent is not None, then extent
will give the minimum and maximum values of x and y.
"""
if z.ndim != 2:
raise TypeError("Input must be a 2D array.")
elif z.shape[0] < 2 or z.shape[1] < 2:
raise TypeError("Input z must be at least a 2x2 array.")
else:
Ny, Nx = z.shape
if self.origin is None: # Not for image-matching.
if self.extent is None:
return np.meshgrid(np.arange(Nx), np.arange(Ny))
else:
x0, x1, y0, y1 = self.extent
x = np.linspace(x0, x1, Nx)
y = np.linspace(y0, y1, Ny)
return np.meshgrid(x, y)
# Match image behavior:
if self.extent is None:
x0, x1, y0, y1 = (0, Nx, 0, Ny)
else:
x0, x1, y0, y1 = self.extent
dx = (x1 - x0) / Nx
dy = (y1 - y0) / Ny
x = x0 + (np.arange(Nx) + 0.5) * dx
y = y0 + (np.arange(Ny) + 0.5) * dy
if self.origin == 'upper':
y = y[::-1]
return np.meshgrid(x, y)
_contour_doc = """
Plot contours.
:func:`~matplotlib.pyplot.contour` and
:func:`~matplotlib.pyplot.contourf` draw contour lines and
filled contours, respectively. Except as noted, function
signatures and return values are the same for both versions.
:func:`~matplotlib.pyplot.contourf` differs from the MATLAB
version in that it does not draw the polygon edges.
To draw edges, add line contours with
calls to :func:`~matplotlib.pyplot.contour`.
Call signatures::
contour(Z)
make a contour plot of an array *Z*. The level values are chosen
automatically.
::
contour(X,Y,Z)
*X*, *Y* specify the (x, y) coordinates of the surface
::
contour(Z,N)
contour(X,Y,Z,N)
contour up to *N+1* automatically chosen contour levels
(*N* intervals).
::
contour(Z,V)
contour(X,Y,Z,V)
draw contour lines at the values specified in sequence *V*,
which must be in increasing order.
::
contourf(..., V)
fill the ``len(V)-1`` regions between the values in *V*,
which must be in increasing order.
::
contour(Z, **kwargs)
Use keyword args to control colors, linewidth, origin, cmap ... see
below for more details.
*X* and *Y* must both be 2-D with the same shape as *Z*, or they
must both be 1-D such that ``len(X)`` is the number of columns in
*Z* and ``len(Y)`` is the number of rows in *Z*.
``C = contour(...)`` returns a
:class:`~matplotlib.contour.QuadContourSet` object.
Optional keyword arguments:
*corner_mask*: bool, optional
Enable/disable corner masking, which only has an effect if *Z* is
a masked array. If ``False``, any quad touching a masked point is
masked out. If ``True``, only the triangular corners of quads
nearest those points are always masked out, other triangular
corners comprising three unmasked points are contoured as usual.
Defaults to ``rcParams['contour.corner_mask']``, which defaults to
``True``.
*colors*: [ *None* | string | (mpl_colors) ]
If *None*, the colormap specified by cmap will be used.
If a string, like 'r' or 'red', all levels will be plotted in this
color.
If a tuple of matplotlib color args (string, float, rgb, etc),
different levels will be plotted in different colors in the order
specified.
*alpha*: float
The alpha blending value
*cmap*: [ *None* | Colormap ]
A cm :class:`~matplotlib.colors.Colormap` instance or
*None*. If *cmap* is *None* and *colors* is *None*, a
default Colormap is used.
*norm*: [ *None* | Normalize ]
A :class:`matplotlib.colors.Normalize` instance for
scaling data values to colors. If *norm* is *None* and
*colors* is *None*, the default linear scaling is used.
*vmin*, *vmax*: [ *None* | scalar ]
If not *None*, either or both of these values will be
supplied to the :class:`matplotlib.colors.Normalize`
instance, overriding the default color scaling based on
*levels*.
*levels*: [level0, level1, ..., leveln]
A list of floating point numbers indicating the level
curves to draw, in increasing order; e.g., to draw just
the zero contour pass ``levels=[0]``
*origin*: [ *None* | 'upper' | 'lower' | 'image' ]
If *None*, the first value of *Z* will correspond to the
lower left corner, location (0,0). If 'image', the rc
value for ``image.origin`` will be used.
This keyword is not active if *X* and *Y* are specified in
the call to contour.
*extent*: [ *None* | (x0,x1,y0,y1) ]
If *origin* is not *None*, then *extent* is interpreted as
in :func:`matplotlib.pyplot.imshow`: it gives the outer
pixel boundaries. In this case, the position of Z[0,0]
is the center of the pixel, not a corner. If *origin* is
*None*, then (*x0*, *y0*) is the position of Z[0,0], and
(*x1*, *y1*) is the position of Z[-1,-1].
This keyword is not active if *X* and *Y* are specified in
the call to contour.
*locator*: [ *None* | ticker.Locator subclass ]
If *locator* is *None*, the default
:class:`~matplotlib.ticker.MaxNLocator` is used. The
locator is used to determine the contour levels if they
are not given explicitly via the *V* argument.
*extend*: [ 'neither' | 'both' | 'min' | 'max' ]
Unless this is 'neither', contour levels are automatically
added to one or both ends of the range so that all data
are included. These added ranges are then mapped to the
special colormap values which default to the ends of the
colormap range, but can be set via
:meth:`matplotlib.colors.Colormap.set_under` and
:meth:`matplotlib.colors.Colormap.set_over` methods.
*xunits*, *yunits*: [ *None* | registered units ]
Override axis units by specifying an instance of a
:class:`matplotlib.units.ConversionInterface`.
*antialiased*: bool
enable antialiasing, overriding the defaults. For
filled contours, the default is *True*. For line contours,
it is taken from rcParams['lines.antialiased'].
*nchunk*: [ 0 | integer ]
If 0, no subdivision of the domain. Specify a positive integer to
divide the domain into subdomains of *nchunk* by *nchunk* quads.
Chunking reduces the maximum length of polygons generated by the
contouring algorithm which reduces the rendering workload passed
on to the backend and also requires slightly less RAM. It can
however introduce rendering artifacts at chunk boundaries depending
on the backend, the *antialiased* flag and value of *alpha*.
contour-only keyword arguments:
*linewidths*: [ *None* | number | tuple of numbers ]
If *linewidths* is *None*, the default width in
``lines.linewidth`` in ``matplotlibrc`` is used.
If a number, all levels will be plotted with this linewidth.
If a tuple, different levels will be plotted with different
linewidths in the order specified.
*linestyles*: [ *None* | 'solid' | 'dashed' | 'dashdot' | 'dotted' ]
If *linestyles* is *None*, the default is 'solid' unless
the lines are monochrome. In that case, negative
contours will take their linestyle from the ``matplotlibrc``
``contour.negative_linestyle`` setting.
*linestyles* can also be an iterable of the above strings
specifying a set of linestyles to be used. If this
iterable is shorter than the number of contour levels
it will be repeated as necessary.
contourf-only keyword arguments:
*hatches*:
A list of cross hatch patterns to use on the filled areas.
If None, no hatching will be added to the contour.
Hatching is supported in the PostScript, PDF, SVG and Agg
backends only.
Note: contourf fills intervals that are closed at the top; that
is, for boundaries *z1* and *z2*, the filled region is::
z1 < z <= z2
There is one exception: if the lowest boundary coincides with
the minimum value of the *z* array, then that minimum value
will be included in the lowest interval.
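        A minimal end-to-end sketch (the array names are illustrative)::

            import numpy as np
            import matplotlib.pyplot as plt

            x = np.linspace(-3, 3, 101)
            X, Y = np.meshgrid(x, x)
            Z = np.exp(-X**2 - Y**2)
            cs = plt.contourf(X, Y, Z, levels=[0.2, 0.5, 0.8], extend='both')
            plt.contour(X, Y, Z, levels=cs.levels, colors='k')
            plt.colorbar(cs)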
"""
cba-pipeline-public
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/__init__.py
"""
This is an object-oriented plotting library.
A procedural interface is provided by the companion pyplot module,
which may be imported directly, e.g.::
import matplotlib.pyplot as plt
or using ipython::
ipython
at your terminal, followed by::
In [1]: %matplotlib
In [2]: import matplotlib.pyplot as plt
at the ipython shell prompt.
For the most part, direct use of the object-oriented library is
encouraged when programming; pyplot is primarily for working
interactively. The
exceptions are the pyplot commands :func:`~matplotlib.pyplot.figure`,
:func:`~matplotlib.pyplot.subplot`,
:func:`~matplotlib.pyplot.subplots`, and
:func:`~pyplot.savefig`, which can greatly simplify scripting.
Modules include:
:mod:`matplotlib.axes`
defines the :class:`~matplotlib.axes.Axes` class. Most pylab
commands are wrappers for :class:`~matplotlib.axes.Axes`
methods. The axes module is the highest level of OO access to
the library.
:mod:`matplotlib.figure`
defines the :class:`~matplotlib.figure.Figure` class.
:mod:`matplotlib.artist`
defines the :class:`~matplotlib.artist.Artist` base class for
all classes that draw things.
:mod:`matplotlib.lines`
defines the :class:`~matplotlib.lines.Line2D` class for
drawing lines and markers
:mod:`matplotlib.patches`
defines classes for drawing polygons
:mod:`matplotlib.text`
defines the :class:`~matplotlib.text.Text`,
:class:`~matplotlib.text.TextWithDash`, and
:class:`~matplotlib.text.Annotate` classes
:mod:`matplotlib.image`
defines the :class:`~matplotlib.image.AxesImage` and
:class:`~matplotlib.image.FigureImage` classes
:mod:`matplotlib.collections`
classes for efficient drawing of groups of lines or polygons
:mod:`matplotlib.colors`
classes for interpreting color specifications and for making
colormaps
:mod:`matplotlib.cm`
colormaps and the :class:`~matplotlib.image.ScalarMappable`
mixin class for providing color mapping functionality to other
classes
:mod:`matplotlib.ticker`
classes for calculating tick mark locations and for formatting
tick labels
:mod:`matplotlib.backends`
a subpackage with modules for various gui libraries and output
formats
The base matplotlib namespace includes:
:data:`~matplotlib.rcParams`
a global dictionary of default configuration settings. It is
initialized by code which may be overridden by a matplotlibrc
file.
:func:`~matplotlib.rc`
a function for setting groups of rcParams values
:func:`~matplotlib.use`
a function for setting the matplotlib backend. If used, this
function must be called immediately after importing matplotlib
for the first time. In particular, it must be called
**before** importing pylab (if pylab is imported).
matplotlib was initially written by John D. Hunter (1968-2012) and is now
developed and maintained by a host of others.
Occasionally the internal documentation (python docstrings) will refer
to MATLAB®, a registered trademark of The MathWorks, Inc.
"""
from __future__ import absolute_import, division, print_function
import six
import atexit
from collections import MutableMapping
import contextlib
import distutils.version
import distutils.sysconfig
import functools
import io
import inspect
import itertools
import locale
import logging
import os
import re
import shutil
import stat
import sys
import tempfile
import warnings
# cbook must import matplotlib only within function
# definitions, so it is safe to import from it here.
from . import cbook
from matplotlib.cbook import (
_backports, mplDeprecation, dedent, get_label, sanitize_sequence)
from matplotlib.compat import subprocess
from matplotlib.rcsetup import defaultParams, validate_backend, cycler
import numpy
from six.moves.urllib.request import urlopen
from six.moves import reload_module as reload
# Get the version from the _version.py versioneer file. For a git checkout,
# this is computed based on the number of commits since the last tag.
from ._version import get_versions
__version__ = str(get_versions()['version'])
del get_versions
_log = logging.getLogger(__name__)
__version__numpy__ = str('1.7.1') # minimum required numpy version
__bibtex__ = r"""@Article{Hunter:2007,
Author = {Hunter, J. D.},
Title = {Matplotlib: A 2D graphics environment},
Journal = {Computing In Science \& Engineering},
Volume = {9},
Number = {3},
Pages = {90--95},
abstract = {Matplotlib is a 2D graphics package used for Python
for application development, interactive scripting, and
publication-quality image generation across user
interfaces and operating systems.},
publisher = {IEEE COMPUTER SOC},
year = 2007
}"""
_python27 = (sys.version_info.major == 2 and sys.version_info.minor >= 7)
_python34 = (sys.version_info.major == 3 and sys.version_info.minor >= 4)
if not (_python27 or _python34):
raise ImportError("Matplotlib requires Python 2.7 or 3.4 or later")
if _python27:
_log.addHandler(logging.NullHandler())
def compare_versions(a, b):
"return True if a is greater than or equal to b"
if a:
if six.PY3:
if isinstance(a, bytes):
a = a.decode('ascii')
if isinstance(b, bytes):
b = b.decode('ascii')
a = distutils.version.LooseVersion(a)
b = distutils.version.LooseVersion(b)
return a >= b
else:
return False
try:
import dateutil
except ImportError:
raise ImportError("Matplotlib requires dateutil")
if not compare_versions(six.__version__, '1.10'):
raise ImportError(
"Matplotlib requires six>=1.10; you have %s" % six.__version__)
try:
import pyparsing
except ImportError:
raise ImportError("Matplotlib requires pyparsing")
else:
if not compare_versions(pyparsing.__version__, '2.0.1'):
raise ImportError(
"Matplotlib requires pyparsing>=2.0.1; you have %s"
% pyparsing.__version__)
if not compare_versions(numpy.__version__, __version__numpy__):
raise ImportError(
"Matplotlib requires numpy>=%s; you have %s" % (
__version__numpy__, numpy.__version__))
if not hasattr(sys, 'argv'): # for modpython
sys.argv = [str('modpython')]
def _is_writable_dir(p):
"""
    p is a string pointing to a putative writable dir -- return True if p
is such a string, else False
"""
return os.access(p, os.W_OK) and os.path.isdir(p)
_verbose_msg = """\
matplotlib.verbose is deprecated;
Command line argument --verbose-LEVEL is deprecated.
This functionality is now provided by the standard
python logging library. To get more (or less) logging output:
import logging
logger = logging.getLogger('matplotlib')
    logger.setLevel(logging.INFO)"""
def _set_logger_verbose_level(level_str='silent', file_str='sys.stdout'):
"""
Use a --verbose-LEVEL level to set the logging level:
"""
levelmap = {'silent': logging.WARNING, 'helpful': logging.INFO,
'debug': logging.DEBUG, 'debug-annoying': logging.DEBUG,
'info': logging.INFO, 'warning': logging.WARNING}
# Check that current state of logger isn't already more verbose
# than the requested level. If it is more verbose, then leave more
# verbose.
newlev = levelmap[level_str]
oldlev = _log.getEffectiveLevel()
if newlev < oldlev:
_log.setLevel(newlev)
std = {
'sys.stdout': sys.stdout,
'sys.stderr': sys.stderr,
}
if file_str in std:
fileo = std[file_str]
else:
fileo = sys.stdout
try:
fileo = open(file_str, 'w')
# if this fails, we will just write to stdout
except IOError:
warnings.warn('could not open log file "{0}"'
'for writing. Check your '
'matplotlibrc'.format(file_str))
console = logging.StreamHandler(fileo)
console.setLevel(newlev)
_log.addHandler(console)
def _parse_commandline():
"""
Check for --verbose-LEVEL type command line arguments and
set logging level appropriately.
"""
levels = ('silent', 'helpful', 'debug', 'debug-annoying',
'info', 'warning')
for arg in sys.argv[1:]:
if arg.startswith('--verbose-'):
level_str = arg[10:]
# If it doesn't match one of ours, then don't even
# bother noting it, we are just a 3rd-party library
# to somebody else's script.
if level_str in levels:
_set_logger_verbose_level(level_str)
_parse_commandline()
class Verbose(object):
"""
A class to handle reporting. Set the fileo attribute to any file
    instance to handle the output. Default is sys.stdout
    """
"""
levels = ('silent', 'helpful', 'debug', 'debug-annoying')
vald = {level: i for i, level in enumerate(levels)}
# parse the verbosity from the command line; flags look like
# --verbose-silent or --verbose-helpful
_commandLineVerbose = None
for arg in sys.argv[1:]:
if not arg.startswith('--verbose-'):
continue
level_str = arg[10:]
# If it doesn't match one of ours, then don't even
# bother noting it, we are just a 3rd-party library
# to somebody else's script.
if level_str in levels:
_commandLineVerbose = level_str
@cbook.deprecated("2.2", message=_verbose_msg)
def __init__(self):
self.set_level('silent')
self.fileo = sys.stdout
@cbook.deprecated("2.2", message=_verbose_msg)
def set_level(self, level):
'set the verbosity to one of the Verbose.levels strings'
if self._commandLineVerbose is not None:
level = self._commandLineVerbose
if level not in self.levels:
warnings.warn('matplotlib: unrecognized --verbose-* string "%s".'
' Legal values are %s' % (level, self.levels))
else:
self.level = level
@cbook.deprecated("2.2", message=_verbose_msg)
def set_fileo(self, fname):
std = {
'sys.stdout': sys.stdout,
'sys.stderr': sys.stderr,
}
if fname in std:
self.fileo = std[fname]
else:
try:
fileo = open(fname, 'w')
except IOError:
raise ValueError('Verbose object could not open log file "{0}"'
' for writing.\nCheck your matplotlibrc '
'verbose.fileo setting'.format(fname))
else:
self.fileo = fileo
@cbook.deprecated("2.2", message=_verbose_msg)
def report(self, s, level='helpful'):
"""
print message s to self.fileo if self.level>=level. Return
value indicates whether a message was issued
"""
if self.ge(level):
print(s, file=self.fileo)
return True
return False
@cbook.deprecated("2.2", message=_verbose_msg)
def wrap(self, fmt, func, level='helpful', always=True):
"""
        return a callable function that wraps func and reports its
output through the verbose handler if current verbosity level
is higher than level
if always is True, the report will occur on every function
call; otherwise only on the first time the function is called
"""
assert callable(func)
def wrapper(*args, **kwargs):
ret = func(*args, **kwargs)
if (always or not wrapper._spoke):
spoke = self.report(fmt % ret, level)
if not wrapper._spoke:
wrapper._spoke = spoke
return ret
wrapper._spoke = False
wrapper.__doc__ = func.__doc__
return wrapper
@cbook.deprecated("2.2", message=_verbose_msg)
def ge(self, level):
'return true if self.level is >= level'
return self.vald[self.level] >= self.vald[level]
with warnings.catch_warnings():
warnings.simplefilter("ignore")
verbose = Verbose()
def _wrap(fmt, func, level=logging.DEBUG, always=True):
"""
return a callable function that wraps func and reports its
output through logger
if always is True, the report will occur on every function
call; otherwise only on the first time the function is called
"""
assert callable(func)
def wrapper(*args, **kwargs):
ret = func(*args, **kwargs)
if (always or not wrapper._spoke):
_log.log(level, fmt % ret)
spoke = True
if not wrapper._spoke:
wrapper._spoke = spoke
return ret
wrapper._spoke = False
wrapper.__doc__ = func.__doc__
return wrapper
def checkdep_dvipng():
try:
s = subprocess.Popen([str('dvipng'), '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = s.communicate()
line = stdout.decode('ascii').split('\n')[1]
v = line.split()[-1]
return v
except (IndexError, ValueError, OSError):
return None
def checkdep_ghostscript():
if checkdep_ghostscript.executable is None:
if sys.platform == 'win32':
# mgs is the name in miktex
gs_execs = ['gswin32c', 'gswin64c', 'mgs', 'gs']
else:
gs_execs = ['gs']
for gs_exec in gs_execs:
try:
s = subprocess.Popen(
[str(gs_exec), '--version'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = s.communicate()
if s.returncode == 0:
v = stdout[:-1].decode('ascii')
checkdep_ghostscript.executable = gs_exec
checkdep_ghostscript.version = v
except (IndexError, ValueError, OSError):
pass
return checkdep_ghostscript.executable, checkdep_ghostscript.version
checkdep_ghostscript.executable = None
checkdep_ghostscript.version = None
# Deprecated, as it is unneeded and some distributions (e.g. MiKTeX 2.9.6350)
# do not actually report the TeX version.
@cbook.deprecated("2.1")
def checkdep_tex():
try:
s = subprocess.Popen([str('tex'), '-version'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = s.communicate()
line = stdout.decode('ascii').split('\n')[0]
pattern = r'3\.1\d+'
match = re.search(pattern, line)
v = match.group(0)
return v
except (IndexError, ValueError, AttributeError, OSError):
return None
def checkdep_pdftops():
try:
s = subprocess.Popen([str('pdftops'), '-v'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = s.communicate()
lines = stderr.decode('ascii').split('\n')
for line in lines:
if 'version' in line:
v = line.split()[-1]
return v
except (IndexError, ValueError, UnboundLocalError, OSError):
return None
def checkdep_inkscape():
if checkdep_inkscape.version is None:
try:
s = subprocess.Popen([str('inkscape'), '-V'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = s.communicate()
lines = stdout.decode('ascii').split('\n')
for line in lines:
if 'Inkscape' in line:
v = line.split()[1]
break
checkdep_inkscape.version = v
except (IndexError, ValueError, UnboundLocalError, OSError):
pass
return checkdep_inkscape.version
checkdep_inkscape.version = None
@cbook.deprecated("2.1")
def checkdep_xmllint():
try:
s = subprocess.Popen([str('xmllint'), '--version'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = s.communicate()
lines = stderr.decode('ascii').split('\n')
for line in lines:
if 'version' in line:
v = line.split()[-1]
break
return v
except (IndexError, ValueError, UnboundLocalError, OSError):
return None
def checkdep_ps_distiller(s):
if not s:
return False
flag = True
gs_req = '8.60'
gs_exec, gs_v = checkdep_ghostscript()
if not compare_versions(gs_v, gs_req):
flag = False
warnings.warn(('matplotlibrc ps.usedistiller option can not be used '
'unless ghostscript-%s or later is installed on your '
'system') % gs_req)
if s == 'xpdf':
pdftops_req = '3.0'
pdftops_req_alt = '0.9' # poppler version numbers, ugh
pdftops_v = checkdep_pdftops()
if compare_versions(pdftops_v, pdftops_req):
pass
elif (compare_versions(pdftops_v, pdftops_req_alt) and not
compare_versions(pdftops_v, '1.0')):
pass
else:
flag = False
warnings.warn(('matplotlibrc ps.usedistiller can not be set to '
'xpdf unless xpdf-%s or later is installed on '
'your system') % pdftops_req)
if flag:
return s
else:
return False
def checkdep_usetex(s):
if not s:
return False
gs_req = '8.60'
dvipng_req = '1.6'
flag = True
if _backports.which("tex") is None:
flag = False
warnings.warn('matplotlibrc text.usetex option can not be used unless '
'TeX is installed on your system')
dvipng_v = checkdep_dvipng()
if not compare_versions(dvipng_v, dvipng_req):
flag = False
warnings.warn('matplotlibrc text.usetex can not be used with *Agg '
'backend unless dvipng-%s or later is installed on '
'your system' % dvipng_req)
gs_exec, gs_v = checkdep_ghostscript()
if not compare_versions(gs_v, gs_req):
flag = False
warnings.warn('matplotlibrc text.usetex can not be used unless '
'ghostscript-%s or later is installed on your system'
% gs_req)
return flag
def _get_home():
"""Find user's home directory if possible.
Otherwise, returns None.
:see:
http://mail.python.org/pipermail/python-list/2005-February/325395.html
"""
if six.PY2 and sys.platform == 'win32':
path = os.path.expanduser(b"~").decode(sys.getfilesystemencoding())
else:
path = os.path.expanduser("~")
if os.path.isdir(path):
return path
for evar in ('HOME', 'USERPROFILE', 'TMP'):
path = os.environ.get(evar)
if path is not None and os.path.isdir(path):
return path
return None
def _create_tmp_config_dir():
"""
If the config directory can not be created, create a temporary
directory.
"""
configdir = os.environ['MPLCONFIGDIR'] = (
tempfile.mkdtemp(prefix='matplotlib-'))
atexit.register(shutil.rmtree, configdir)
return configdir
get_home = _wrap('$HOME=%s', _get_home, always=False)
def _get_xdg_config_dir():
"""
Returns the XDG configuration directory, according to the `XDG
base directory spec
<http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
"""
path = os.environ.get('XDG_CONFIG_HOME')
if path is None:
path = get_home()
if path is not None:
path = os.path.join(path, '.config')
return path
def _get_xdg_cache_dir():
"""
Returns the XDG cache directory, according to the `XDG
base directory spec
<http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
"""
path = os.environ.get('XDG_CACHE_HOME')
if path is None:
path = get_home()
if path is not None:
path = os.path.join(path, '.cache')
return path
def _get_config_or_cache_dir(xdg_base):
from matplotlib.cbook import mkdirs
configdir = os.environ.get('MPLCONFIGDIR')
if configdir is not None:
configdir = os.path.abspath(configdir)
if not os.path.exists(configdir):
mkdirs(configdir)
if not _is_writable_dir(configdir):
return _create_tmp_config_dir()
return configdir
p = None
h = get_home()
if h is not None:
p = os.path.join(h, '.matplotlib')
if sys.platform.startswith(('linux', 'freebsd')):
p = None
if xdg_base is not None:
p = os.path.join(xdg_base, 'matplotlib')
if p is not None:
if os.path.exists(p):
if _is_writable_dir(p):
return p
else:
try:
mkdirs(p)
except OSError:
pass
else:
return p
return _create_tmp_config_dir()
def _get_configdir():
"""
Return the string representing the configuration directory.
The directory is chosen as follows:
1. If the MPLCONFIGDIR environment variable is supplied, choose that.
2a. On Linux, follow the XDG specification and look first in
`$XDG_CONFIG_HOME`, if defined, or `$HOME/.config`.
2b. On other platforms, choose `$HOME/.matplotlib`.
3. If the chosen directory exists and is writable, use that as the
configuration directory.
4. If possible, create a temporary directory, and use it as the
configuration directory.
5. A writable directory could not be found or created; return None.
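    For example, the lookup can be redirected up front via the environment
    (the directory below is purely hypothetical)::

        import os
        os.environ['MPLCONFIGDIR'] = '/tmp/mpl-config'  # hypothetical dir
        import matplotlib
        print(matplotlib.get_configdir())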
"""
return _get_config_or_cache_dir(_get_xdg_config_dir())
get_configdir = _wrap('CONFIGDIR=%s', _get_configdir, always=False)
def _get_cachedir():
"""
Return the location of the cache directory.
The procedure used to find the directory is the same as for
_get_config_dir, except using `$XDG_CACHE_HOME`/`~/.cache` instead.
"""
return _get_config_or_cache_dir(_get_xdg_cache_dir())
get_cachedir = _wrap('CACHEDIR=%s', _get_cachedir, always=False)
def _decode_filesystem_path(path):
if not isinstance(path, str):
return path.decode(sys.getfilesystemencoding())
else:
return path
def _get_data_path():
'get the path to matplotlib data'
if 'MATPLOTLIBDATA' in os.environ:
path = os.environ['MATPLOTLIBDATA']
if not os.path.isdir(path):
raise RuntimeError('Path in environment MATPLOTLIBDATA not a '
'directory')
return path
_file = _decode_filesystem_path(__file__)
path = os.sep.join([os.path.dirname(_file), 'mpl-data'])
if os.path.isdir(path):
return path
# setuptools' namespace_packages may highjack this init file
# so need to try something known to be in matplotlib, not basemap
import matplotlib.afm
_file = _decode_filesystem_path(matplotlib.afm.__file__)
path = os.sep.join([os.path.dirname(_file), 'mpl-data'])
if os.path.isdir(path):
return path
# py2exe zips pure python, so still need special check
if getattr(sys, 'frozen', None):
exe_path = os.path.dirname(_decode_filesystem_path(sys.executable))
path = os.path.join(exe_path, 'mpl-data')
if os.path.isdir(path):
return path
# Try again assuming we need to step up one more directory
path = os.path.join(os.path.split(exe_path)[0], 'mpl-data')
if os.path.isdir(path):
return path
# Try again assuming sys.path[0] is a dir not a exe
path = os.path.join(sys.path[0], 'mpl-data')
if os.path.isdir(path):
return path
raise RuntimeError('Could not find the matplotlib data files')
def _get_data_path_cached():
if defaultParams['datapath'][0] is None:
defaultParams['datapath'][0] = _get_data_path()
return defaultParams['datapath'][0]
get_data_path = _wrap('matplotlib data path %s', _get_data_path_cached,
always=False)
def get_py2exe_datafiles():
datapath = get_data_path()
_, tail = os.path.split(datapath)
d = {}
for root, _, files in os.walk(datapath):
# Need to explicitly remove cocoa_agg files or py2exe complains
# NOTE I don't know why, but do as previous version
if 'Matplotlib.nib' in files:
files.remove('Matplotlib.nib')
files = [os.path.join(root, filename) for filename in files]
root = root.replace(tail, 'mpl-data')
root = root[root.index('mpl-data'):]
d[root] = files
return list(d.items())
def matplotlib_fname():
"""
Get the location of the config file.
The file location is determined in the following order
- `$PWD/matplotlibrc`
- `$MATPLOTLIBRC` if it is a file (or a named pipe, which can be created
e.g. by process substitution)
- `$MATPLOTLIBRC/matplotlibrc`
- `$MPLCONFIGDIR/matplotlibrc`
- On Linux,
- `$XDG_CONFIG_HOME/matplotlib/matplotlibrc` (if
$XDG_CONFIG_HOME is defined)
- or `$HOME/.config/matplotlib/matplotlibrc` (if
$XDG_CONFIG_HOME is not defined)
- On other platforms,
- `$HOME/.matplotlib/matplotlibrc` if `$HOME` is defined.
- Lastly, it looks in `$MATPLOTLIBDATA/matplotlibrc` for a
system-defined copy.
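    A quick, hedged way to check which candidate was picked::

        import matplotlib
        print(matplotlib.matplotlib_fname())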
"""
def gen_candidates():
yield os.path.join(six.moves.getcwd(), 'matplotlibrc')
try:
matplotlibrc = os.environ['MATPLOTLIBRC']
except KeyError:
pass
else:
yield matplotlibrc
yield os.path.join(matplotlibrc, 'matplotlibrc')
yield os.path.join(_get_configdir(), 'matplotlibrc')
yield os.path.join(get_data_path(), 'matplotlibrc')
for fname in gen_candidates():
if os.path.exists(fname):
st_mode = os.stat(fname).st_mode
if stat.S_ISREG(st_mode) or stat.S_ISFIFO(st_mode):
break
# Return first candidate that is a file, or last candidate if none is
# valid (in that case, a warning is raised at startup by `rc_params`).
return fname
# names of keys to deprecate
# the values are a tuple of (new_name, f_old_2_new, f_new_2_old)
# the inverse function may be `None`
_deprecated_map = {}
_deprecated_ignore_map = {'nbagg.transparent': 'figure.facecolor'}
_obsolete_set = {'plugins.directory', 'text.dvipnghack'}
# The following may use a value of None to suppress the warning.
# do NOT include in _all_deprecated
_deprecated_set = {'axes.hold',
'backend.qt4',
'backend.qt5'}
_all_deprecated = set(itertools.chain(
_deprecated_ignore_map, _deprecated_map, _obsolete_set))
class RcParams(MutableMapping, dict):
"""
A dictionary object including validation
validating functions are defined and associated with rc parameters in
    :mod:`matplotlib.rcsetup`
    """
"""
validate = dict((key, converter) for key, (default, converter) in
six.iteritems(defaultParams)
if key not in _all_deprecated)
msg_depr = "%s is deprecated and replaced with %s; please use the latter."
msg_depr_set = ("%s is deprecated. Please remove it from your "
"matplotlibrc and/or style files.")
msg_depr_ignore = "%s is deprecated and ignored. Use %s instead."
msg_obsolete = ("%s is obsolete. Please remove it from your matplotlibrc "
"and/or style files.")
msg_backend_obsolete = ("The {} rcParam was deprecated in version 2.2. In"
" order to force the use of a specific Qt binding,"
" either import that binding first, or set the "
"QT_API environment variable.")
# validate values on the way in
def __init__(self, *args, **kwargs):
self.update(*args, **kwargs)
def __setitem__(self, key, val):
try:
if key in _deprecated_map:
alt_key, alt_val, inverse_alt = _deprecated_map[key]
warnings.warn(self.msg_depr % (key, alt_key),
mplDeprecation)
key = alt_key
val = alt_val(val)
elif key in _deprecated_set and val is not None:
if key.startswith('backend'):
warnings.warn(self.msg_backend_obsolete.format(key),
mplDeprecation)
else:
warnings.warn(self.msg_depr_set % key,
mplDeprecation)
elif key in _deprecated_ignore_map:
alt = _deprecated_ignore_map[key]
warnings.warn(self.msg_depr_ignore % (key, alt),
mplDeprecation)
return
elif key in _obsolete_set:
warnings.warn(self.msg_obsolete % (key, ),
mplDeprecation)
return
try:
cval = self.validate[key](val)
except ValueError as ve:
raise ValueError("Key %s: %s" % (key, str(ve)))
dict.__setitem__(self, key, cval)
except KeyError:
raise KeyError(
'%s is not a valid rc parameter. See rcParams.keys() for a '
'list of valid parameters.' % (key,))
def __getitem__(self, key):
inverse_alt = None
if key in _deprecated_map:
alt_key, alt_val, inverse_alt = _deprecated_map[key]
warnings.warn(self.msg_depr % (key, alt_key),
mplDeprecation)
key = alt_key
elif key in _deprecated_ignore_map:
alt = _deprecated_ignore_map[key]
warnings.warn(self.msg_depr_ignore % (key, alt),
mplDeprecation)
key = alt
elif key in _obsolete_set:
warnings.warn(self.msg_obsolete % (key, ),
mplDeprecation)
return None
val = dict.__getitem__(self, key)
if inverse_alt is not None:
return inverse_alt(val)
else:
return val
def __repr__(self):
import pprint
class_name = self.__class__.__name__
indent = len(class_name) + 1
repr_split = pprint.pformat(dict(self), indent=1,
width=80 - indent).split('\n')
repr_indented = ('\n' + ' ' * indent).join(repr_split)
return '{0}({1})'.format(class_name, repr_indented)
def __str__(self):
return '\n'.join('{0}: {1}'.format(k, v)
for k, v in sorted(self.items()))
def __iter__(self):
"""
Yield sorted list of keys.
"""
for k in sorted(dict.__iter__(self)):
yield k
def find_all(self, pattern):
"""
Return the subset of this RcParams dictionary whose keys match,
using :func:`re.search`, the given ``pattern``.
.. note::
Changes to the returned dictionary are *not* propagated to
the parent RcParams dictionary.
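        A small illustrative example (the pattern is arbitrary)::

            import matplotlib
            sub = matplotlib.rcParams.find_all('lines')
            # sub now holds the 'lines.*' keys, e.g. 'lines.linewidth'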
"""
pattern_re = re.compile(pattern)
return RcParams((key, value)
for key, value in self.items()
if pattern_re.search(key))
def rc_params(fail_on_error=False):
"""Return a :class:`matplotlib.RcParams` instance from the
default matplotlib rc file.
"""
fname = matplotlib_fname()
if not os.path.exists(fname):
# this should never happen, default in mpl-data should always be found
message = 'could not find rc file; returning defaults'
ret = RcParams([(key, default) for key, (default, _) in
six.iteritems(defaultParams)
if key not in _all_deprecated])
warnings.warn(message)
return ret
return rc_params_from_file(fname, fail_on_error)
URL_REGEX = re.compile(r'http://|https://|ftp://|file://|file:\\')
def is_url(filename):
"""Return True if string is an http, ftp, or file URL path."""
return URL_REGEX.match(filename) is not None
def _url_lines(f):
# Compatibility for urlopen in python 3, which yields bytes.
for line in f:
yield line.decode('utf8')
@contextlib.contextmanager
def _open_file_or_url(fname):
if is_url(fname):
f = urlopen(fname)
yield _url_lines(f)
f.close()
else:
fname = os.path.expanduser(fname)
encoding = locale.getpreferredencoding(do_setlocale=False)
if encoding is None:
encoding = "utf-8"
with io.open(fname, encoding=encoding) as f:
yield f
_error_details_fmt = 'line #%d\n\t"%s"\n\tin file "%s"'
def _rc_params_in_file(fname, fail_on_error=False):
"""Return :class:`matplotlib.RcParams` from the contents of the given file.
Unlike `rc_params_from_file`, the configuration class only contains the
parameters specified in the file (i.e. default values are not filled in).
"""
cnt = 0
rc_temp = {}
with _open_file_or_url(fname) as fd:
try:
for line in fd:
cnt += 1
strippedline = line.split('#', 1)[0].strip()
if not strippedline:
continue
tup = strippedline.split(':', 1)
if len(tup) != 2:
error_details = _error_details_fmt % (cnt, line, fname)
warnings.warn('Illegal %s' % error_details)
continue
key, val = tup
key = key.strip()
val = val.strip()
if key in rc_temp:
warnings.warn('Duplicate key in file "%s", line #%d' %
(fname, cnt))
rc_temp[key] = (val, line, cnt)
except UnicodeDecodeError:
warnings.warn(
('Cannot decode configuration file %s with '
'encoding %s, check LANG and LC_* variables')
% (fname, locale.getpreferredencoding(do_setlocale=False) or
'utf-8 (default)'))
raise
config = RcParams()
for key in ('verbose.level', 'verbose.fileo'):
if key in rc_temp:
val, line, cnt = rc_temp.pop(key)
if fail_on_error:
config[key] = val # try to convert to proper type or raise
else:
try:
config[key] = val # try to convert to proper type or skip
except Exception as msg:
error_details = _error_details_fmt % (cnt, line, fname)
warnings.warn('Bad val "%s" on %s\n\t%s' %
(val, error_details, msg))
for key, (val, line, cnt) in six.iteritems(rc_temp):
if key in defaultParams:
if fail_on_error:
config[key] = val # try to convert to proper type or raise
else:
try:
config[key] = val # try to convert to proper type or skip
except Exception as msg:
error_details = _error_details_fmt % (cnt, line, fname)
warnings.warn('Bad val "%s" on %s\n\t%s' %
(val, error_details, msg))
elif key in _deprecated_ignore_map:
warnings.warn('%s is deprecated. Update your matplotlibrc to use '
'%s instead.' % (key, _deprecated_ignore_map[key]),
mplDeprecation)
else:
print("""
Bad key "%s" on line %d in
%s.
You probably need to get an updated matplotlibrc file from
http://github.com/matplotlib/matplotlib/blob/master/matplotlibrc.template
or from the matplotlib source distribution""" % (key, cnt, fname),
file=sys.stderr)
return config
def rc_params_from_file(fname, fail_on_error=False, use_default_template=True):
"""Return :class:`matplotlib.RcParams` from the contents of the given file.
Parameters
----------
fname : str
Name of file parsed for matplotlib settings.
fail_on_error : bool
If True, raise an error when the parser fails to convert a parameter.
use_default_template : bool
If True, initialize with default parameters before updating with those
in the given file. If False, the configuration class only contains the
parameters specified in the file. (Useful for updating dicts.)
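    A hedged sketch of loading overrides from a style-like file (the file
    name is hypothetical)::

        import matplotlib as mpl
        cfg = mpl.rc_params_from_file('my.mplstyle',
                                      use_default_template=False)
        mpl.rcParams.update(cfg)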
"""
config_from_file = _rc_params_in_file(fname, fail_on_error)
if not use_default_template:
return config_from_file
iter_params = six.iteritems(defaultParams)
with warnings.catch_warnings():
warnings.simplefilter("ignore", mplDeprecation)
config = RcParams([(key, default) for key, (default, _) in iter_params
if key not in _all_deprecated])
config.update(config_from_file)
if config['datapath'] is None:
config['datapath'] = get_data_path()
if "".join(config['text.latex.preamble']):
_log.info("""
*****************************************************************
You have the following UNSUPPORTED LaTeX preamble customizations:
%s
Please do not ask for support with these customizations active.
*****************************************************************
""", '\n'.join(config['text.latex.preamble']))
_log.debug('loaded rc file %s', fname)
return config
# this is the instance used by the matplotlib classes
rcParams = rc_params()
if rcParams['examples.directory']:
# paths that are intended to be relative to matplotlib_fname()
# are allowed for the examples.directory parameter.
# However, we will need to fully qualify the path because
# Sphinx requires absolute paths.
if not os.path.isabs(rcParams['examples.directory']):
_basedir, _fname = os.path.split(matplotlib_fname())
# Sometimes matplotlib_fname() can return relative paths,
# Also, using realpath() guarantees that Sphinx will use
# the same path that matplotlib sees (in case of weird symlinks).
_basedir = os.path.realpath(_basedir)
_fullpath = os.path.join(_basedir, rcParams['examples.directory'])
rcParams['examples.directory'] = _fullpath
rcParamsOrig = rcParams.copy()
with warnings.catch_warnings():
warnings.simplefilter("ignore", mplDeprecation)
rcParamsDefault = RcParams([(key, default) for key, (default, converter) in
six.iteritems(defaultParams)
if key not in _all_deprecated])
rcParams['ps.usedistiller'] = checkdep_ps_distiller(
rcParams['ps.usedistiller'])
rcParams['text.usetex'] = checkdep_usetex(rcParams['text.usetex'])
if rcParams['axes.formatter.use_locale']:
locale.setlocale(locale.LC_ALL, '')
def rc(group, **kwargs):
"""
Set the current rc params. Group is the grouping for the rc, e.g.,
for ``lines.linewidth`` the group is ``lines``, for
``axes.facecolor``, the group is ``axes``, and so on. Group may
also be a list or tuple of group names, e.g., (*xtick*, *ytick*).
*kwargs* is a dictionary attribute name/value pairs, e.g.,::
rc('lines', linewidth=2, color='r')
sets the current rc params and is equivalent to::
rcParams['lines.linewidth'] = 2
rcParams['lines.color'] = 'r'
The following aliases are available to save typing for interactive
users:
===== =================
Alias Property
===== =================
'lw' 'linewidth'
'ls' 'linestyle'
'c' 'color'
'fc' 'facecolor'
'ec' 'edgecolor'
'mew' 'markeredgewidth'
'aa' 'antialiased'
===== =================
Thus you could abbreviate the above rc command as::
rc('lines', lw=2, c='r')
Note you can use python's kwargs dictionary facility to store
dictionaries of default parameters. e.g., you can customize the
font rc as follows::
font = {'family' : 'monospace',
'weight' : 'bold',
'size' : 'larger'}
rc('font', **font) # pass in the font dict as kwargs
This enables you to easily switch between several configurations. Use
``matplotlib.style.use('default')`` or :func:`~matplotlib.rcdefaults` to
restore the default rc params after changes.
"""
aliases = {
'lw': 'linewidth',
'ls': 'linestyle',
'c': 'color',
'fc': 'facecolor',
'ec': 'edgecolor',
'mew': 'markeredgewidth',
'aa': 'antialiased',
}
if isinstance(group, six.string_types):
group = (group,)
for g in group:
for k, v in six.iteritems(kwargs):
name = aliases.get(k) or k
key = '%s.%s' % (g, name)
try:
rcParams[key] = v
except KeyError:
raise KeyError(('Unrecognized key "%s" for group "%s" and '
'name "%s"') % (key, g, name))
def rcdefaults():
"""Restore the rc params from Matplotlib's internal defaults.
See Also
--------
rc_file_defaults :
Restore the rc params from the rc file originally loaded by Matplotlib.
matplotlib.style.use :
Use a specific style file. Call ``style.use('default')`` to restore
the default style.
"""
rcParams.clear()
rcParams.update(rcParamsDefault)
def rc_file_defaults():
"""Restore the rc params from the original rc file loaded by Matplotlib.
"""
rcParams.update(rcParamsOrig)
def rc_file(fname):
"""
Update rc params from file.
"""
rcParams.update(rc_params_from_file(fname))
@contextlib.contextmanager
def rc_context(rc=None, fname=None):
"""
Return a context manager for managing rc settings.
This allows one to do::
with mpl.rc_context(fname='screen.rc'):
plt.plot(x, a)
with mpl.rc_context(fname='print.rc'):
plt.plot(x, b)
plt.plot(x, c)
The 'a' vs 'x' and 'c' vs 'x' plots would have settings from
'screen.rc', while the 'b' vs 'x' plot would have settings from
'print.rc'.
A dictionary can also be passed to the context manager::
with mpl.rc_context(rc={'text.usetex': True}, fname='screen.rc'):
plt.plot(x, a)
The 'rc' dictionary takes precedence over the settings loaded from
'fname'. Passing a dictionary only is also valid. For example a
common usage is::
with mpl.rc_context(rc={'interactive': False}):
fig, ax = plt.subplots()
ax.plot(range(3), range(3))
fig.savefig('A.png', format='png')
plt.close(fig)
"""
orig = rcParams.copy()
try:
if fname:
rc_file(fname)
if rc:
rcParams.update(rc)
yield
finally:
# No need to revalidate the original values.
dict.update(rcParams, orig)
_use_error_msg = """
This call to matplotlib.use() has no effect because the backend has already
been chosen; matplotlib.use() must be called *before* pylab, matplotlib.pyplot,
or matplotlib.backends is imported for the first time.
The backend was *originally* set to {backend!r} by the following code:
{tb}
"""
def use(arg, warn=True, force=False):
"""
Set the matplotlib backend to one of the known backends.
The argument is case-insensitive. *warn* specifies whether a
warning should be issued if a backend has already been set up.
*force* is an **experimental** flag that tells matplotlib to
attempt to initialize a new backend by reloading the backend
module.
.. note::
This function must be called *before* importing pyplot for
the first time; or, if you are not using pyplot, it must be called
before importing matplotlib.backends. If warn is True, a warning
is issued if you try and call this after pylab or pyplot have been
loaded. In certain black magic use cases, e.g.
:func:`pyplot.switch_backend`, we are doing the reloading necessary to
make the backend switch work (in some cases, e.g., pure image
backends) so one can set warn=False to suppress the warnings.
To find out which backend is currently set, see
:func:`matplotlib.get_backend`.
"""
    # Let's determine the proper backend name first
if arg.startswith('module://'):
name = arg
else:
# Lowercase only non-module backend names (modules are case-sensitive)
arg = arg.lower()
name = validate_backend(arg)
# Check if we've already set up a backend
if 'matplotlib.backends' in sys.modules:
# Warn only if called with a different name
if (rcParams['backend'] != name) and warn:
import matplotlib.backends
warnings.warn(
_use_error_msg.format(
backend=rcParams['backend'],
tb=matplotlib.backends._backend_loading_tb),
stacklevel=2)
# Unless we've been told to force it, just return
if not force:
return
need_reload = True
else:
need_reload = False
# Store the backend name
rcParams['backend'] = name
# If needed we reload here because a lot of setup code is triggered on
# module import. See backends/__init__.py for more detail.
if need_reload:
reload(sys.modules['matplotlib.backends'])
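# Illustrative usage sketch (kept as comments so nothing runs at import time):
# the intended pattern is to select a backend before pyplot is imported, e.g.
#
#     import matplotlib
#     matplotlib.use('Agg')            # must come before the pyplot import
#     import matplotlib.pyplot as plt
#
# Calling use() later only warns, unless the experimental force=True flag is
# passed, in which case the backend machinery is reloaded.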
try:
use(os.environ['MPLBACKEND'])
except KeyError:
pass
def get_backend():
"""Return the name of the current backend."""
return rcParams['backend']
def interactive(b):
"""
Set interactive mode to boolean b.
If b is True, then draw after every plotting command, e.g., after xlabel
"""
rcParams['interactive'] = b
def is_interactive():
'Return true if plot mode is interactive'
return rcParams['interactive']
def tk_window_focus():
"""Return true if focus maintenance under TkAgg on win32 is on.
This currently works only for python.exe and IPython.exe.
Both IDLE and Pythonwin.exe fail badly when tk_window_focus is on."""
if rcParams['backend'] != 'TkAgg':
return False
return rcParams['tk.window_focus']
default_test_modules = [
'matplotlib.tests',
'matplotlib.sphinxext.tests',
'mpl_toolkits.tests',
]
def _init_tests():
try:
import faulthandler
except ImportError:
pass
else:
# CPython's faulthandler since v3.6 handles exceptions on Windows
# https://bugs.python.org/issue23848 but until v3.6.4 it was
# printing non-fatal exceptions https://bugs.python.org/issue30557
import platform
if not (sys.platform == 'win32' and
(3, 6) < sys.version_info < (3, 6, 4) and
platform.python_implementation() == 'CPython'):
faulthandler.enable()
# The version of FreeType to install locally for running the
# tests. This must match the value in `setupext.py`
LOCAL_FREETYPE_VERSION = '2.6.1'
from matplotlib import ft2font
if (ft2font.__freetype_version__ != LOCAL_FREETYPE_VERSION or
ft2font.__freetype_build_type__ != 'local'):
warnings.warn(
"Matplotlib is not built with the correct FreeType version to run "
"tests. Set local_freetype=True in setup.cfg and rebuild. "
"Expect many image comparison failures below. "
"Expected freetype version {0}. "
"Found freetype version {1}. "
"Freetype build type is {2}local".format(
LOCAL_FREETYPE_VERSION,
ft2font.__freetype_version__,
"" if ft2font.__freetype_build_type__ == 'local' else "not "
)
)
try:
import pytest
try:
from unittest import mock
except ImportError:
import mock
except ImportError:
print("matplotlib.test requires pytest and mock to run.")
raise
def test(verbosity=None, coverage=False, switch_backend_warn=True,
recursionlimit=0, **kwargs):
"""run the matplotlib test suite"""
_init_tests()
if not os.path.isdir(os.path.join(os.path.dirname(__file__), 'tests')):
raise ImportError("Matplotlib test data is not installed")
old_backend = get_backend()
old_recursionlimit = sys.getrecursionlimit()
try:
use('agg')
if recursionlimit:
sys.setrecursionlimit(recursionlimit)
import pytest
args = kwargs.pop('argv', [])
provide_default_modules = True
use_pyargs = True
for arg in args:
if any(arg.startswith(module_path)
for module_path in default_test_modules):
provide_default_modules = False
break
if os.path.exists(arg):
provide_default_modules = False
use_pyargs = False
break
if use_pyargs:
args += ['--pyargs']
if provide_default_modules:
args += default_test_modules
if coverage:
args += ['--cov']
if verbosity:
args += ['-' + 'v' * verbosity]
retcode = pytest.main(args, **kwargs)
finally:
if old_backend.lower() != 'agg':
use(old_backend, warn=switch_backend_warn)
if recursionlimit:
sys.setrecursionlimit(old_recursionlimit)
return retcode
test.__test__ = False # pytest: this function is not a test
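# Illustrative invocation sketch for test() (comments only; the '-k'
# expression below is just an example filter, not a required argument):
#
#     import matplotlib
#     matplotlib.test(verbosity=2, argv=['-k', 'test_simple'])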
def _replacer(data, key):
    """Either returns data[key] or passes the key back unchanged. Also
    converts the looked-up value to a sequence as needed.
    """
    # if key isn't a string, don't bother
    if not isinstance(key, six.string_types):
        return key
# try to use __getitem__
try:
return sanitize_sequence(data[key])
# key does not exist, silently fall back to key
except KeyError:
return key
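# Worked example for _replacer (comments only): with data = {"a": [1, 2, 3]},
# _replacer(data, "a") returns the sanitized sequence data["a"], while
# _replacer(data, "b") and _replacer(data, [4, 5]) return the second argument
# unchanged (missing key and non-string key, respectively).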
_DATA_DOC_APPENDIX = """
.. note::
In addition to the above described arguments, this function can take a
**data** keyword argument. If such a **data** argument is given, the
following arguments are replaced by **data[<arg>]**:
{replaced}
"""
def _add_data_doc(docstring, replace_names, replace_all_args):
"""Add documentation for a *data* field to the given docstring.
Parameters
----------
docstring : str
The input docstring.
replace_names : list of strings or None
        The list of parameter names whose arguments should be replaced by
`data[name]`. If None, all arguments are replaced if they are
included in `data`.
replace_all_args : bool
If True, all arguments in *args get replaced, even if they are not
in replace_names.
Returns
-------
The augmented docstring.
"""
if docstring is None:
docstring = ''
else:
docstring = dedent(docstring)
_repl = ""
if replace_names is None:
_repl = "* All positional and all keyword arguments."
else:
if len(replace_names) != 0:
_repl = "* All arguments with the following names: '{names}'."
if replace_all_args:
_repl += "\n * All positional arguments."
_repl = _repl.format(names="', '".join(sorted(replace_names)))
return docstring + _DATA_DOC_APPENDIX.format(replaced=_repl)
def _preprocess_data(replace_names=None, replace_all_args=False,
label_namer=None, positional_parameter_names=None):
"""
    A decorator to add a 'data' kwarg to any function. The signature
    of the input function must have the ax argument in the first position ::
def foo(ax, *args, **kwargs)
so this is suitable for use with Axes methods.
Parameters
----------
replace_names : list of strings, optional, default: None
        The list of parameter names whose arguments should be replaced by
`data[name]`. If None, all arguments are replaced if they are
included in `data`.
replace_all_args : bool, default: False
If True, all arguments in *args get replaced, even if they are not
in replace_names.
label_namer : string, optional, default: None
        The name of the parameter whose argument should be used as label, if
label is not set. If None, the label keyword argument is not set.
    positional_parameter_names : list of strings or callable, optional
        The full list of positional parameter names (excluding an explicit
        `ax`/`self` argument in the first position and including all possible
        positional parameters in `*args`), in the right order. It can also
        include all other keyword parameters. Only needed if the wrapped
        function contains `*args` and (replace_names is not None or
        replace_all_args is False). If it is a callable, it will be called
        with the actual tuple of *args and the data and should return a list
        like the one above.
        NOTE: callables should only be used when the names and order of *args
        can only be determined at runtime. Please use a list of names when
        the order and names of *args are known before runtime!
.. note:: decorator also converts MappingView input data to list.
"""
if replace_names is not None:
replace_names = set(replace_names)
def param(func):
new_sig = None
# signature is since 3.3 and wrapped since 3.2, but we support 3.4+.
python_has_signature = python_has_wrapped = six.PY3
# if in a legacy version of python and IPython is already imported
# try to use their back-ported signature
if not python_has_signature and 'IPython' in sys.modules:
try:
import IPython.utils.signatures
signature = IPython.utils.signatures.signature
Parameter = IPython.utils.signatures.Parameter
except ImportError:
pass
else:
python_has_signature = True
else:
if python_has_signature:
signature = inspect.signature
Parameter = inspect.Parameter
if not python_has_signature:
arg_spec = inspect.getargspec(func)
_arg_names = arg_spec.args
_has_varargs = arg_spec.varargs is not None
_has_varkwargs = arg_spec.keywords is not None
else:
sig = signature(func)
_has_varargs = False
_has_varkwargs = False
_arg_names = []
params = list(sig.parameters.values())
for p in params:
if p.kind is Parameter.VAR_POSITIONAL:
_has_varargs = True
elif p.kind is Parameter.VAR_KEYWORD:
_has_varkwargs = True
else:
_arg_names.append(p.name)
data_param = Parameter('data',
Parameter.KEYWORD_ONLY,
default=None)
if _has_varkwargs:
params.insert(-1, data_param)
else:
params.append(data_param)
new_sig = sig.replace(parameters=params)
# Import-time check: do we have enough information to replace *args?
arg_names_at_runtime = False
# there can't be any positional arguments behind *args and no
# positional args can end up in **kwargs, so only *varargs make
# problems.
# http://stupidpythonideas.blogspot.de/2013/08/arguments-and-parameters.html
if not _has_varargs:
# all args are "named", so no problem
# remove the first "ax" / self arg
arg_names = _arg_names[1:]
else:
# Here we have "unnamed" variables and we need a way to determine
            # whether to replace an arg or not
if replace_names is None:
# all argnames should be replaced
arg_names = None
elif len(replace_names) == 0:
# No argnames should be replaced
arg_names = []
elif len(_arg_names) > 1 and (positional_parameter_names is None):
# we got no manual parameter names but more than an 'ax' ...
if len(replace_names - set(_arg_names[1:])) == 0:
# all to be replaced arguments are in the list
arg_names = _arg_names[1:]
else:
raise AssertionError(
"Got unknown 'replace_names' and wrapped function "
"{!r} uses '*args', need 'positional_parameter_names'"
.format(func.__name__))
else:
if positional_parameter_names is not None:
if callable(positional_parameter_names):
# determined by the function at runtime
arg_names_at_runtime = True
# so that we don't compute the label_pos at import time
arg_names = []
else:
arg_names = positional_parameter_names
else:
if replace_all_args:
arg_names = []
else:
raise AssertionError(
"Got 'replace_names' and wrapped function {!r} "
"uses *args, need 'positional_parameter_names' or "
"'replace_all_args'".format(func.__name__))
# compute the possible label_namer and label position in positional
# arguments
label_pos = 9999 # bigger than all "possible" argument lists
label_namer_pos = 9999 # bigger than all "possible" argument lists
if (label_namer and # we actually want a label here ...
arg_names and # and we can determine a label in *args ...
(label_namer in arg_names)): # and it is in *args
label_namer_pos = arg_names.index(label_namer)
if "label" in arg_names:
label_pos = arg_names.index("label")
        # Check the case where we know a label_namer but we can't find it in
        # the arg_names... Unfortunately the label_namer can be in **kwargs,
# which we can't detect here and which results in a non-set label
# which might surprise the user :-(
if label_namer and not arg_names_at_runtime and not _has_varkwargs:
if not arg_names:
raise AssertionError(
"label_namer {!r} can't be found as the parameter without "
"'positional_parameter_names'".format(label_namer))
elif label_namer not in arg_names:
            raise AssertionError(
                "label_namer {!r} can't be found in the parameter names "
                "(known argnames: {!r}).".format(label_namer, arg_names))
else:
# this is the case when the name is in arg_names
pass
@functools.wraps(func)
def inner(ax, *args, **kwargs):
# this is needed because we want to change these values if
# arg_names_at_runtime==True, but python does not allow assigning
            # to a variable in an outer scope. So use some new local ones and
# set them to the already computed values.
_label_pos = label_pos
_label_namer_pos = label_namer_pos
_arg_names = arg_names
label = None
data = kwargs.pop('data', None)
if data is None: # data validation
args = tuple(sanitize_sequence(a) for a in args)
else:
if arg_names_at_runtime:
# update the information about replace names and
# label position
_arg_names = positional_parameter_names(args, data)
if (label_namer and # we actually want a label here ...
_arg_names and # and we can find a label in *args
(label_namer in _arg_names)): # and it is in *args
_label_namer_pos = _arg_names.index(label_namer)
if "label" in _arg_names:
                            _label_pos = _arg_names.index("label")
# save the current label_namer value so that it can be used as
# a label
if _label_namer_pos < len(args):
label = args[_label_namer_pos]
else:
label = kwargs.get(label_namer, None)
# ensure a string, as label can't be anything else
if not isinstance(label, six.string_types):
label = None
if (replace_names is None) or (replace_all_args is True):
# all should be replaced
args = tuple(_replacer(data, a) for
j, a in enumerate(args))
else:
# An arg is replaced if the arg_name of that position is
# in replace_names ...
if len(_arg_names) < len(args):
raise RuntimeError(
"Got more args than function expects")
args = tuple(_replacer(data, a)
if _arg_names[j] in replace_names else a
for j, a in enumerate(args))
if replace_names is None:
# replace all kwargs ...
kwargs = dict((k, _replacer(data, v))
for k, v in six.iteritems(kwargs))
else:
# ... or only if a kwarg of that name is in replace_names
kwargs = dict((k, _replacer(data, v)
if k in replace_names else v)
for k, v in six.iteritems(kwargs))
# replace the label if this func "wants" a label arg and the user
# didn't set one. Note: if the user puts in "label=None", it does
# *NOT* get replaced!
user_supplied_label = (
(len(args) >= _label_pos) or # label is included in args
('label' in kwargs) # ... or in kwargs
)
if (label_namer and not user_supplied_label):
if _label_namer_pos < len(args):
kwargs['label'] = get_label(args[_label_namer_pos], label)
elif label_namer in kwargs:
kwargs['label'] = get_label(kwargs[label_namer], label)
else:
warnings.warn(
"Tried to set a label via parameter %r in func %r but "
"couldn't find such an argument.\n"
"(This is a programming error, please report to "
"the Matplotlib list!)" % (label_namer, func.__name__),
RuntimeWarning, stacklevel=2)
return func(ax, *args, **kwargs)
inner.__doc__ = _add_data_doc(inner.__doc__,
replace_names, replace_all_args)
if not python_has_wrapped:
inner.__wrapped__ = func
if new_sig is not None:
inner.__signature__ = new_sig
return inner
return param
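# Illustrative sketch of the decorator above (the function name below is
# hypothetical, not part of the library):
#
#     @_preprocess_data(replace_names=["x", "y"], label_namer="y")
#     def plot_xy(ax, x, y, **kwargs):
#         ...
#
# A call like plot_xy(ax, "a", "b", data={"a": [1, 2], "b": [3, 4]}) forwards
# data["a"] and data["b"] as the x and y arguments and, because of
# label_namer="y", sets label="b" unless the caller supplied a label.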
_log.debug('matplotlib version %s', __version__)
_log.debug('interactive is %s', is_interactive())
_log.debug('platform is %s', sys.platform)
_log.debug('loaded modules: %s', list(sys.modules))
cba-pipeline-public
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/collections.py
"""
Classes for the efficient drawing of large collections of objects that
share most properties, e.g., a large number of line segments or
polygons.
The classes are not meant to be as flexible as their single element
counterparts (e.g., you may not be able to select all line styles) but
they are meant to be fast for common use cases (e.g., a large set of solid
line segments).
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import warnings
import six
from six.moves import zip
try:
from math import gcd
except ImportError:
# LPy workaround
from fractions import gcd
import numpy as np
import matplotlib as mpl
from . import (_path, artist, cbook, cm, colors as mcolors, docstring,
lines as mlines, path as mpath, transforms)
CIRCLE_AREA_FACTOR = 1.0 / np.sqrt(np.pi)
_color_aliases = {'facecolors': ['facecolor'],
'edgecolors': ['edgecolor']}
class Collection(artist.Artist, cm.ScalarMappable):
"""
Base class for Collections. Must be subclassed to be usable.
All properties in a collection must be sequences or scalars;
if scalars, they will be converted to sequences. The
property of the ith element of the collection is::
prop[i % len(props)]
    Exceptions are *capstyle* and *joinstyle* properties; these can
only be set globally for the whole collection.
Keyword arguments and default values:
* *edgecolors*: None
* *facecolors*: None
* *linewidths*: None
* *capstyle*: None
* *joinstyle*: None
* *antialiaseds*: None
* *offsets*: None
* *transOffset*: transforms.IdentityTransform()
* *offset_position*: 'screen' (default) or 'data'
* *norm*: None (optional for
:class:`matplotlib.cm.ScalarMappable`)
* *cmap*: None (optional for
:class:`matplotlib.cm.ScalarMappable`)
* *hatch*: None
* *zorder*: 1
*offsets* and *transOffset* are used to translate the patch after
rendering (default no offsets). If offset_position is 'screen'
(default) the offset is applied after the master transform has
been applied, that is, the offsets are in screen coordinates. If
offset_position is 'data', the offset is applied before the master
transform, i.e., the offsets are in data coordinates.
If any of *edgecolors*, *facecolors*, *linewidths*, *antialiaseds*
are None, they default to their :data:`matplotlib.rcParams` patch
setting, in sequence form.
The use of :class:`~matplotlib.cm.ScalarMappable` is optional. If
the :class:`~matplotlib.cm.ScalarMappable` matrix _A is not None
(i.e., a call to set_array has been made), at draw time a call to
scalar mappable will be made to set the face colors.
"""
_offsets = np.zeros((0, 2))
_transOffset = transforms.IdentityTransform()
#: Either a list of 3x3 arrays or an Nx3x3 array of transforms, suitable
#: for the `all_transforms` argument to
#: :meth:`~matplotlib.backend_bases.RendererBase.draw_path_collection`;
#: each 3x3 array is used to initialize an
#: :class:`~matplotlib.transforms.Affine2D` object.
#: Each kind of collection defines this based on its arguments.
_transforms = np.empty((0, 3, 3))
# Whether to draw an edge by default. Set on a
# subclass-by-subclass basis.
_edge_default = False
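    # Worked example of the cycling rule described in the class docstring
    # (comments only): with facecolors=['red', 'green'] and five elements,
    # element i gets facecolors[i % 2], i.e. red, green, red, green, red.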
def __init__(self,
edgecolors=None,
facecolors=None,
linewidths=None,
linestyles='solid',
capstyle=None,
joinstyle=None,
antialiaseds=None,
offsets=None,
transOffset=None,
norm=None, # optional for ScalarMappable
cmap=None, # ditto
pickradius=5.0,
hatch=None,
urls=None,
offset_position='screen',
zorder=1,
**kwargs
):
"""
Create a Collection
%(Collection)s
"""
artist.Artist.__init__(self)
cm.ScalarMappable.__init__(self, norm, cmap)
# list of un-scaled dash patterns
        # this is needed for scaling the dash pattern by linewidth
self._us_linestyles = [(None, None)]
# list of dash patterns
self._linestyles = [(None, None)]
# list of unbroadcast/scaled linewidths
self._us_lw = [0]
self._linewidths = [0]
self._is_filled = True # May be modified by set_facecolor().
self._hatch_color = mcolors.to_rgba(mpl.rcParams['hatch.color'])
self.set_facecolor(facecolors)
self.set_edgecolor(edgecolors)
self.set_linewidth(linewidths)
self.set_linestyle(linestyles)
self.set_antialiased(antialiaseds)
self.set_pickradius(pickradius)
self.set_urls(urls)
self.set_hatch(hatch)
self.set_offset_position(offset_position)
self.set_zorder(zorder)
if capstyle:
self.set_capstyle(capstyle)
else:
self._capstyle = None
if joinstyle:
self.set_joinstyle(joinstyle)
else:
self._joinstyle = None
self._offsets = np.zeros((1, 2))
self._uniform_offsets = None
if offsets is not None:
offsets = np.asanyarray(offsets, float)
# Broadcast (2,) -> (1, 2) but nothing else.
if offsets.shape == (2,):
offsets = offsets[None, :]
if transOffset is not None:
self._offsets = offsets
self._transOffset = transOffset
else:
self._uniform_offsets = offsets
self._path_effects = None
self.update(kwargs)
self._paths = None
def get_paths(self):
return self._paths
def set_paths(self):
raise NotImplementedError
def get_transforms(self):
return self._transforms
def get_offset_transform(self):
t = self._transOffset
if (not isinstance(t, transforms.Transform)
and hasattr(t, '_as_mpl_transform')):
t = t._as_mpl_transform(self.axes)
return t
def get_datalim(self, transData):
transform = self.get_transform()
transOffset = self.get_offset_transform()
offsets = self._offsets
paths = self.get_paths()
if not transform.is_affine:
paths = [transform.transform_path_non_affine(p) for p in paths]
transform = transform.get_affine()
if not transOffset.is_affine:
offsets = transOffset.transform_non_affine(offsets)
transOffset = transOffset.get_affine()
if isinstance(offsets, np.ma.MaskedArray):
offsets = offsets.filled(np.nan)
# get_path_collection_extents handles nan but not masked arrays
if len(paths) and len(offsets):
result = mpath.get_path_collection_extents(
transform.frozen(), paths, self.get_transforms(),
offsets, transOffset.frozen())
result = result.inverse_transformed(transData)
else:
result = transforms.Bbox.null()
return result
def get_window_extent(self, renderer):
        # TODO: check to ensure that this does not fail for
# cases other than scatter plot legend
return self.get_datalim(transforms.IdentityTransform())
def _prepare_points(self):
"""Point prep for drawing and hit testing"""
transform = self.get_transform()
transOffset = self.get_offset_transform()
offsets = self._offsets
paths = self.get_paths()
if self.have_units():
paths = []
for path in self.get_paths():
vertices = path.vertices
xs, ys = vertices[:, 0], vertices[:, 1]
xs = self.convert_xunits(xs)
ys = self.convert_yunits(ys)
paths.append(mpath.Path(np.column_stack([xs, ys]), path.codes))
if offsets.size > 0:
xs = self.convert_xunits(offsets[:, 0])
ys = self.convert_yunits(offsets[:, 1])
offsets = np.column_stack([xs, ys])
if not transform.is_affine:
paths = [transform.transform_path_non_affine(path)
for path in paths]
transform = transform.get_affine()
if not transOffset.is_affine:
offsets = transOffset.transform_non_affine(offsets)
# This might have changed an ndarray into a masked array.
transOffset = transOffset.get_affine()
if isinstance(offsets, np.ma.MaskedArray):
offsets = offsets.filled(np.nan)
# Changing from a masked array to nan-filled ndarray
# is probably most efficient at this point.
return transform, transOffset, offsets, paths
@artist.allow_rasterization
def draw(self, renderer):
if not self.get_visible():
return
renderer.open_group(self.__class__.__name__, self.get_gid())
self.update_scalarmappable()
transform, transOffset, offsets, paths = self._prepare_points()
gc = renderer.new_gc()
self._set_gc_clip(gc)
gc.set_snap(self.get_snap())
if self._hatch:
gc.set_hatch(self._hatch)
try:
gc.set_hatch_color(self._hatch_color)
except AttributeError:
# if we end up with a GC that does not have this method
warnings.warn("Your backend does not support setting the "
"hatch color.")
if self.get_sketch_params() is not None:
gc.set_sketch_params(*self.get_sketch_params())
if self.get_path_effects():
from matplotlib.patheffects import PathEffectRenderer
renderer = PathEffectRenderer(self.get_path_effects(), renderer)
# If the collection is made up of a single shape/color/stroke,
# it can be rendered once and blitted multiple times, using
# `draw_markers` rather than `draw_path_collection`. This is
# *much* faster for Agg, and results in smaller file sizes in
# PDF/SVG/PS.
trans = self.get_transforms()
facecolors = self.get_facecolor()
edgecolors = self.get_edgecolor()
do_single_path_optimization = False
if (len(paths) == 1 and len(trans) <= 1 and
len(facecolors) == 1 and len(edgecolors) == 1 and
len(self._linewidths) == 1 and
self._linestyles == [(None, None)] and
len(self._antialiaseds) == 1 and len(self._urls) == 1 and
self.get_hatch() is None):
if len(trans):
combined_transform = (transforms.Affine2D(trans[0]) +
transform)
else:
combined_transform = transform
extents = paths[0].get_extents(combined_transform)
width, height = renderer.get_canvas_width_height()
if (extents.width < width and
extents.height < height):
do_single_path_optimization = True
if self._joinstyle:
gc.set_joinstyle(self._joinstyle)
if self._capstyle:
gc.set_capstyle(self._capstyle)
if do_single_path_optimization:
gc.set_foreground(tuple(edgecolors[0]))
gc.set_linewidth(self._linewidths[0])
gc.set_dashes(*self._linestyles[0])
gc.set_antialiased(self._antialiaseds[0])
gc.set_url(self._urls[0])
renderer.draw_markers(
gc, paths[0], combined_transform.frozen(),
mpath.Path(offsets), transOffset, tuple(facecolors[0]))
else:
renderer.draw_path_collection(
gc, transform.frozen(), paths,
self.get_transforms(), offsets, transOffset,
self.get_facecolor(), self.get_edgecolor(),
self._linewidths, self._linestyles,
self._antialiaseds, self._urls,
self._offset_position)
gc.restore()
renderer.close_group(self.__class__.__name__)
self.stale = False
def set_pickradius(self, pr):
"""Set the pick radius used for containment tests.
.. ACCEPTS: float distance in points
Parameters
----------
        pr : float
Pick radius, in points.
"""
self._pickradius = pr
def get_pickradius(self):
return self._pickradius
def contains(self, mouseevent):
"""
Test whether the mouse event occurred in the collection.
Returns True | False, ``dict(ind=itemlist)``, where every
item in itemlist contains the event.
"""
if callable(self._contains):
return self._contains(self, mouseevent)
if not self.get_visible():
return False, {}
pickradius = (
float(self._picker)
if cbook.is_numlike(self._picker) and
self._picker is not True # the bool, not just nonzero or 1
else self._pickradius)
transform, transOffset, offsets, paths = self._prepare_points()
ind = _path.point_in_path_collection(
mouseevent.x, mouseevent.y, pickradius,
transform.frozen(), paths, self.get_transforms(),
offsets, transOffset, pickradius <= 0,
self.get_offset_position())
return len(ind) > 0, dict(ind=ind)
def set_urls(self, urls):
"""
Parameters
----------
urls : List[str] or None
.. ACCEPTS: List[str] or None
"""
self._urls = urls if urls is not None else [None]
self.stale = True
def get_urls(self):
return self._urls
def set_hatch(self, hatch):
r"""
Set the hatching pattern
*hatch* can be one of::
/ - diagonal hatching
\ - back diagonal
| - vertical
- - horizontal
+ - crossed
x - crossed diagonal
o - small circle
O - large circle
. - dots
* - stars
Letters can be combined, in which case all the specified
        hatchings are done. If the same letter repeats, it increases the
density of hatching of that pattern.
Hatching is supported in the PostScript, PDF, SVG and Agg
backends only.
Unlike other properties such as linewidth and colors, hatching
can only be specified for the collection as a whole, not separately
for each member.
ACCEPTS: [ '/' | '\\' | '|' | '-' | '+' | 'x' | 'o' | 'O' | '.' | '*' ]
"""
self._hatch = hatch
self.stale = True
def get_hatch(self):
"""Return the current hatching pattern."""
return self._hatch
def set_offsets(self, offsets):
"""
Set the offsets for the collection. *offsets* can be a scalar
or a sequence.
ACCEPTS: float or sequence of floats
"""
offsets = np.asanyarray(offsets, float)
if offsets.shape == (2,): # Broadcast (2,) -> (1, 2) but nothing else.
offsets = offsets[None, :]
# This decision is based on how they are initialized above in __init__.
if self._uniform_offsets is None:
self._offsets = offsets
else:
self._uniform_offsets = offsets
self.stale = True
def get_offsets(self):
"""Return the offsets for the collection."""
# This decision is based on how they are initialized above in __init__.
if self._uniform_offsets is None:
return self._offsets
else:
return self._uniform_offsets
def set_offset_position(self, offset_position):
"""
Set how offsets are applied. If *offset_position* is 'screen'
(default) the offset is applied after the master transform has
been applied, that is, the offsets are in screen coordinates.
If offset_position is 'data', the offset is applied before the
master transform, i.e., the offsets are in data coordinates.
.. ACCEPTS: [ 'screen' | 'data' ]
"""
if offset_position not in ('screen', 'data'):
raise ValueError("offset_position must be 'screen' or 'data'")
self._offset_position = offset_position
self.stale = True
def get_offset_position(self):
"""
Returns how offsets are applied for the collection. If
*offset_position* is 'screen', the offset is applied after the
master transform has been applied, that is, the offsets are in
screen coordinates. If offset_position is 'data', the offset
is applied before the master transform, i.e., the offsets are
in data coordinates.
"""
return self._offset_position
def set_linewidth(self, lw):
"""
Set the linewidth(s) for the collection. *lw* can be a scalar
or a sequence; if it is a sequence the patches will cycle
through the sequence
ACCEPTS: float or sequence of floats
"""
if lw is None:
lw = mpl.rcParams['patch.linewidth']
if lw is None:
lw = mpl.rcParams['lines.linewidth']
# get the un-scaled/broadcast lw
self._us_lw = np.atleast_1d(np.asarray(lw))
# scale all of the dash patterns.
self._linewidths, self._linestyles = self._bcast_lwls(
self._us_lw, self._us_linestyles)
self.stale = True
def set_linewidths(self, lw):
"""alias for set_linewidth"""
return self.set_linewidth(lw)
def set_lw(self, lw):
"""alias for set_linewidth"""
return self.set_linewidth(lw)
def set_linestyle(self, ls):
"""
Set the linestyle(s) for the collection.
=========================== =================
linestyle description
=========================== =================
``'-'`` or ``'solid'`` solid line
``'--'`` or ``'dashed'`` dashed line
``'-.'`` or ``'dashdot'`` dash-dotted line
``':'`` or ``'dotted'`` dotted line
=========================== =================
Alternatively a dash tuple of the following form can be provided::
(offset, onoffseq),
where ``onoffseq`` is an even length tuple of on and off ink
in points.
        ACCEPTS: ['solid' | 'dashed' | 'dashdot' | 'dotted' |
                   (offset, on-off-dash-seq) |
                   ``'-'`` | ``'--'`` | ``'-.'`` | ``':'`` | ``'None'`` |
                   ``' '`` | ``''``]
Parameters
----------
        ls : {'-', '--', '-.', ':'} and more; see description above
The line style.
"""
try:
if isinstance(ls, six.string_types):
ls = cbook.ls_mapper.get(ls, ls)
dashes = [mlines._get_dash_pattern(ls)]
else:
try:
dashes = [mlines._get_dash_pattern(ls)]
except ValueError:
dashes = [mlines._get_dash_pattern(x) for x in ls]
except ValueError:
raise ValueError(
'Do not know how to convert {!r} to dashes'.format(ls))
# get the list of raw 'unscaled' dash patterns
self._us_linestyles = dashes
# broadcast and scale the lw and dash patterns
self._linewidths, self._linestyles = self._bcast_lwls(
self._us_lw, self._us_linestyles)
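    # Illustrative sketch (comments only): besides the named styles, a dash
    # tuple may be passed, e.g. coll.set_linestyle((0, (6, 2))) for 6 points
    # on, 2 points off with no offset, where coll is any Collection instance.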
def set_capstyle(self, cs):
"""
Set the capstyle for the collection. The capstyle can
only be set globally for all elements in the collection
Parameters
----------
cs : ['butt' | 'round' | 'projecting']
The capstyle
"""
if cs in ('butt', 'round', 'projecting'):
self._capstyle = cs
else:
raise ValueError('Unrecognized cap style. Found %s' % cs)
def get_capstyle(self):
return self._capstyle
def set_joinstyle(self, js):
"""
Set the joinstyle for the collection. The joinstyle can only be
set globally for all elements in the collection.
Parameters
----------
js : ['miter' | 'round' | 'bevel']
The joinstyle
"""
if js in ('miter', 'round', 'bevel'):
self._joinstyle = js
else:
raise ValueError('Unrecognized join style. Found %s' % js)
def get_joinstyle(self):
return self._joinstyle
@staticmethod
def _bcast_lwls(linewidths, dashes):
'''Internal helper function to broadcast + scale ls/lw
In the collection drawing code the linewidth and linestyle are
cycled through as circular buffers (via v[i % len(v)]). Thus,
if we are going to scale the dash pattern at set time (not
draw time) we need to do the broadcasting now and expand both
lists to be the same length.
Parameters
----------
linewidths : list
line widths of collection
dashes : list
dash specification (offset, (dash pattern tuple))
Returns
-------
linewidths, dashes : list
            Will be the same length; dashes are scaled by the paired linewidth
'''
if mpl.rcParams['_internal.classic_mode']:
return linewidths, dashes
# make sure they are the same length so we can zip them
if len(dashes) != len(linewidths):
l_dashes = len(dashes)
l_lw = len(linewidths)
GCD = gcd(l_dashes, l_lw)
dashes = list(dashes) * (l_lw // GCD)
linewidths = list(linewidths) * (l_dashes // GCD)
        # scale the dash patterns
dashes = [mlines._scale_dashes(o, d, lw)
for (o, d), lw in zip(dashes, linewidths)]
return linewidths, dashes
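    # Worked example for _bcast_lwls (comments only): with 2 dash patterns and
    # 3 linewidths, gcd(2, 3) == 1, so the dashes are tiled 3 times and the
    # linewidths 2 times, giving two aligned lists of length 6 whose dash
    # patterns are then scaled by their paired linewidth.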
def set_linestyles(self, ls):
"""alias for set_linestyle"""
return self.set_linestyle(ls)
def set_dashes(self, ls):
"""alias for set_linestyle"""
return self.set_linestyle(ls)
def set_antialiased(self, aa):
"""
Set the antialiasing state for rendering.
ACCEPTS: Boolean or sequence of booleans
"""
if aa is None:
aa = mpl.rcParams['patch.antialiased']
self._antialiaseds = np.atleast_1d(np.asarray(aa, bool))
self.stale = True
def set_antialiaseds(self, aa):
"""alias for set_antialiased"""
return self.set_antialiased(aa)
def set_color(self, c):
"""
Set both the edgecolor and the facecolor.
ACCEPTS: matplotlib color arg or sequence of rgba tuples
.. seealso::
:meth:`set_facecolor`, :meth:`set_edgecolor`
For setting the edge or face color individually.
"""
self.set_facecolor(c)
self.set_edgecolor(c)
def _set_facecolor(self, c):
if c is None:
c = mpl.rcParams['patch.facecolor']
self._is_filled = True
try:
if c.lower() == 'none':
self._is_filled = False
except AttributeError:
pass
self._facecolors = mcolors.to_rgba_array(c, self._alpha)
self.stale = True
def set_facecolor(self, c):
"""
Set the facecolor(s) of the collection. *c* can be a
matplotlib color spec (all patches have same color), or a
sequence of specs; if it is a sequence the patches will
cycle through the sequence.
If *c* is 'none', the patch will not be filled.
ACCEPTS: matplotlib color spec or sequence of specs
"""
self._original_facecolor = c
self._set_facecolor(c)
def set_facecolors(self, c):
"""alias for set_facecolor"""
return self.set_facecolor(c)
def get_facecolor(self):
return self._facecolors
get_facecolors = get_facecolor
def get_edgecolor(self):
if (isinstance(self._edgecolors, six.string_types)
and self._edgecolors == str('face')):
return self.get_facecolors()
else:
return self._edgecolors
get_edgecolors = get_edgecolor
def _set_edgecolor(self, c):
set_hatch_color = True
if c is None:
if (mpl.rcParams['patch.force_edgecolor'] or
not self._is_filled or self._edge_default):
c = mpl.rcParams['patch.edgecolor']
else:
c = 'none'
set_hatch_color = False
self._is_stroked = True
try:
if c.lower() == 'none':
self._is_stroked = False
except AttributeError:
pass
try:
if c.lower() == 'face': # Special case: lookup in "get" method.
self._edgecolors = 'face'
return
except AttributeError:
pass
self._edgecolors = mcolors.to_rgba_array(c, self._alpha)
if set_hatch_color and len(self._edgecolors):
self._hatch_color = tuple(self._edgecolors[0])
self.stale = True
def set_edgecolor(self, c):
"""
Set the edgecolor(s) of the collection. *c* can be a
matplotlib color spec (all patches have same color), or a
sequence of specs; if it is a sequence the patches will
cycle through the sequence.
If *c* is 'face', the edge color will always be the same as
the face color. If it is 'none', the patch boundary will not
be drawn.
ACCEPTS: matplotlib color spec or sequence of specs
"""
self._original_edgecolor = c
self._set_edgecolor(c)
def set_edgecolors(self, c):
"""alias for set_edgecolor"""
return self.set_edgecolor(c)
def set_alpha(self, alpha):
"""
        Set the alpha transparencies of the collection. *alpha* must be
a float or *None*.
ACCEPTS: float or None
"""
if alpha is not None:
try:
float(alpha)
except TypeError:
raise TypeError('alpha must be a float or None')
self.update_dict['array'] = True
artist.Artist.set_alpha(self, alpha)
self._set_facecolor(self._original_facecolor)
self._set_edgecolor(self._original_edgecolor)
def get_linewidths(self):
return self._linewidths
get_linewidth = get_linewidths
def get_linestyles(self):
return self._linestyles
get_dashes = get_linestyle = get_linestyles
def update_scalarmappable(self):
"""
        If the scalar mappable array is not None, update colors
from scalar data
"""
if self._A is None:
return
if self._A.ndim > 1:
raise ValueError('Collections can only map rank 1 arrays')
if not self.check_update("array"):
return
if self._is_filled:
self._facecolors = self.to_rgba(self._A, self._alpha)
elif self._is_stroked:
self._edgecolors = self.to_rgba(self._A, self._alpha)
self.stale = True
def get_fill(self):
'return whether fill is set'
return self._is_filled
def update_from(self, other):
'copy properties from other to self'
artist.Artist.update_from(self, other)
self._antialiaseds = other._antialiaseds
self._original_edgecolor = other._original_edgecolor
self._edgecolors = other._edgecolors
self._original_facecolor = other._original_facecolor
self._facecolors = other._facecolors
self._linewidths = other._linewidths
self._linestyles = other._linestyles
self._us_linestyles = other._us_linestyles
self._pickradius = other._pickradius
self._hatch = other._hatch
# update_from for scalarmappable
self._A = other._A
self.norm = other.norm
self.cmap = other.cmap
# self.update_dict = other.update_dict # do we need to copy this? -JJL
self.stale = True
# these are not available for the object inspector until after the
# class is built so we define an initial set here for the init
# function and they will be overridden after object defn
docstring.interpd.update(Collection="""\
Valid Collection keyword arguments:
* *edgecolors*: None
* *facecolors*: None
* *linewidths*: None
* *antialiaseds*: None
* *offsets*: None
* *transOffset*: transforms.IdentityTransform()
* *norm*: None (optional for
:class:`matplotlib.cm.ScalarMappable`)
* *cmap*: None (optional for
:class:`matplotlib.cm.ScalarMappable`)
*offsets* and *transOffset* are used to translate the patch after
rendering (default no offsets)
If any of *edgecolors*, *facecolors*, *linewidths*, *antialiaseds*
are None, they default to their :data:`matplotlib.rcParams` patch
setting, in sequence form.
""")
class _CollectionWithSizes(Collection):
"""
Base class for collections that have an array of sizes.
"""
_factor = 1.0
def get_sizes(self):
"""
Returns the sizes of the elements in the collection. The
value represents the 'area' of the element.
Returns
-------
sizes : array
The 'area' of each element.
"""
return self._sizes
def set_sizes(self, sizes, dpi=72.0):
"""
Set the sizes of each member of the collection.
Parameters
----------
sizes : ndarray or None
The size to set for each element of the collection. The
value is the 'area' of the element.
dpi : float
The dpi of the canvas. Defaults to 72.0.
"""
if sizes is None:
self._sizes = np.array([])
self._transforms = np.empty((0, 3, 3))
else:
self._sizes = np.asarray(sizes)
self._transforms = np.zeros((len(self._sizes), 3, 3))
scale = np.sqrt(self._sizes) * dpi / 72.0 * self._factor
self._transforms[:, 0, 0] = scale
self._transforms[:, 1, 1] = scale
self._transforms[:, 2, 2] = 1.0
self.stale = True
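    # Worked example (comments only): set_sizes([36]) at dpi=72 with
    # _factor == 1 gives scale = sqrt(36) * 72 / 72 = 6, so a path of unit
    # area ends up covering 36 points^2.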
@artist.allow_rasterization
def draw(self, renderer):
self.set_sizes(self._sizes, self.figure.dpi)
Collection.draw(self, renderer)
class PathCollection(_CollectionWithSizes):
"""
This is the most basic :class:`Collection` subclass.
"""
@docstring.dedent_interpd
def __init__(self, paths, sizes=None, **kwargs):
"""
*paths* is a sequence of :class:`matplotlib.path.Path`
instances.
%(Collection)s
"""
Collection.__init__(self, **kwargs)
self.set_paths(paths)
self.set_sizes(sizes)
self.stale = True
def set_paths(self, paths):
self._paths = paths
self.stale = True
def get_paths(self):
return self._paths
class PolyCollection(_CollectionWithSizes):
@docstring.dedent_interpd
def __init__(self, verts, sizes=None, closed=True, **kwargs):
"""
*verts* is a sequence of ( *verts0*, *verts1*, ...) where
*verts_i* is a sequence of *xy* tuples of vertices, or an
equivalent :mod:`numpy` array of shape (*nv*, 2).
*sizes* is *None* (default) or a sequence of floats that
scale the corresponding *verts_i*. The scaling is applied
before the Artist master transform; if the latter is an identity
transform, then the overall scaling is such that if
*verts_i* specify a unit square, then *sizes_i* is the area
of that square in points^2.
If len(*sizes*) < *nv*, the additional values will be
taken cyclically from the array.
*closed*, when *True*, will explicitly close the polygon.
%(Collection)s
"""
Collection.__init__(self, **kwargs)
self.set_sizes(sizes)
self.set_verts(verts, closed)
self.stale = True
def set_verts(self, verts, closed=True):
'''This allows one to delay initialization of the vertices.'''
if isinstance(verts, np.ma.MaskedArray):
verts = verts.astype(float).filled(np.nan)
# This is much faster than having Path do it one at a time.
if closed:
self._paths = []
for xy in verts:
if len(xy):
if isinstance(xy, np.ma.MaskedArray):
xy = np.ma.concatenate([xy, xy[0:1]])
else:
xy = np.asarray(xy)
xy = np.concatenate([xy, xy[0:1]])
codes = np.empty(xy.shape[0], dtype=mpath.Path.code_type)
codes[:] = mpath.Path.LINETO
codes[0] = mpath.Path.MOVETO
codes[-1] = mpath.Path.CLOSEPOLY
self._paths.append(mpath.Path(xy, codes))
else:
self._paths.append(mpath.Path(xy))
else:
self._paths = [mpath.Path(xy) for xy in verts]
self.stale = True
set_paths = set_verts
def set_verts_and_codes(self, verts, codes):
'''This allows one to initialize vertices with path codes.'''
if (len(verts) != len(codes)):
            raise ValueError("'codes' must be a 1D list or array "
                             "with the same length as 'verts'")
self._paths = []
for xy, cds in zip(verts, codes):
if len(xy):
self._paths.append(mpath.Path(xy, cds))
else:
self._paths.append(mpath.Path(xy))
self.stale = True
class BrokenBarHCollection(PolyCollection):
"""
A collection of horizontal bars spanning *yrange* with a sequence of
*xranges*.
"""
@docstring.dedent_interpd
def __init__(self, xranges, yrange, **kwargs):
"""
*xranges*
sequence of (*xmin*, *xwidth*)
*yrange*
*ymin*, *ywidth*
%(Collection)s
"""
ymin, ywidth = yrange
ymax = ymin + ywidth
verts = [[(xmin, ymin),
(xmin, ymax),
(xmin + xwidth, ymax),
(xmin + xwidth, ymin),
(xmin, ymin)] for xmin, xwidth in xranges]
PolyCollection.__init__(self, verts, **kwargs)
@staticmethod
def span_where(x, ymin, ymax, where, **kwargs):
"""
        Create a BrokenBarHCollection to plot horizontal bars over the
        regions in *x* where *where* is True. The bars range on the
        y-axis from *ymin* to *ymax*.
A :class:`BrokenBarHCollection` is returned. *kwargs* are
passed on to the collection.
"""
xranges = []
for ind0, ind1 in cbook.contiguous_regions(where):
xslice = x[ind0:ind1]
if not len(xslice):
continue
xranges.append((xslice[0], xslice[-1] - xslice[0]))
collection = BrokenBarHCollection(
xranges, [ymin, ymax - ymin], **kwargs)
return collection
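    # Illustrative usage sketch for span_where (comments only), assuming an
    # existing Axes ``ax`` and arrays ``x`` and ``y``:
    #
    #     collection = BrokenBarHCollection.span_where(
    #         x, ymin=0, ymax=1, where=y > 0, facecolor='green', alpha=0.5)
    #     ax.add_collection(collection)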
class RegularPolyCollection(_CollectionWithSizes):
"""Draw a collection of regular polygons with *numsides*."""
_path_generator = mpath.Path.unit_regular_polygon
_factor = CIRCLE_AREA_FACTOR
@docstring.dedent_interpd
def __init__(self,
numsides,
rotation=0,
sizes=(1,),
**kwargs):
"""
*numsides*
the number of sides of the polygon
*rotation*
the rotation of the polygon in radians
*sizes*
gives the area of the circle circumscribing the
regular polygon in points^2
%(Collection)s
        Example: see :file:`examples/dynamic_collection.py` for a
        complete example::
offsets = np.random.rand(20,2)
facecolors = [cm.jet(x) for x in np.random.rand(20)]
black = (0,0,0,1)
collection = RegularPolyCollection(
numsides=5, # a pentagon
rotation=0, sizes=(50,),
facecolors = facecolors,
edgecolors = (black,),
linewidths = (1,),
offsets = offsets,
transOffset = ax.transData,
)
"""
Collection.__init__(self, **kwargs)
self.set_sizes(sizes)
self._numsides = numsides
self._paths = [self._path_generator(numsides)]
self._rotation = rotation
self.set_transform(transforms.IdentityTransform())
def get_numsides(self):
return self._numsides
def get_rotation(self):
return self._rotation
@artist.allow_rasterization
def draw(self, renderer):
self.set_sizes(self._sizes, self.figure.dpi)
self._transforms = [
transforms.Affine2D(x).rotate(-self._rotation).get_matrix()
for x in self._transforms
]
Collection.draw(self, renderer)
class StarPolygonCollection(RegularPolyCollection):
"""
Draw a collection of regular stars with *numsides* points."""
_path_generator = mpath.Path.unit_regular_star
class AsteriskPolygonCollection(RegularPolyCollection):
"""
Draw a collection of regular asterisks with *numsides* points."""
_path_generator = mpath.Path.unit_regular_asterisk
class LineCollection(Collection):
"""
All parameters must be sequences or scalars; if scalars, they will
be converted to sequences. The property of the ith line
segment is::
prop[i % len(props)]
i.e., the properties cycle if the ``len`` of props is less than the
number of segments.
"""
_edge_default = True
def __init__(self, segments, # Can be None.
linewidths=None,
colors=None,
antialiaseds=None,
linestyles='solid',
offsets=None,
transOffset=None,
norm=None,
cmap=None,
pickradius=5,
zorder=2,
facecolors='none',
**kwargs
):
"""
Parameters
----------
segments :
A sequence of (*line0*, *line1*, *line2*), where::
linen = (x0, y0), (x1, y1), ... (xm, ym)
or the equivalent numpy array with two columns. Each line
can be a different length.
colors : sequence, optional
            A sequence of RGBA tuples; arbitrary color strings and other
            color specifications are not allowed.
antialiaseds : sequence, optional
A sequence of ones or zeros.
linestyles : string, tuple, optional
Either one of [ 'solid' | 'dashed' | 'dashdot' | 'dotted' ], or
a dash tuple. The dash tuple is::
(offset, onoffseq)
where ``onoffseq`` is an even length tuple of on and off ink
in points.
norm : Normalize, optional
`~.colors.Normalize` instance.
cmap : string or Colormap, optional
Colormap name or `~.colors.Colormap` instance.
pickradius : float, optional
The tolerance in points for mouse clicks picking a line.
Default is 5 pt.
zorder : int, optional
zorder of the LineCollection. Default is 2.
facecolors : optional
The facecolors of the LineCollection. Default is 'none'.
Setting to a value other than 'none' will lead to a filled
polygon being drawn between points on each line.
Notes
-----
If *linewidths*, *colors*, or *antialiaseds* is None, they
default to their rcParams setting, in sequence form.
If *offsets* and *transOffset* are not None, then
*offsets* are transformed by *transOffset* and applied after
the segments have been transformed to display coordinates.
If *offsets* is not None but *transOffset* is None, then the
*offsets* are added to the segments before any transformation.
In this case, a single offset can be specified as::
offsets=(xo,yo)
and this value will be added cumulatively to each successive
segment, so as to produce a set of successively offset curves.
The use of :class:`~matplotlib.cm.ScalarMappable` is optional.
If the :class:`~matplotlib.cm.ScalarMappable` array
:attr:`~matplotlib.cm.ScalarMappable._A` is not None (i.e., a call to
:meth:`~matplotlib.cm.ScalarMappable.set_array` has been made), at
draw time a call to scalar mappable will be made to set the colors.
"""
if colors is None:
colors = mpl.rcParams['lines.color']
if linewidths is None:
linewidths = (mpl.rcParams['lines.linewidth'],)
if antialiaseds is None:
antialiaseds = (mpl.rcParams['lines.antialiased'],)
colors = mcolors.to_rgba_array(colors)
Collection.__init__(
self,
edgecolors=colors,
facecolors=facecolors,
linewidths=linewidths,
linestyles=linestyles,
antialiaseds=antialiaseds,
offsets=offsets,
transOffset=transOffset,
norm=norm,
cmap=cmap,
pickradius=pickradius,
zorder=zorder,
**kwargs)
self.set_segments(segments)
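    # Illustrative usage sketch (comments only), assuming an existing Axes
    # ``ax``; each segment is a list of (x, y) pairs or an (N, 2) array:
    #
    #     segments = [[(0, 0), (1, 1)], [(0, 1), (1, 0)]]
    #     lc = LineCollection(segments, colors=[(1, 0, 0, 1), (0, 0, 1, 1)],
    #                         linewidths=2)
    #     ax.add_collection(lc)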
def set_segments(self, segments):
if segments is None:
return
_segments = []
for seg in segments:
if not isinstance(seg, np.ma.MaskedArray):
seg = np.asarray(seg, float)
_segments.append(seg)
if self._uniform_offsets is not None:
_segments = self._add_offsets(_segments)
self._paths = [mpath.Path(_seg) for _seg in _segments]
self.stale = True
set_verts = set_segments # for compatibility with PolyCollection
set_paths = set_segments
def get_segments(self):
"""
Returns
-------
segments : list
List of segments in the LineCollection. Each list item contains an
array of vertices.
"""
segments = []
for path in self._paths:
vertices = [vertex for vertex, _ in path.iter_segments()]
vertices = np.asarray(vertices)
segments.append(vertices)
return segments
def _add_offsets(self, segs):
offsets = self._uniform_offsets
Nsegs = len(segs)
Noffs = offsets.shape[0]
if Noffs == 1:
for i in range(Nsegs):
segs[i] = segs[i] + i * offsets
else:
for i in range(Nsegs):
io = i % Noffs
segs[i] = segs[i] + offsets[io:io + 1]
return segs
def set_color(self, c):
"""
Set the color(s) of the LineCollection.
Parameters
----------
c :
Matplotlib color argument (all patches have same color), or a
sequence or rgba tuples; if it is a sequence the patches will
cycle through the sequence.
"""
self.set_edgecolor(c)
self.stale = True
def get_color(self):
return self._edgecolors
get_colors = get_color # for compatibility with old versions
class EventCollection(LineCollection):
'''
A collection of discrete events.
The events are given by a 1-dimensional array, usually the position of
something along an axis, such as time or length. They do not have an
amplitude and are displayed as vertical or horizontal parallel bars.
'''
_edge_default = True
def __init__(self,
positions, # Cannot be None.
orientation=None,
lineoffset=0,
linelength=1,
linewidth=None,
color=None,
linestyle='solid',
antialiased=None,
**kwargs
):
"""
Parameters
----------
positions : 1D array-like object
Each value is an event.
orientation : {None, 'horizontal', 'vertical'}, optional
The orientation of the **collection** (the event bars are along
the orthogonal direction). Defaults to 'horizontal' if not
specified or None.
lineoffset : scalar, optional, default: 0
The offset of the center of the markers from the origin, in the
direction orthogonal to *orientation*.
linelength : scalar, optional, default: 1
The total height of the marker (i.e. the marker stretches from
``lineoffset - linelength/2`` to ``lineoffset + linelength/2``).
linewidth : scalar or None, optional, default: None
If it is None, defaults to its rcParams setting, in sequence form.
color : color, sequence of colors or None, optional, default: None
If it is None, defaults to its rcParams setting, in sequence form.
linestyle : str or tuple, optional, default: 'solid'
Valid strings are ['solid', 'dashed', 'dashdot', 'dotted',
'-', '--', '-.', ':']. Dash tuples should be of the form::
(offset, onoffseq),
where *onoffseq* is an even length tuple of on and off ink
in points.
antialiased : {None, 1, 2}, optional
If it is None, defaults to its rcParams setting, in sequence form.
**kwargs : optional
Other keyword arguments are line collection properties. See
:class:`~matplotlib.collections.LineCollection` for a list of
the valid properties.
Examples
--------
.. plot:: gallery/lines_bars_and_markers/eventcollection_demo.py
"""
segment = (lineoffset + linelength / 2.,
lineoffset - linelength / 2.)
if positions is None or len(positions) == 0:
segments = []
elif hasattr(positions, 'ndim') and positions.ndim > 1:
raise ValueError('positions cannot be an array with more than '
'one dimension.')
elif (orientation is None or orientation.lower() == 'none' or
orientation.lower() == 'horizontal'):
positions.sort()
segments = [[(coord1, coord2) for coord2 in segment] for
coord1 in positions]
self._is_horizontal = True
elif orientation.lower() == 'vertical':
positions.sort()
segments = [[(coord2, coord1) for coord2 in segment] for
coord1 in positions]
self._is_horizontal = False
else:
raise ValueError("orientation must be 'horizontal' or 'vertical'")
LineCollection.__init__(self,
segments,
linewidths=linewidth,
colors=color,
antialiaseds=antialiased,
linestyles=linestyle,
**kwargs)
self._linelength = linelength
self._lineoffset = lineoffset
def get_positions(self):
'''
return an array containing the floating-point values of the positions
'''
segments = self.get_segments()
pos = 0 if self.is_horizontal() else 1
positions = []
for segment in segments:
positions.append(segment[0, pos])
return positions
def set_positions(self, positions):
'''
set the positions of the events to the specified value
'''
if positions is None or (hasattr(positions, 'len') and
len(positions) == 0):
self.set_segments([])
return
lineoffset = self.get_lineoffset()
linelength = self.get_linelength()
segment = (lineoffset + linelength / 2.,
lineoffset - linelength / 2.)
positions = np.asanyarray(positions)
positions.sort()
if self.is_horizontal():
segments = [[(coord1, coord2) for coord2 in segment] for
coord1 in positions]
else:
segments = [[(coord2, coord1) for coord2 in segment] for
coord1 in positions]
self.set_segments(segments)
def add_positions(self, position):
'''
add one or more events at the specified positions
'''
if position is None or (hasattr(position, 'len') and
len(position) == 0):
return
positions = self.get_positions()
positions = np.hstack([positions, np.asanyarray(position)])
self.set_positions(positions)
extend_positions = append_positions = add_positions
def is_horizontal(self):
'''
True if the eventcollection is horizontal, False if vertical
'''
return self._is_horizontal
def get_orientation(self):
'''
get the orientation of the event line, may be:
[ 'horizontal' | 'vertical' ]
'''
return 'horizontal' if self.is_horizontal() else 'vertical'
def switch_orientation(self):
'''
switch the orientation of the event line, either from vertical to
        horizontal or vice versa
'''
segments = self.get_segments()
for i, segment in enumerate(segments):
segments[i] = np.fliplr(segment)
self.set_segments(segments)
self._is_horizontal = not self.is_horizontal()
self.stale = True
def set_orientation(self, orientation=None):
'''
set the orientation of the event line
[ 'horizontal' | 'vertical' | None ]
defaults to 'horizontal' if not specified or None
'''
if (orientation is None or orientation.lower() == 'none' or
orientation.lower() == 'horizontal'):
is_horizontal = True
elif orientation.lower() == 'vertical':
is_horizontal = False
else:
raise ValueError("orientation must be 'horizontal' or 'vertical'")
if is_horizontal == self.is_horizontal():
return
self.switch_orientation()
def get_linelength(self):
'''
get the length of the lines used to mark each event
'''
return self._linelength
def set_linelength(self, linelength):
'''
set the length of the lines used to mark each event
'''
if linelength == self.get_linelength():
return
lineoffset = self.get_lineoffset()
segments = self.get_segments()
pos = 1 if self.is_horizontal() else 0
for segment in segments:
segment[0, pos] = lineoffset + linelength / 2.
segment[1, pos] = lineoffset - linelength / 2.
self.set_segments(segments)
self._linelength = linelength
def get_lineoffset(self):
'''
get the offset of the lines used to mark each event
'''
return self._lineoffset
def set_lineoffset(self, lineoffset):
'''
set the offset of the lines used to mark each event
'''
if lineoffset == self.get_lineoffset():
return
linelength = self.get_linelength()
segments = self.get_segments()
pos = 1 if self.is_horizontal() else 0
for segment in segments:
segment[0, pos] = lineoffset + linelength / 2.
segment[1, pos] = lineoffset - linelength / 2.
self.set_segments(segments)
self._lineoffset = lineoffset
def get_linewidth(self):
'''
get the width of the lines used to mark each event
'''
return self.get_linewidths()[0]
def get_linestyle(self):
'''
get the style of the lines used to mark each event
[ 'solid' | 'dashed' | 'dashdot' | 'dotted' ]
'''
return self.get_linestyles()
def get_color(self):
'''
get the color of the lines used to mark each event
'''
return self.get_colors()[0]
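# --- Illustrative usage sketch (not part of the original module) -----------
# A minimal example of placing an EventCollection on an axes and relying on
# the position/orientation API above.  The helper name
# `_demo_event_collection` is hypothetical and is never called here.
def _demo_event_collection():
    import numpy as np
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    positions = np.array([0.1, 0.4, 0.45, 0.9])
    # Horizontal event marks centred on y=0.5, each 0.2 data-units long.
    ec = EventCollection(positions, orientation='horizontal',
                         lineoffset=0.5, linelength=0.2, color='C0')
    ax.add_collection(ec)
    ax.set_xlim(0, 1)
    ax.set_ylim(0, 1)
    return fig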
class CircleCollection(_CollectionWithSizes):
"""
A collection of circles, drawn using splines.
"""
_factor = CIRCLE_AREA_FACTOR
@docstring.dedent_interpd
def __init__(self, sizes, **kwargs):
"""
*sizes*
Gives the area of the circle in points^2
%(Collection)s
"""
Collection.__init__(self, **kwargs)
self.set_sizes(sizes)
self.set_transform(transforms.IdentityTransform())
self._paths = [mpath.Path.unit_circle()]
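# --- Illustrative usage sketch (not part of the original module) -----------
# CircleCollection takes circle *areas* in points^2, like the `s` argument
# of scatter.  The helper name `_demo_circle_collection` is hypothetical.
def _demo_circle_collection():
    import numpy as np
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    offsets = np.random.rand(20, 2)
    sizes = np.full(20, 50.0)   # circle areas in points^2
    cc = CircleCollection(sizes, offsets=offsets, transOffset=ax.transData,
                          facecolor='tab:blue', edgecolor='none')
    ax.add_collection(cc)
    ax.set_xlim(0, 1)
    ax.set_ylim(0, 1)
    return fig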
class EllipseCollection(Collection):
"""
A collection of ellipses, drawn using splines.
"""
@docstring.dedent_interpd
def __init__(self, widths, heights, angles, units='points', **kwargs):
"""
*widths*: sequence
lengths of first axes (e.g., major axis lengths)
*heights*: sequence
lengths of second axes
*angles*: sequence
angles of first axes, degrees CCW from the X-axis
*units*: ['points' | 'inches' | 'dots' | 'width' | 'height'
| 'x' | 'y' | 'xy']
units in which majors and minors are given; 'width' and
'height' refer to the dimensions of the axes, while 'x'
and 'y' refer to the *offsets* data units. 'xy' differs
from all others in that the angle as plotted varies with
the aspect ratio, and equals the specified angle only when
the aspect ratio is unity. Hence it behaves the same as
the :class:`~matplotlib.patches.Ellipse` with
axes.transData as its transform.
Additional kwargs inherited from the base :class:`Collection`:
%(Collection)s
"""
Collection.__init__(self, **kwargs)
self._widths = 0.5 * np.asarray(widths).ravel()
self._heights = 0.5 * np.asarray(heights).ravel()
self._angles = np.deg2rad(angles).ravel()
self._units = units
self.set_transform(transforms.IdentityTransform())
self._transforms = np.empty((0, 3, 3))
self._paths = [mpath.Path.unit_circle()]
def _set_transforms(self):
"""
Calculate transforms immediately before drawing.
"""
ax = self.axes
fig = self.figure
if self._units == 'xy':
sc = 1
elif self._units == 'x':
sc = ax.bbox.width / ax.viewLim.width
elif self._units == 'y':
sc = ax.bbox.height / ax.viewLim.height
elif self._units == 'inches':
sc = fig.dpi
elif self._units == 'points':
sc = fig.dpi / 72.0
elif self._units == 'width':
sc = ax.bbox.width
elif self._units == 'height':
sc = ax.bbox.height
elif self._units == 'dots':
sc = 1.0
else:
raise ValueError('unrecognized units: %s' % self._units)
self._transforms = np.zeros((len(self._widths), 3, 3))
widths = self._widths * sc
heights = self._heights * sc
sin_angle = np.sin(self._angles)
cos_angle = np.cos(self._angles)
self._transforms[:, 0, 0] = widths * cos_angle
self._transforms[:, 0, 1] = heights * -sin_angle
self._transforms[:, 1, 0] = widths * sin_angle
self._transforms[:, 1, 1] = heights * cos_angle
self._transforms[:, 2, 2] = 1.0
_affine = transforms.Affine2D
if self._units == 'xy':
m = ax.transData.get_affine().get_matrix().copy()
m[:2, 2:] = 0
self.set_transform(_affine(m))
@artist.allow_rasterization
def draw(self, renderer):
self._set_transforms()
Collection.draw(self, renderer)
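# --- Illustrative usage sketch (not part of the original module) -----------
# EllipseCollection with units='x', so the ellipse sizes follow the x data
# scale as described in the docstring above.  The helper name
# `_demo_ellipse_collection` is hypothetical.
def _demo_ellipse_collection():
    import numpy as np
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    X, Y = np.meshgrid(np.arange(10), np.arange(10))
    XY = np.column_stack((X.ravel(), Y.ravel()))
    ec = EllipseCollection(widths=0.5 * np.ones(len(XY)),
                           heights=0.3 * np.ones(len(XY)),
                           angles=45 * np.ones(len(XY)),
                           units='x',
                           offsets=XY, transOffset=ax.transData)
    ax.add_collection(ec)
    ax.autoscale_view()
    return fig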
class PatchCollection(Collection):
"""
A generic collection of patches.
This makes it easier to assign a color map to a heterogeneous
collection of patches.
This also may improve plotting speed, since PatchCollection will
draw faster than a large number of patches.
"""
def __init__(self, patches, match_original=False, **kwargs):
"""
*patches*
a sequence of Patch objects. This list may include
a heterogeneous assortment of different patch types.
*match_original*
If True, use the colors and linewidths of the original
patches. If False, new colors may be assigned by
providing the standard collection arguments, facecolor,
edgecolor, linewidths, norm or cmap.
If any of *edgecolors*, *facecolors*, *linewidths*,
*antialiaseds* are None, they default to their
:data:`matplotlib.rcParams` patch setting, in sequence form.
The use of :class:`~matplotlib.cm.ScalarMappable` is optional.
If the :class:`~matplotlib.cm.ScalarMappable` matrix _A is not
None (i.e., a call to set_array has been made), at draw time a
call to scalar mappable will be made to set the face colors.
"""
if match_original:
def determine_facecolor(patch):
if patch.get_fill():
return patch.get_facecolor()
return [0, 0, 0, 0]
kwargs['facecolors'] = [determine_facecolor(p) for p in patches]
kwargs['edgecolors'] = [p.get_edgecolor() for p in patches]
kwargs['linewidths'] = [p.get_linewidth() for p in patches]
kwargs['linestyles'] = [p.get_linestyle() for p in patches]
kwargs['antialiaseds'] = [p.get_antialiased() for p in patches]
Collection.__init__(self, **kwargs)
self.set_paths(patches)
def set_paths(self, patches):
paths = [p.get_transform().transform_path(p.get_path())
for p in patches]
self._paths = paths
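# --- Illustrative usage sketch (not part of the original module) -----------
# With match_original=True the per-patch colors and linewidths are kept
# instead of letting the collection assign new ones.  The helper name
# `_demo_patch_collection` is hypothetical.
def _demo_patch_collection():
    import matplotlib.pyplot as plt
    from matplotlib.patches import Circle, Rectangle
    patches = [Circle((0.3, 0.5), 0.1, facecolor='tab:blue'),
               Rectangle((0.6, 0.4), 0.2, 0.2, facecolor='tab:orange')]
    fig, ax = plt.subplots()
    pc = PatchCollection(patches, match_original=True)
    ax.add_collection(pc)
    return fig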
class TriMesh(Collection):
"""
Class for the efficient drawing of a triangular mesh using
Gouraud shading.
A triangular mesh is a :class:`~matplotlib.tri.Triangulation`
object.
"""
def __init__(self, triangulation, **kwargs):
Collection.__init__(self, **kwargs)
self._triangulation = triangulation
self._shading = 'gouraud'
self._is_filled = True
self._bbox = transforms.Bbox.unit()
# Unfortunately this requires a copy, unless Triangulation
# was rewritten.
xy = np.hstack((triangulation.x.reshape(-1, 1),
triangulation.y.reshape(-1, 1)))
self._bbox.update_from_data_xy(xy)
def get_paths(self):
if self._paths is None:
self.set_paths()
return self._paths
def set_paths(self):
self._paths = self.convert_mesh_to_paths(self._triangulation)
@staticmethod
def convert_mesh_to_paths(tri):
"""
Converts a given mesh into a sequence of
:class:`matplotlib.path.Path` objects for easier rendering by
backends that do not directly support meshes.
This function is primarily of use to backend implementers.
"""
Path = mpath.Path
triangles = tri.get_masked_triangles()
verts = np.concatenate((tri.x[triangles][..., np.newaxis],
tri.y[triangles][..., np.newaxis]), axis=2)
return [Path(x) for x in verts]
@artist.allow_rasterization
def draw(self, renderer):
if not self.get_visible():
return
renderer.open_group(self.__class__.__name__)
transform = self.get_transform()
# Get a list of triangles and the color at each vertex.
tri = self._triangulation
triangles = tri.get_masked_triangles()
verts = np.concatenate((tri.x[triangles][..., np.newaxis],
tri.y[triangles][..., np.newaxis]), axis=2)
self.update_scalarmappable()
colors = self._facecolors[triangles]
gc = renderer.new_gc()
self._set_gc_clip(gc)
gc.set_linewidth(self.get_linewidth()[0])
renderer.draw_gouraud_triangles(gc, verts, colors, transform.frozen())
gc.restore()
renderer.close_group(self.__class__.__name__)
class QuadMesh(Collection):
"""
Class for the efficient drawing of a quadrilateral mesh.
A quadrilateral mesh consists of a grid of vertices. The
dimensions of this array are (*meshWidth* + 1, *meshHeight* +
1). Each vertex in the mesh has a different set of "mesh
coordinates" representing its position in the topology of the
mesh. For any values (*m*, *n*) such that 0 <= *m* <= *meshWidth*
and 0 <= *n* <= *meshHeight*, the vertices at mesh coordinates
(*m*, *n*), (*m*, *n* + 1), (*m* + 1, *n* + 1), and (*m* + 1, *n*)
form one of the quadrilaterals in the mesh. There are thus
(*meshWidth* * *meshHeight*) quadrilaterals in the mesh. The mesh
need not be regular and the polygons need not be convex.
A quadrilateral mesh is represented by a (2 x ((*meshWidth* + 1) *
(*meshHeight* + 1))) numpy array *coordinates*, where each row is
the *x* and *y* coordinates of one of the vertices. To define the
function that maps from a data point to its corresponding color,
use the :meth:`set_cmap` method. Each of these arrays is indexed in
row-major order by the mesh coordinates of the vertex (or the mesh
coordinates of the lower left vertex, in the case of the
colors).
For example, the first entry in *coordinates* is the
coordinates of the vertex at mesh coordinates (0, 0), then the one
at (0, 1), then at (0, 2) .. (0, meshWidth), (1, 0), (1, 1), and
so on.
*shading* may be 'flat', or 'gouraud'
"""
def __init__(self, meshWidth, meshHeight, coordinates,
antialiased=True, shading='flat', **kwargs):
Collection.__init__(self, **kwargs)
self._meshWidth = meshWidth
self._meshHeight = meshHeight
# By converting to floats now, we can avoid that on every draw.
self._coordinates = np.asarray(coordinates, float).reshape(
(meshHeight + 1, meshWidth + 1, 2))
self._antialiased = antialiased
self._shading = shading
self._bbox = transforms.Bbox.unit()
self._bbox.update_from_data_xy(coordinates.reshape(
((meshWidth + 1) * (meshHeight + 1), 2)))
def get_paths(self):
if self._paths is None:
self.set_paths()
return self._paths
def set_paths(self):
self._paths = self.convert_mesh_to_paths(
self._meshWidth, self._meshHeight, self._coordinates)
self.stale = True
def get_datalim(self, transData):
return (self.get_transform() - transData).transform_bbox(self._bbox)
@staticmethod
def convert_mesh_to_paths(meshWidth, meshHeight, coordinates):
"""
Converts a given mesh into a sequence of
:class:`matplotlib.path.Path` objects for easier rendering by
backends that do not directly support quadmeshes.
This function is primarily of use to backend implementers.
"""
Path = mpath.Path
if isinstance(coordinates, np.ma.MaskedArray):
c = coordinates.data
else:
c = coordinates
points = np.concatenate((
c[0:-1, 0:-1],
c[0:-1, 1:],
c[1:, 1:],
c[1:, 0:-1],
c[0:-1, 0:-1]
), axis=2)
points = points.reshape((meshWidth * meshHeight, 5, 2))
return [Path(x) for x in points]
def convert_mesh_to_triangles(self, meshWidth, meshHeight, coordinates):
"""
Converts a given mesh into a sequence of triangles, each point
with its own color. This is useful for experiments using
        `draw_gouraud_triangle`.
"""
if isinstance(coordinates, np.ma.MaskedArray):
p = coordinates.data
else:
p = coordinates
p_a = p[:-1, :-1]
p_b = p[:-1, 1:]
p_c = p[1:, 1:]
p_d = p[1:, :-1]
p_center = (p_a + p_b + p_c + p_d) / 4.0
triangles = np.concatenate((
p_a, p_b, p_center,
p_b, p_c, p_center,
p_c, p_d, p_center,
p_d, p_a, p_center,
), axis=2)
triangles = triangles.reshape((meshWidth * meshHeight * 4, 3, 2))
c = self.get_facecolor().reshape((meshHeight + 1, meshWidth + 1, 4))
c_a = c[:-1, :-1]
c_b = c[:-1, 1:]
c_c = c[1:, 1:]
c_d = c[1:, :-1]
c_center = (c_a + c_b + c_c + c_d) / 4.0
colors = np.concatenate((
c_a, c_b, c_center,
c_b, c_c, c_center,
c_c, c_d, c_center,
c_d, c_a, c_center,
), axis=2)
colors = colors.reshape((meshWidth * meshHeight * 4, 3, 4))
return triangles, colors
@artist.allow_rasterization
def draw(self, renderer):
if not self.get_visible():
return
renderer.open_group(self.__class__.__name__, self.get_gid())
transform = self.get_transform()
transOffset = self.get_offset_transform()
offsets = self._offsets
if self.have_units():
if len(self._offsets):
xs = self.convert_xunits(self._offsets[:, 0])
ys = self.convert_yunits(self._offsets[:, 1])
offsets = np.column_stack([xs, ys])
self.update_scalarmappable()
if not transform.is_affine:
coordinates = self._coordinates.reshape((-1, 2))
coordinates = transform.transform(coordinates)
coordinates = coordinates.reshape(self._coordinates.shape)
transform = transforms.IdentityTransform()
else:
coordinates = self._coordinates
if not transOffset.is_affine:
offsets = transOffset.transform_non_affine(offsets)
transOffset = transOffset.get_affine()
gc = renderer.new_gc()
self._set_gc_clip(gc)
gc.set_linewidth(self.get_linewidth()[0])
if self._shading == 'gouraud':
triangles, colors = self.convert_mesh_to_triangles(
self._meshWidth, self._meshHeight, coordinates)
renderer.draw_gouraud_triangles(
gc, triangles, colors, transform.frozen())
else:
renderer.draw_quad_mesh(
gc, transform.frozen(), self._meshWidth, self._meshHeight,
coordinates, offsets, transOffset, self.get_facecolor(),
self._antialiased, self.get_edgecolors())
gc.restore()
renderer.close_group(self.__class__.__name__)
self.stale = False
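# --- Illustrative usage sketch (not part of the original module) -----------
# The vertex-grid layout described in the QuadMesh docstring is what
# ax.pcolormesh builds: (meshHeight + 1) x (meshWidth + 1) vertices and one
# color value per quadrilateral for flat shading.  The helper name
# `_demo_quadmesh` is hypothetical.
def _demo_quadmesh():
    import numpy as np
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    x = np.linspace(0, 1, 5)          # meshWidth + 1 = 5 vertex columns
    y = np.linspace(0, 1, 4)          # meshHeight + 1 = 4 vertex rows
    X, Y = np.meshgrid(x, y)
    C = np.random.rand(3, 4)          # one value per quad (flat shading)
    mesh = ax.pcolormesh(X, Y, C)     # pcolormesh returns a QuadMesh
    fig.colorbar(mesh, ax=ax)
    return mesh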
patchstr = artist.kwdoc(Collection)
for k in ('QuadMesh', 'TriMesh', 'PolyCollection', 'BrokenBarHCollection',
'RegularPolyCollection', 'PathCollection',
'StarPolygonCollection', 'PatchCollection',
'CircleCollection', 'Collection',):
docstring.interpd.update({k: patchstr})
docstring.interpd.update(LineCollection=artist.kwdoc(LineCollection))
| 68,166 | 33.168922 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_constrained_layout.py |
"""
This module provides the routine to adjust subplot layouts so that there are
no overlapping axes or axes decorations. All axes decorations are dealt with
(labels, ticks, titles, ticklabels) and some dependent artists are also dealt
with (colorbar, suptitle, legend).
Layout is done via :meth:`~matplotlib.gridspec`, with one constraint per
gridspec, so it is possible to have overlapping axes if the gridspecs
overlap (i.e. using :meth:`~matplotlib.gridspec.GridSpecFromSubplotSpec`).
Axes placed using ``figure.subplots()`` or ``figure.add_subplots()`` will
participate in the layout. Axes manually placed via ``figure.add_axes()``
will not.
See Tutorial: :doc:`/tutorials/intermediate/constrainedlayout_guide`
"""
# Development Notes:
# What gets a layoutbox:
# - figure
# - gridspec
# - subplotspec
# EITHER:
# - axes + pos for the axes (i.e. the total area taken by axis and
# the actual "position" argument that needs to be sent to
# ax.set_position.)
#          - The axes layout box will also encompass the legend, and that is
#            how legends get included (axes legends, not figure legends)
#          - colorbars are siblings of the axes if they are single-axes
# colorbars
# OR:
# - a gridspec can be inside a subplotspec.
# - subplotspec
# EITHER:
# - axes...
# OR:
# - gridspec... with arbitrary nesting...
# - colorbars are siblings of the subplotspecs if they are multi-axes
# colorbars.
# - suptitle:
# - right now suptitles are just stacked atop everything else in figure.
#      Could imagine suptitles being gridspec suptitles, but not implemented
#
# Todo: AnchoredOffsetbox connected to gridspecs or axes. This would
# be more general way to add extra-axes annotations.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import logging
import warnings
from matplotlib.legend import Legend
import matplotlib.transforms as transforms
import matplotlib._layoutbox as layoutbox
_log = logging.getLogger(__name__)
def get_axall_tightbbox(ax, renderer):
'''
Get the tight_bbox of the axis ax, and any dependent decorations, like
a `Legend` instance.
'''
# main bbox of the axis....
bbox = ax.get_tightbbox(renderer=renderer)
# now add the possibility of the legend...
for child in ax.get_children():
if isinstance(child, Legend):
bboxn = child._legend_box.get_window_extent(renderer)
bbox = transforms.Bbox.union([bbox, bboxn])
# add other children here....
return bbox
def in_same_column(colnum0min, colnum0max, colnumCmin, colnumCmax):
if colnum0min >= colnumCmin and colnum0min <= colnumCmax:
return True
if colnum0max >= colnumCmin and colnum0max <= colnumCmax:
return True
return False
def in_same_row(rownum0min, rownum0max, rownumCmin, rownumCmax):
if rownum0min >= rownumCmin and rownum0min <= rownumCmax:
return True
if rownum0max >= rownumCmin and rownum0max <= rownumCmax:
return True
return False
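# Illustrative sketch (not part of the original module): the two helpers
# above report whether either endpoint of the first column/row span lies
# inside the second span.  The helper name `_demo_same_column` is
# hypothetical and is never called here.
def _demo_same_column():
    assert in_same_column(0, 1, 1, 1)       # span 0-1 touches column 1
    assert not in_same_column(0, 0, 2, 3)   # column 0 vs columns 2-3
    assert in_same_row(2, 2, 0, 2)          # row 2 is the last row of 0-2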
######################################################
def do_constrained_layout(fig, renderer, h_pad, w_pad,
hspace=None, wspace=None):
"""
Do the constrained_layout. Called at draw time in
``figure.constrained_layout()``
Parameters
----------
fig: Figure
is the ``figure`` instance to do the layout in.
renderer: Renderer
the renderer to use.
h_pad, w_pad : float
are in figure-normalized units, and are a padding around the axes
elements.
hspace, wspace : float
are in fractions of the subplot sizes.
"""
''' Steps:
1. get a list of unique gridspecs in this figure. Each gridspec will be
constrained separately.
2. Check for gaps in the gridspecs. i.e. if not every axes slot in the
gridspec has been filled. If empty, add a ghost axis that is made so
that it cannot be seen (though visible=True). This is needed to make
a blank spot in the layout.
3. Compare the tight_bbox of each axes to its `position`, and assume that
the difference is the space needed by the elements around the edge of
the axes (decorations) like the title, ticklabels, x-labels, etc. This
can include legends who overspill the axes boundaries.
4. Constrain gridspec elements to line up:
a) if colnum0 neq colnumC, the two subplotspecs are stacked next to
each other, with the appropriate order.
b) if colnum0 == columnC line up the left or right side of the
_poslayoutbox (depending if it is the min or max num that is equal).
c) do the same for rows...
5. The above doesn't constrain relative sizes of the _poslayoutboxes at
all, and indeed zero-size is a solution that the solver often finds more
convenient than expanding the sizes. Right now the solution is to compare
subplotspec sizes (i.e. drowsC and drows0) and constrain the larger
_poslayoutbox to be larger than the ratio of the sizes. i.e. if drows0 >
drowsC, then ax._poslayoutbox > axc._poslayoutbox * drowsC / drows0. This
works fine *if* the decorations are similar between the axes. If the
larger subplotspec has much larger axes decorations, then the constraint
above is incorrect.
We need the greater than in the above, in general, rather than an equals
sign. Consider the case of the left column having 2 rows, and the right
column having 1 row. We want the top and bottom of the _poslayoutboxes to
line up. So that means if there are decorations on the left column axes
they will be smaller than half as large as the right hand axis.
This can break down if the decoration size for the right hand axis (the
margins) is very large. There must be a math way to check for this case.
'''
invTransFig = fig.transFigure.inverted().transform_bbox
# list of unique gridspecs that contain child axes:
gss = set([])
for ax in fig.axes:
if hasattr(ax, 'get_subplotspec'):
gs = ax.get_subplotspec().get_gridspec()
if gs._layoutbox is not None:
gss.add(gs)
if len(gss) == 0:
warnings.warn('There are no gridspecs with layoutboxes. '
'Possibly did not call parent GridSpec with the figure= '
'keyword')
# check for unoccupied gridspec slots and make ghost axes for these
# slots... Do for each gs separately. This is a pretty big kludge
    # but shouldn't have too much ill effect. The worst is that
    # someone querying the figure will wonder why there are more
# axes than they thought.
if fig._layoutbox.constrained_layout_called < 1:
for gs in gss:
nrows, ncols = gs.get_geometry()
hassubplotspec = np.zeros(nrows * ncols, dtype=bool)
axs = []
for ax in fig.axes:
if (hasattr(ax, 'get_subplotspec')
and ax._layoutbox is not None
and ax.get_subplotspec().get_gridspec() == gs):
axs += [ax]
for ax in axs:
ss0 = ax.get_subplotspec()
if ss0.num2 is None:
ss0.num2 = ss0.num1
hassubplotspec[ss0.num1:(ss0.num2 + 1)] = True
for nn, hss in enumerate(hassubplotspec):
if not hss:
# this gridspec slot doesn't have an axis so we
# make a "ghost".
ax = fig.add_subplot(gs[nn])
ax.set_frame_on(False)
ax.set_xticks([])
ax.set_yticks([])
ax.set_facecolor((1, 0, 0, 0))
# for each axes, make a margin between the *pos* layoutbox and the
    # *axes* layoutbox be a minimum size that can accommodate the
# decorations on the axis.
for ax in fig.axes:
_log.debug(ax._layoutbox)
if ax._layoutbox is not None:
pos = ax.get_position(original=True)
tightbbox = get_axall_tightbbox(ax, renderer)
bbox = invTransFig(tightbbox)
# use stored h_pad if it exists
h_padt = ax._poslayoutbox.h_pad
if h_padt is None:
h_padt = h_pad
w_padt = ax._poslayoutbox.w_pad
if w_padt is None:
w_padt = w_pad
ax._poslayoutbox.edit_left_margin_min(-bbox.x0 +
pos.x0 + w_padt)
ax._poslayoutbox.edit_right_margin_min(bbox.x1 -
pos.x1 + w_padt)
ax._poslayoutbox.edit_bottom_margin_min(
-bbox.y0 + pos.y0 + h_padt)
ax._poslayoutbox.edit_top_margin_min(bbox.y1-pos.y1+h_padt)
_log.debug('left %f', (-bbox.x0 + pos.x0 + w_pad))
_log.debug('right %f', (bbox.x1 - pos.x1 + w_pad))
_log.debug('bottom %f', (-bbox.y0 + pos.y0 + h_padt))
# Sometimes its possible for the solver to collapse
# rather than expand axes, so they all have zero height
# or width. This stops that... It *should* have been
# taken into account w/ pref_width...
if fig._layoutbox.constrained_layout_called < 1:
ax._poslayoutbox.constrain_height_min(20, strength='weak')
ax._poslayoutbox.constrain_width_min(20, strength='weak')
ax._layoutbox.constrain_height_min(20, strength='weak')
ax._layoutbox.constrain_width_min(20, strength='weak')
ax._poslayoutbox.constrain_top_margin(0, strength='weak')
ax._poslayoutbox.constrain_bottom_margin(0,
strength='weak')
ax._poslayoutbox.constrain_right_margin(0, strength='weak')
ax._poslayoutbox.constrain_left_margin(0, strength='weak')
# do layout for suptitle.
if fig._suptitle is not None:
sup = fig._suptitle
bbox = invTransFig(sup.get_window_extent(renderer=renderer))
height = bbox.y1 - bbox.y0
sup._layoutbox.edit_height(height+h_pad)
# OK, the above lines up ax._poslayoutbox with ax._layoutbox
# now we need to
# 1) arrange the subplotspecs. We do it at this level because
# the subplotspecs are meant to contain other dependent axes
# like colorbars or legends.
# 2) line up the right and left side of the ax._poslayoutbox
# that have the same subplotspec maxes.
if fig._layoutbox.constrained_layout_called < 1:
# arrange the subplotspecs... This is all done relative to each
        # other. Some subplotspecs contain axes, and others contain gridspecs
# the ones that contain gridspecs are a set proportion of their
# parent gridspec. The ones that contain axes are not so constrained.
figlb = fig._layoutbox
for child in figlb.children:
if child._is_gridspec_layoutbox():
# farm the gridspec layout out.
#
# This routine makes all the subplot spec containers
# have the correct arrangement. It just stacks the
# subplot layoutboxes in the correct order...
arange_subplotspecs(child, hspace=hspace, wspace=wspace)
# - Align right/left and bottom/top spines of appropriate subplots.
# - Compare size of subplotspec including height and width ratios
# and make sure that the axes spines are at least as large
# as they should be.
for gs in gss:
# for each gridspec...
nrows, ncols = gs.get_geometry()
width_ratios = gs.get_width_ratios()
height_ratios = gs.get_height_ratios()
if width_ratios is None:
width_ratios = np.ones(ncols)
if height_ratios is None:
height_ratios = np.ones(nrows)
# get axes in this gridspec....
axs = []
for ax in fig.axes:
if (hasattr(ax, 'get_subplotspec')
and ax._layoutbox is not None):
if ax.get_subplotspec().get_gridspec() == gs:
axs += [ax]
rownummin = np.zeros(len(axs), dtype=np.int8)
rownummax = np.zeros(len(axs), dtype=np.int8)
colnummin = np.zeros(len(axs), dtype=np.int8)
colnummax = np.zeros(len(axs), dtype=np.int8)
width = np.zeros(len(axs))
height = np.zeros(len(axs))
for n, ax in enumerate(axs):
ss0 = ax.get_subplotspec()
if ss0.num2 is None:
ss0.num2 = ss0.num1
rownummin[n], colnummin[n] = divmod(ss0.num1, ncols)
rownummax[n], colnummax[n] = divmod(ss0.num2, ncols)
width[n] = np.sum(
width_ratios[colnummin[n]:(colnummax[n] + 1)])
height[n] = np.sum(
height_ratios[rownummin[n]:(rownummax[n] + 1)])
for nn, ax in enumerate(axs[:-1]):
ss0 = ax.get_subplotspec()
# now compare ax to all the axs:
#
# If the subplotspecs have the same colnumXmax, then line
# up their right sides. If they have the same min, then
# line up their left sides (and vertical equivalents).
rownum0min, colnum0min = rownummin[nn], colnummin[nn]
rownum0max, colnum0max = rownummax[nn], colnummax[nn]
width0, height0 = width[nn], height[nn]
alignleft = False
alignright = False
alignbot = False
aligntop = False
alignheight = False
alignwidth = False
for mm in range(nn+1, len(axs)):
axc = axs[mm]
rownumCmin, colnumCmin = rownummin[mm], colnummin[mm]
rownumCmax, colnumCmax = rownummax[mm], colnummax[mm]
widthC, heightC = width[mm], height[mm]
# Horizontally align axes spines if they have the
# same min or max:
if not alignleft and colnum0min == colnumCmin:
# we want the _poslayoutboxes to line up on left
# side of the axes spines...
layoutbox.align([ax._poslayoutbox,
axc._poslayoutbox],
'left')
alignleft = True
if not alignright and colnum0max == colnumCmax:
# line up right sides of _poslayoutbox
layoutbox.align([ax._poslayoutbox,
axc._poslayoutbox],
'right')
alignright = True
# Vertically align axes spines if they have the
# same min or max:
if not aligntop and rownum0min == rownumCmin:
# line up top of _poslayoutbox
_log.debug('rownum0min == rownumCmin')
layoutbox.align([ax._poslayoutbox, axc._poslayoutbox],
'top')
aligntop = True
if not alignbot and rownum0max == rownumCmax:
# line up bottom of _poslayoutbox
_log.debug('rownum0max == rownumCmax')
layoutbox.align([ax._poslayoutbox, axc._poslayoutbox],
'bottom')
alignbot = True
###########
# Now we make the widths and heights of position boxes
# similar. (i.e the spine locations)
# This allows vertically stacked subplots to have
# different sizes if they occupy different amounts
# of the gridspec: i.e.
# gs = gridspec.GridSpec(3,1)
# ax1 = gs[0,:]
# ax2 = gs[1:,:]
# then drows0 = 1, and drowsC = 2, and ax2
# should be at least twice as large as ax1.
# But it can be more than twice as large because
# it needs less room for the labeling.
#
# For height, this only needs to be done if the
# subplots share a column. For width if they
# share a row.
drowsC = (rownumCmax - rownumCmin + 1)
drows0 = (rownum0max - rownum0min + 1)
dcolsC = (colnumCmax - colnumCmin + 1)
dcols0 = (colnum0max - colnum0min + 1)
if not alignheight and drows0 == drowsC:
ax._poslayoutbox.constrain_height(
axc._poslayoutbox.height * height0 / heightC)
alignheight = True
elif in_same_column(colnum0min, colnum0max,
colnumCmin, colnumCmax):
if height0 > heightC:
ax._poslayoutbox.constrain_height_min(
axc._poslayoutbox.height * height0 / heightC)
# these constraints stop the smaller axes from
# being allowed to go to zero height...
axc._poslayoutbox.constrain_height_min(
ax._poslayoutbox.height * heightC /
(height0*1.8))
elif height0 < heightC:
axc._poslayoutbox.constrain_height_min(
ax._poslayoutbox.height * heightC / height0)
ax._poslayoutbox.constrain_height_min(
ax._poslayoutbox.height * height0 /
(heightC*1.8))
# widths...
if not alignwidth and dcols0 == dcolsC:
ax._poslayoutbox.constrain_width(
axc._poslayoutbox.width * width0 / widthC)
alignwidth = True
elif in_same_row(rownum0min, rownum0max,
rownumCmin, rownumCmax):
if width0 > widthC:
ax._poslayoutbox.constrain_width_min(
axc._poslayoutbox.width * width0 / widthC)
axc._poslayoutbox.constrain_width_min(
ax._poslayoutbox.width * widthC /
(width0*1.8))
elif width0 < widthC:
axc._poslayoutbox.constrain_width_min(
ax._poslayoutbox.width * widthC / width0)
ax._poslayoutbox.constrain_width_min(
axc._poslayoutbox.width * width0 /
(widthC*1.8))
fig._layoutbox.constrained_layout_called += 1
fig._layoutbox.update_variables()
# Now set the position of the axes...
for ax in fig.axes:
if ax._layoutbox is not None:
newpos = ax._poslayoutbox.get_rect()
_log.debug('newpos %r', newpos)
# Now set the new position.
# ax.set_position will zero out the layout for
# this axis, allowing users to hard-code the position,
# so this does the same w/o zeroing layout.
ax._set_position(newpos, which='original')
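# Illustrative usage sketch (not part of the original module): figures
# created with ``constrained_layout=True`` end up calling
# do_constrained_layout() at draw time; manually placed axes are ignored, as
# noted in the module docstring.  The helper name `_demo_constrained_layout`
# is hypothetical.
def _demo_constrained_layout():
    import matplotlib.pyplot as plt
    fig, axs = plt.subplots(2, 2, constrained_layout=True)
    for ax in axs.flat:
        ax.set_xlabel('x-label')
        ax.set_ylabel('y-label')
        ax.set_title('Title')
    return fig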
def arange_subplotspecs(gs, hspace=0, wspace=0):
"""
    Arrange the subplotspec children of this gridspec, and then recursively
    do the same for any gridspec children of those gridspecs...
"""
sschildren = []
for child in gs.children:
if child._is_subplotspec_layoutbox():
for child2 in child.children:
# check for gridspec children...
name = (child2.name).split('.')[-1][:-3]
if name == 'gridspec':
arange_subplotspecs(child2, hspace=hspace, wspace=wspace)
sschildren += [child]
# now arrange the subplots...
for child0 in sschildren:
ss0 = child0.artist
nrows, ncols = ss0.get_gridspec().get_geometry()
if ss0.num2 is None:
ss0.num2 = ss0.num1
rowNum0min, colNum0min = divmod(ss0.num1, ncols)
rowNum0max, colNum0max = divmod(ss0.num2, ncols)
sschildren = sschildren[1:]
for childc in sschildren:
ssc = childc.artist
rowNumCmin, colNumCmin = divmod(ssc.num1, ncols)
if ssc.num2 is None:
ssc.num2 = ssc.num1
rowNumCmax, colNumCmax = divmod(ssc.num2, ncols)
# OK, this tells us the relative layout of ax
# with axc
thepad = wspace / ncols
if colNum0max < colNumCmin:
layoutbox.hstack([ss0._layoutbox, ssc._layoutbox],
padding=thepad)
if colNumCmax < colNum0min:
layoutbox.hstack([ssc._layoutbox, ss0._layoutbox],
padding=thepad)
####
# vertical alignment
thepad = hspace / nrows
if rowNum0max < rowNumCmin:
layoutbox.vstack([ss0._layoutbox,
ssc._layoutbox],
padding=thepad)
if rowNumCmax < rowNum0min:
layoutbox.vstack([ssc._layoutbox,
ss0._layoutbox],
padding=thepad)
def layoutcolorbarsingle(ax, cax, shrink, aspect, location, pad=0.05):
"""
    Do the layout for a colorbar, so as not to overly pollute colorbar.py.
`pad` is in fraction of the original axis size.
"""
axlb = ax._layoutbox
axpos = ax._poslayoutbox
axsslb = ax.get_subplotspec()._layoutbox
lb = layoutbox.LayoutBox(
parent=axsslb,
name=axsslb.name + '.cbar',
artist=cax)
if location in ('left', 'right'):
lbpos = layoutbox.LayoutBox(
parent=lb,
name=lb.name + '.pos',
tightwidth=False,
pos=True,
subplot=False,
artist=cax)
if location == 'right':
# arrange to right of parent axis
layoutbox.hstack([axlb, lb], padding=pad * axlb.width,
strength='strong')
else:
layoutbox.hstack([lb, axlb], padding=pad * axlb.width)
# constrain the height and center...
layoutbox.match_heights([axpos, lbpos], [1, shrink])
layoutbox.align([axpos, lbpos], 'v_center')
# set the width of the pos box
lbpos.constrain_width(shrink * axpos.height * (1/aspect),
strength='strong')
elif location in ('bottom', 'top'):
lbpos = layoutbox.LayoutBox(
parent=lb,
name=lb.name + '.pos',
tightheight=True,
pos=True,
subplot=False,
artist=cax)
if location == 'bottom':
layoutbox.vstack([axlb, lb], padding=pad * axlb.height)
else:
layoutbox.vstack([lb, axlb], padding=pad * axlb.height)
# constrain the height and center...
layoutbox.match_widths([axpos, lbpos],
[1, shrink], strength='strong')
layoutbox.align([axpos, lbpos], 'h_center')
# set the height of the pos box
lbpos.constrain_height(axpos.width * aspect * shrink,
strength='medium')
return lb, lbpos
def layoutcolorbargridspec(parents, cax, shrink, aspect, location, pad=0.05):
"""
    Do the layout for a colorbar, so as not to overly pollute colorbar.py.
`pad` is in fraction of the original axis size.
"""
gs = parents[0].get_subplotspec().get_gridspec()
# parent layout box....
gslb = gs._layoutbox
lb = layoutbox.LayoutBox(parent=gslb.parent,
name=gslb.parent.name + '.cbar',
artist=cax)
if location in ('left', 'right'):
lbpos = layoutbox.LayoutBox(
parent=lb,
name=lb.name + '.pos',
tightwidth=False,
pos=True,
subplot=False,
artist=cax)
if location == 'right':
            # arrange to the right of the gridspec sibling
layoutbox.hstack([gslb, lb], padding=pad * gslb.width,
strength='strong')
else:
layoutbox.hstack([lb, gslb], padding=pad * gslb.width)
# constrain the height and center...
# This isn't quite right. We'd like the colorbar
# pos to line up w/ the axes poss, not the size of the
# gs.
maxrow = -100000
minrow = 1000000
maxax = None
minax = None
for ax in parents:
subspec = ax.get_subplotspec()
nrows, ncols = subspec.get_gridspec().get_geometry()
for num in [subspec.num1, subspec.num2]:
rownum1, colnum1 = divmod(subspec.num1, ncols)
if rownum1 > maxrow:
maxrow = rownum1
maxax = ax
if rownum1 < minrow:
minrow = rownum1
minax = ax
# invert the order so these are bottom to top:
maxposlb = minax._poslayoutbox
minposlb = maxax._poslayoutbox
# now we want the height of the colorbar pos to be
# set by the top and bottom of these poss
# bottom top
# b t
# h = (top-bottom)*shrink
# b = bottom + (top-bottom - h) / 2.
lbpos.constrain_height(
(maxposlb.top - minposlb.bottom) *
shrink, strength='strong')
lbpos.constrain_bottom(
(maxposlb.top - minposlb.bottom) *
(1 - shrink)/2 + minposlb.bottom,
strength='strong')
# set the width of the pos box
lbpos.constrain_width(lbpos.height * (shrink / aspect),
strength='strong')
elif location in ('bottom', 'top'):
lbpos = layoutbox.LayoutBox(
parent=lb,
name=lb.name + '.pos',
tightheight=True,
pos=True,
subplot=False,
artist=cax)
if location == 'bottom':
layoutbox.vstack([gslb, lb], padding=pad * gslb.width)
else:
layoutbox.vstack([lb, gslb], padding=pad * gslb.width)
maxcol = -100000
mincol = 1000000
maxax = None
minax = None
for ax in parents:
subspec = ax.get_subplotspec()
nrows, ncols = subspec.get_gridspec().get_geometry()
for num in [subspec.num1, subspec.num2]:
rownum1, colnum1 = divmod(subspec.num1, ncols)
if colnum1 > maxcol:
maxcol = colnum1
maxax = ax
                if colnum1 < mincol:
mincol = colnum1
minax = ax
maxposlb = maxax._poslayoutbox
minposlb = minax._poslayoutbox
lbpos.constrain_width((maxposlb.right - minposlb.left) *
shrink)
lbpos.constrain_left(
(maxposlb.right - minposlb.left) *
(1-shrink)/2 + minposlb.left)
# set the height of the pos box
lbpos.constrain_height(lbpos.width * shrink * aspect,
strength='medium')
return lb, lbpos
| 28,386 | 41.495509 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/offsetbox.py |
"""
The OffsetBox is a simple container artist. Its child artists are meant
to be drawn at a relative position to their parent. The [VH]Packer,
DrawingArea and TextArea are derived from the OffsetBox.
The [VH]Packer automatically adjusts the relative positions of its
children, which should be instances of the OffsetBox. This is used to
align similar artists together, e.g., in legend.
The DrawingArea can contain any Artist as a child. The
DrawingArea has a fixed width and height. The position of children
relative to the parent is fixed. The TextArea contains a single
Text instance. The width and height of the TextArea instance are the
width and height of its child text.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange, zip
import warnings
import matplotlib.transforms as mtransforms
import matplotlib.artist as martist
import matplotlib.text as mtext
import matplotlib.path as mpath
import numpy as np
from matplotlib.transforms import Bbox, BboxBase, TransformedBbox
from matplotlib.font_manager import FontProperties
from matplotlib.patches import FancyBboxPatch, FancyArrowPatch
from matplotlib import rcParams
from matplotlib import docstring
from matplotlib.image import BboxImage
from matplotlib.patches import bbox_artist as mbbox_artist
from matplotlib.text import _AnnotationBase
DEBUG = False
# for debugging use
def bbox_artist(*args, **kwargs):
if DEBUG:
mbbox_artist(*args, **kwargs)
# _get_packed_offsets() and _get_aligned_offsets() are coded assuming
# that we are packing boxes horizontally. But the same functions will be
# used with vertical packing.
def _get_packed_offsets(wd_list, total, sep, mode="fixed"):
"""
    Given a list of (width, xdescent) of each box, calculate the
    total width and the x-offset position of each item according to
    *mode*. xdescent is analogous to the usual descent, but along the
    x-direction. xdescent values are currently ignored.
*wd_list* : list of (width, xdescent) of boxes to be packed.
*sep* : spacing between boxes
*total* : Intended total length. None if not used.
*mode* : packing mode. 'fixed', 'expand', or 'equal'.
"""
w_list, d_list = zip(*wd_list)
# d_list is currently not used.
if mode == "fixed":
offsets_ = np.cumsum([0] + [w + sep for w in w_list])
offsets = offsets_[:-1]
if total is None:
total = offsets_[-1] - sep
return total, offsets
elif mode == "expand":
# This is a bit of a hack to avoid a TypeError when *total*
# is None and used in conjugation with tight layout.
if total is None:
total = 1
if len(w_list) > 1:
sep = (total - sum(w_list)) / (len(w_list) - 1)
else:
sep = 0
offsets_ = np.cumsum([0] + [w + sep for w in w_list])
offsets = offsets_[:-1]
return total, offsets
elif mode == "equal":
maxh = max(w_list)
if total is None:
total = (maxh + sep) * len(w_list)
else:
sep = total / len(w_list) - maxh
offsets = (maxh + sep) * np.arange(len(w_list))
return total, offsets
else:
raise ValueError("Unknown mode : %s" % (mode,))
def _get_aligned_offsets(hd_list, height, align="baseline"):
"""
    Given a list of (height, descent) of each box, align the boxes
    with *align* and calculate the y-offset of each box.
    *hd_list* : list of (height, descent) of boxes to be aligned.
    *height* : intended total height. If None, the maximum of the
        heights in *hd_list* is used.
    *align* : align mode. 'baseline', 'top', 'bottom', 'left', 'right',
        or 'center'.
"""
if height is None:
height = max(h for h, d in hd_list)
if align == "baseline":
height_descent = max(h - d for h, d in hd_list)
descent = max(d for h, d in hd_list)
height = height_descent + descent
offsets = [0. for h, d in hd_list]
elif align in ["left", "top"]:
descent = 0.
offsets = [d for h, d in hd_list]
elif align in ["right", "bottom"]:
descent = 0.
offsets = [height - h + d for h, d in hd_list]
elif align == "center":
descent = 0.
offsets = [(height - h) * .5 + d for h, d in hd_list]
else:
raise ValueError("Unknown Align mode : %s" % (align,))
return height, descent, offsets
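# Illustrative sketch (not part of the original module): a worked example of
# _get_aligned_offsets for two boxes of (height, descent) = (10, 2) and
# (20, 5).  The helper name `_demo_get_aligned_offsets` is hypothetical.
def _demo_get_aligned_offsets():
    hd_list = [(10, 2), (20, 5)]
    # 'baseline': height = max(h - d) + max(d) = 15 + 5 = 20, offsets are 0
    height, descent, offsets = _get_aligned_offsets(hd_list, None, "baseline")
    assert (height, descent, offsets) == (20, 5, [0.0, 0.0])
    # 'center': each box is centred inside the total height of 20
    height, descent, offsets = _get_aligned_offsets(hd_list, None, "center")
    assert (height, descent, offsets) == (20, 0.0, [7.0, 5.0])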
class OffsetBox(martist.Artist):
"""
    The OffsetBox is a simple container artist. Its child artists are meant
    to be drawn at a relative position to their parent.
"""
def __init__(self, *args, **kwargs):
super(OffsetBox, self).__init__(*args, **kwargs)
        # Clipping has not been implemented in the OffsetBox family, so
# disable the clip flag for consistency. It can always be turned back
# on to zero effect.
self.set_clip_on(False)
self._children = []
self._offset = (0, 0)
def __getstate__(self):
state = martist.Artist.__getstate__(self)
# pickle cannot save instancemethods, so handle them here
from .cbook import _InstanceMethodPickler
import inspect
offset = state['_offset']
if inspect.ismethod(offset):
state['_offset'] = _InstanceMethodPickler(offset)
return state
def __setstate__(self, state):
self.__dict__ = state
from .cbook import _InstanceMethodPickler
if isinstance(self._offset, _InstanceMethodPickler):
self._offset = self._offset.get_instancemethod()
self.stale = True
def set_figure(self, fig):
"""
Set the figure
accepts a class:`~matplotlib.figure.Figure` instance
"""
martist.Artist.set_figure(self, fig)
for c in self.get_children():
c.set_figure(fig)
@martist.Artist.axes.setter
def axes(self, ax):
# TODO deal with this better
martist.Artist.axes.fset(self, ax)
for c in self.get_children():
if c is not None:
c.axes = ax
def contains(self, mouseevent):
for c in self.get_children():
a, b = c.contains(mouseevent)
if a:
return a, b
return False, {}
def set_offset(self, xy):
"""
Set the offset
accepts x, y, tuple, or a callable object.
"""
self._offset = xy
self.stale = True
def get_offset(self, width, height, xdescent, ydescent, renderer):
"""
Get the offset
accepts extent of the box
"""
return (self._offset(width, height, xdescent, ydescent, renderer)
if callable(self._offset)
else self._offset)
def set_width(self, width):
"""
Set the width
accepts float
"""
self.width = width
self.stale = True
def set_height(self, height):
"""
Set the height
accepts float
"""
self.height = height
self.stale = True
def get_visible_children(self):
"""
Return a list of visible artists it contains.
"""
return [c for c in self._children if c.get_visible()]
def get_children(self):
"""
Return a list of artists it contains.
"""
return self._children
def get_extent_offsets(self, renderer):
raise Exception("")
def get_extent(self, renderer):
"""
        Return width, height, xdescent, ydescent of the box.
"""
w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
return w, h, xd, yd
def get_window_extent(self, renderer):
'''
get the bounding box in display space.
'''
w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
px, py = self.get_offset(w, h, xd, yd, renderer)
return mtransforms.Bbox.from_bounds(px - xd, py - yd, w, h)
def draw(self, renderer):
"""
Update the location of children if necessary and draw them
to the given *renderer*.
"""
width, height, xdescent, ydescent, offsets = self.get_extent_offsets(
renderer)
px, py = self.get_offset(width, height, xdescent, ydescent, renderer)
for c, (ox, oy) in zip(self.get_visible_children(), offsets):
c.set_offset((px + ox, py + oy))
c.draw(renderer)
bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
self.stale = False
class PackerBase(OffsetBox):
def __init__(self, pad=None, sep=None, width=None, height=None,
align=None, mode=None,
children=None):
"""
Parameters
----------
pad : float, optional
Boundary pad.
sep : float, optional
Spacing between items.
width : float, optional
height : float, optional
Width and height of the container box, calculated if
`None`.
align : str, optional
Alignment of boxes. Can be one of ``top``, ``bottom``,
``left``, ``right``, ``center`` and ``baseline``
mode : str, optional
Packing mode.
Notes
-----
        *pad* and *sep* need to be given in points and will be scaled
        with the renderer dpi, while *width* and *height* need to be
        given in pixels.
"""
super(PackerBase, self).__init__()
self.height = height
self.width = width
self.sep = sep
self.pad = pad
self.mode = mode
self.align = align
self._children = children
class VPacker(PackerBase):
"""
    The VPacker has its children packed vertically. It automatically
    adjusts the relative positions of its children at draw time.
"""
def __init__(self, pad=None, sep=None, width=None, height=None,
align="baseline", mode="fixed",
children=None):
"""
Parameters
----------
pad : float, optional
Boundary pad.
sep : float, optional
Spacing between items.
width : float, optional
height : float, optional
width and height of the container box, calculated if
`None`.
align : str, optional
Alignment of boxes.
mode : str, optional
Packing mode.
Notes
-----
        *pad* and *sep* need to be given in points and will be scaled
        with the renderer dpi, while *width* and *height* need to be
        given in pixels.
"""
super(VPacker, self).__init__(pad, sep, width, height,
align, mode,
children)
def get_extent_offsets(self, renderer):
"""
        Update the offsets of the children and return the extents of the box.
"""
dpicor = renderer.points_to_pixels(1.)
pad = self.pad * dpicor
sep = self.sep * dpicor
if self.width is not None:
for c in self.get_visible_children():
if isinstance(c, PackerBase) and c.mode == "expand":
c.set_width(self.width)
whd_list = [c.get_extent(renderer)
for c in self.get_visible_children()]
whd_list = [(w, h, xd, (h - yd)) for w, h, xd, yd in whd_list]
wd_list = [(w, xd) for w, h, xd, yd in whd_list]
width, xdescent, xoffsets = _get_aligned_offsets(wd_list,
self.width,
self.align)
pack_list = [(h, yd) for w, h, xd, yd in whd_list]
height, yoffsets_ = _get_packed_offsets(pack_list, self.height,
sep, self.mode)
yoffsets = yoffsets_ + [yd for w, h, xd, yd in whd_list]
ydescent = height - yoffsets[0]
yoffsets = height - yoffsets
yoffsets = yoffsets - ydescent
return width + 2 * pad, height + 2 * pad, \
xdescent + pad, ydescent + pad, \
list(zip(xoffsets, yoffsets))
class HPacker(PackerBase):
"""
The HPacker has its children packed horizontally. It automatically
adjusts the relative positions of children at draw time.
"""
def __init__(self, pad=None, sep=None, width=None, height=None,
align="baseline", mode="fixed",
children=None):
"""
Parameters
----------
pad : float, optional
Boundary pad.
sep : float, optional
Spacing between items.
width : float, optional
height : float, optional
Width and height of the container box, calculated if
`None`.
align : str
Alignment of boxes.
mode : str
Packing mode.
Notes
-----
        *pad* and *sep* need to be given in points and will be scaled
        with the renderer dpi, while *width* and *height* need to be
        given in pixels.
"""
super(HPacker, self).__init__(pad, sep, width, height,
align, mode, children)
def get_extent_offsets(self, renderer):
"""
        Update the offsets of the children and return the extents of the box.
"""
dpicor = renderer.points_to_pixels(1.)
pad = self.pad * dpicor
sep = self.sep * dpicor
whd_list = [c.get_extent(renderer)
for c in self.get_visible_children()]
if not whd_list:
return 2 * pad, 2 * pad, pad, pad, []
if self.height is None:
height_descent = max(h - yd for w, h, xd, yd in whd_list)
ydescent = max(yd for w, h, xd, yd in whd_list)
height = height_descent + ydescent
else:
            height = self.height - 2 * pad  # height w/o pad
hd_list = [(h, yd) for w, h, xd, yd in whd_list]
height, ydescent, yoffsets = _get_aligned_offsets(hd_list,
self.height,
self.align)
pack_list = [(w, xd) for w, h, xd, yd in whd_list]
width, xoffsets_ = _get_packed_offsets(pack_list, self.width,
sep, self.mode)
xoffsets = xoffsets_ + [xd for w, h, xd, yd in whd_list]
xdescent = whd_list[0][2]
xoffsets = xoffsets - xdescent
return width + 2 * pad, height + 2 * pad, \
xdescent + pad, ydescent + pad, \
list(zip(xoffsets, yoffsets))
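# Illustrative usage sketch (not part of the original module): stacking two
# TextArea boxes with VPacker and anchoring the result on an axes.  TextArea
# and AnchoredOffsetbox are defined further below in this module, so they are
# only referenced when the (hypothetical) helper `_demo_packers` is called.
def _demo_packers():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    line1 = TextArea("first line", textprops=dict(color="k"))
    line2 = TextArea("second line", textprops=dict(color="r"))
    box = VPacker(children=[line1, line2], align="left", pad=2, sep=4)
    ax.add_artist(AnchoredOffsetbox(loc='upper left', child=box,
                                    frameon=True))
    return fig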
class PaddedBox(OffsetBox):
def __init__(self, child, pad=None, draw_frame=False, patch_attrs=None):
"""
*pad* : boundary pad
.. note::
            *pad* needs to be given in points and will be
            scaled with the renderer dpi, while *width* and *height*
            need to be given in pixels.
"""
super(PaddedBox, self).__init__()
self.pad = pad
self._children = [child]
self.patch = FancyBboxPatch(
xy=(0.0, 0.0), width=1., height=1.,
facecolor='w', edgecolor='k',
mutation_scale=1, # self.prop.get_size_in_points(),
snap=True
)
self.patch.set_boxstyle("square", pad=0)
if patch_attrs is not None:
self.patch.update(patch_attrs)
self._drawFrame = draw_frame
def get_extent_offsets(self, renderer):
"""
        Update the offsets of the children and return the extents of the box.
"""
dpicor = renderer.points_to_pixels(1.)
pad = self.pad * dpicor
w, h, xd, yd = self._children[0].get_extent(renderer)
return w + 2 * pad, h + 2 * pad, \
xd + pad, yd + pad, \
[(0, 0)]
def draw(self, renderer):
"""
Update the location of children if necessary and draw them
to the given *renderer*.
"""
width, height, xdescent, ydescent, offsets = self.get_extent_offsets(
renderer)
px, py = self.get_offset(width, height, xdescent, ydescent, renderer)
for c, (ox, oy) in zip(self.get_visible_children(), offsets):
c.set_offset((px + ox, py + oy))
self.draw_frame(renderer)
for c in self.get_visible_children():
c.draw(renderer)
#bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
self.stale = False
def update_frame(self, bbox, fontsize=None):
self.patch.set_bounds(bbox.x0, bbox.y0,
bbox.width, bbox.height)
if fontsize:
self.patch.set_mutation_scale(fontsize)
self.stale = True
def draw_frame(self, renderer):
# update the location and size of the legend
bbox = self.get_window_extent(renderer)
self.update_frame(bbox)
if self._drawFrame:
self.patch.draw(renderer)
class DrawingArea(OffsetBox):
"""
The DrawingArea can contain any Artist as a child. The DrawingArea
has a fixed width and height. The position of children relative to
the parent is fixed. The children can be clipped at the
boundaries of the parent.
"""
def __init__(self, width, height, xdescent=0.,
ydescent=0., clip=False):
"""
*width*, *height* : width and height of the container box.
*xdescent*, *ydescent* : descent of the box in x- and y-direction.
*clip* : Whether to clip the children
"""
super(DrawingArea, self).__init__()
self.width = width
self.height = height
self.xdescent = xdescent
self.ydescent = ydescent
self._clip_children = clip
self.offset_transform = mtransforms.Affine2D()
self.offset_transform.clear()
self.offset_transform.translate(0, 0)
self.dpi_transform = mtransforms.Affine2D()
@property
def clip_children(self):
"""
If the children of this DrawingArea should be clipped
by DrawingArea bounding box.
"""
return self._clip_children
@clip_children.setter
def clip_children(self, val):
self._clip_children = bool(val)
self.stale = True
def get_transform(self):
"""
Return the :class:`~matplotlib.transforms.Transform` applied
to the children
"""
return self.dpi_transform + self.offset_transform
def set_transform(self, t):
"""
set_transform is ignored.
"""
pass
def set_offset(self, xy):
"""
set offset of the container.
Accept : tuple of x,y coordinate in display units.
"""
self._offset = xy
self.offset_transform.clear()
self.offset_transform.translate(xy[0], xy[1])
self.stale = True
def get_offset(self):
"""
return offset of the container.
"""
return self._offset
def get_window_extent(self, renderer):
'''
get the bounding box in display space.
'''
w, h, xd, yd = self.get_extent(renderer)
ox, oy = self.get_offset() # w, h, xd, yd)
return mtransforms.Bbox.from_bounds(ox - xd, oy - yd, w, h)
def get_extent(self, renderer):
"""
        Return width, height, xdescent, ydescent of the box.
"""
dpi_cor = renderer.points_to_pixels(1.)
return self.width * dpi_cor, self.height * dpi_cor, \
self.xdescent * dpi_cor, self.ydescent * dpi_cor
def add_artist(self, a):
'Add any :class:`~matplotlib.artist.Artist` to the container box'
self._children.append(a)
if not a.is_transform_set():
a.set_transform(self.get_transform())
if self.axes is not None:
a.axes = self.axes
fig = self.figure
if fig is not None:
a.set_figure(fig)
def draw(self, renderer):
"""
Draw the children
"""
dpi_cor = renderer.points_to_pixels(1.)
self.dpi_transform.clear()
self.dpi_transform.scale(dpi_cor, dpi_cor)
# At this point the DrawingArea has a transform
# to the display space so the path created is
# good for clipping children
tpath = mtransforms.TransformedPath(
mpath.Path([[0, 0], [0, self.height],
[self.width, self.height],
[self.width, 0]]),
self.get_transform())
for c in self._children:
if self._clip_children and not (c.clipbox or c._clippath):
c.set_clip_path(tpath)
c.draw(renderer)
bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
self.stale = False
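# Illustrative usage sketch (not part of the original module): a DrawingArea
# holding a Circle drawn in the area's own (dpi-scaled) coordinates, anchored
# on an axes via AnchoredOffsetbox (defined further below in this module).
# The helper name `_demo_drawing_area` is hypothetical.
def _demo_drawing_area():
    import matplotlib.pyplot as plt
    from matplotlib.patches import Circle
    fig, ax = plt.subplots()
    da = DrawingArea(30, 30, xdescent=0., ydescent=0.)
    da.add_artist(Circle((15, 15), 10, fc='tab:green'))
    ax.add_artist(AnchoredOffsetbox(loc='lower right', child=da))
    return fig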
class TextArea(OffsetBox):
"""
    The TextArea contains a single Text instance. The text is
    placed at (0, 0) with baseline+left alignment. The width and height
    of the TextArea instance are the width and height of its child
    text.
"""
def __init__(self, s,
textprops=None,
multilinebaseline=None,
minimumdescent=True,
):
"""
Parameters
----------
s : str
a string to be displayed.
textprops : `~matplotlib.font_manager.FontProperties`, optional
multilinebaseline : bool, optional
If `True`, baseline for multiline text is adjusted so that
            it is (approximately) center-aligned with single-line
text.
minimumdescent : bool, optional
If `True`, the box has a minimum descent of "p".
"""
if textprops is None:
textprops = {}
if "va" not in textprops:
textprops["va"] = "baseline"
self._text = mtext.Text(0, 0, s, **textprops)
OffsetBox.__init__(self)
self._children = [self._text]
self.offset_transform = mtransforms.Affine2D()
self.offset_transform.clear()
self.offset_transform.translate(0, 0)
self._baseline_transform = mtransforms.Affine2D()
self._text.set_transform(self.offset_transform +
self._baseline_transform)
self._multilinebaseline = multilinebaseline
self._minimumdescent = minimumdescent
def set_text(self, s):
"Set the text of this area as a string."
self._text.set_text(s)
self.stale = True
def get_text(self):
"Returns the string representation of this area's text"
return self._text.get_text()
def set_multilinebaseline(self, t):
"""
Set multilinebaseline .
If True, baseline for multiline text is
        adjusted so that it is (approximately) center-aligned with
singleline text.
"""
self._multilinebaseline = t
self.stale = True
def get_multilinebaseline(self):
"""
get multilinebaseline .
"""
return self._multilinebaseline
def set_minimumdescent(self, t):
"""
Set minimumdescent .
If True, extent of the single line text is adjusted so that
it has minimum descent of "p"
"""
self._minimumdescent = t
self.stale = True
def get_minimumdescent(self):
"""
get minimumdescent.
"""
return self._minimumdescent
def set_transform(self, t):
"""
set_transform is ignored.
"""
pass
def set_offset(self, xy):
"""
set offset of the container.
Accept : tuple of x,y coordinates in display units.
"""
self._offset = xy
self.offset_transform.clear()
self.offset_transform.translate(xy[0], xy[1])
self.stale = True
def get_offset(self):
"""
return offset of the container.
"""
return self._offset
def get_window_extent(self, renderer):
'''
get the bounding box in display space.
'''
w, h, xd, yd = self.get_extent(renderer)
ox, oy = self.get_offset() # w, h, xd, yd)
return mtransforms.Bbox.from_bounds(ox - xd, oy - yd, w, h)
def get_extent(self, renderer):
clean_line, ismath = self._text.is_math_text(self._text._text)
_, h_, d_ = renderer.get_text_width_height_descent(
"lp", self._text._fontproperties, ismath=False)
bbox, info, d = self._text._get_layout(renderer)
w, h = bbox.width, bbox.height
line = info[-1][0] # last line
self._baseline_transform.clear()
if len(info) > 1 and self._multilinebaseline:
d_new = 0.5 * h - 0.5 * (h_ - d_)
self._baseline_transform.translate(0, d - d_new)
d = d_new
else: # single line
h_d = max(h_ - d_, h - d)
if self.get_minimumdescent():
                # to have a minimum descent, i.e., "l" and "p" have the same
                # descents.
d = max(d, d_)
#else:
# d = d
h = h_d + d
return w, h, 0., d
def draw(self, renderer):
"""
Draw the children
"""
self._text.draw(renderer)
bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
self.stale = False
class AuxTransformBox(OffsetBox):
"""
    Offset box with an aux_transform. Its children will be
    transformed with the aux_transform first and then offset. The
    absolute coordinate of the aux_transform is not meaningful, as it
    will be automatically adjusted so that the lower-left corner of the
    bounding box of the children is set to (0, 0) before the offset
    transform.
    It is similar to DrawingArea, except that the extent of the box
    is not predetermined but calculated from the window extent of its
    children. Furthermore, the extent of the children will be
    calculated in the transformed coordinates.
"""
def __init__(self, aux_transform):
self.aux_transform = aux_transform
OffsetBox.__init__(self)
self.offset_transform = mtransforms.Affine2D()
self.offset_transform.clear()
self.offset_transform.translate(0, 0)
        # ref_offset_transform is used to make the offset_transform
        # always reference the lower-left corner of the bbox of its
        # children.
self.ref_offset_transform = mtransforms.Affine2D()
self.ref_offset_transform.clear()
def add_artist(self, a):
'Add any :class:`~matplotlib.artist.Artist` to the container box'
self._children.append(a)
a.set_transform(self.get_transform())
self.stale = True
def get_transform(self):
"""
Return the :class:`~matplotlib.transforms.Transform` applied
to the children
"""
return self.aux_transform + \
self.ref_offset_transform + \
self.offset_transform
def set_transform(self, t):
"""
set_transform is ignored.
"""
pass
def set_offset(self, xy):
"""
set offset of the container.
Accept : tuple of x,y coordinate in display units.
"""
self._offset = xy
self.offset_transform.clear()
self.offset_transform.translate(xy[0], xy[1])
self.stale = True
def get_offset(self):
"""
return offset of the container.
"""
return self._offset
def get_window_extent(self, renderer):
'''
get the bounding box in display space.
'''
w, h, xd, yd = self.get_extent(renderer)
ox, oy = self.get_offset() # w, h, xd, yd)
return mtransforms.Bbox.from_bounds(ox - xd, oy - yd, w, h)
def get_extent(self, renderer):
# clear the offset transforms
_off = self.offset_transform.to_values() # to be restored later
self.ref_offset_transform.clear()
self.offset_transform.clear()
# calculate the extent
bboxes = [c.get_window_extent(renderer) for c in self._children]
ub = mtransforms.Bbox.union(bboxes)
        # adjust ref_offset_transform
self.ref_offset_transform.translate(-ub.x0, -ub.y0)
        # restore the offset transform
mtx = self.offset_transform.matrix_from_values(*_off)
self.offset_transform.set_matrix(mtx)
return ub.width, ub.height, 0., 0.
def draw(self, renderer):
"""
Draw the children
"""
for c in self._children:
c.draw(renderer)
bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
self.stale = False
class AnchoredOffsetbox(OffsetBox):
"""
An offset box placed according to the legend location
    loc. AnchoredOffsetbox has a single child. When multiple children
    are needed, use another OffsetBox class to enclose them. By default,
the offset box is anchored against its parent axes. You may
explicitly specify the bbox_to_anchor.
"""
zorder = 5 # zorder of the legend
# Location codes
codes = {'upper right': 1,
'upper left': 2,
'lower left': 3,
'lower right': 4,
'right': 5,
'center left': 6,
'center right': 7,
'lower center': 8,
'upper center': 9,
'center': 10,
}
def __init__(self, loc,
pad=0.4, borderpad=0.5,
child=None, prop=None, frameon=True,
bbox_to_anchor=None,
bbox_transform=None,
**kwargs):
"""
loc is a string or an integer specifying the legend location.
The valid location codes are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4,
'right' : 5, (same as 'center right', for back-compatibility)
'center left' : 6,
'center right' : 7,
'lower center' : 8,
'upper center' : 9,
'center' : 10,
        pad : padding around the child for drawing a frame, given as a
            fraction of the fontsize.
        borderpad : padding between the offsetbox frame and the bbox_to_anchor.
        child : OffsetBox instance that will be anchored.
        prop : font property. This is only used as a reference for paddings.
        frameon : if True, draw a frame box.
        bbox_to_anchor : bbox to anchor to. Use self.axes.bbox if None.
        bbox_transform : transform with which the bbox_to_anchor will be
            transformed.
"""
super(AnchoredOffsetbox, self).__init__(**kwargs)
self.set_bbox_to_anchor(bbox_to_anchor, bbox_transform)
self.set_child(child)
if isinstance(loc, six.string_types):
try:
loc = self.codes[loc]
except KeyError:
raise ValueError('Unrecognized location "%s". Valid '
'locations are\n\t%s\n'
% (loc, '\n\t'.join(self.codes)))
self.loc = loc
self.borderpad = borderpad
self.pad = pad
if prop is None:
self.prop = FontProperties(size=rcParams["legend.fontsize"])
elif isinstance(prop, dict):
self.prop = FontProperties(**prop)
if "size" not in prop:
self.prop.set_size(rcParams["legend.fontsize"])
else:
self.prop = prop
self.patch = FancyBboxPatch(
xy=(0.0, 0.0), width=1., height=1.,
facecolor='w', edgecolor='k',
mutation_scale=self.prop.get_size_in_points(),
snap=True
)
self.patch.set_boxstyle("square", pad=0)
self._drawFrame = frameon
def set_child(self, child):
"set the child to be anchored"
self._child = child
if child is not None:
child.axes = self.axes
self.stale = True
def get_child(self):
"return the child"
return self._child
def get_children(self):
"return the list of children"
return [self._child]
def get_extent(self, renderer):
"""
return the extent of the artist. The extent of the child
added with the pad is returned
"""
w, h, xd, yd = self.get_child().get_extent(renderer)
fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
pad = self.pad * fontsize
return w + 2 * pad, h + 2 * pad, xd + pad, yd + pad
def get_bbox_to_anchor(self):
"""
        return the bbox that the legend will be anchored to
"""
if self._bbox_to_anchor is None:
return self.axes.bbox
else:
transform = self._bbox_to_anchor_transform
if transform is None:
return self._bbox_to_anchor
else:
return TransformedBbox(self._bbox_to_anchor,
transform)
def set_bbox_to_anchor(self, bbox, transform=None):
"""
        set the bbox that the child will be anchored to.
        *bbox* can be a Bbox instance, a list of [left, bottom, width,
        height], or a list of [left, bottom] where the width and
        height will be assumed to be zero. The bbox will be
        transformed to display coordinates by the given transform.
"""
if bbox is None or isinstance(bbox, BboxBase):
self._bbox_to_anchor = bbox
else:
try:
l = len(bbox)
except TypeError:
raise ValueError("Invalid argument for bbox : %s" % str(bbox))
if l == 2:
bbox = [bbox[0], bbox[1], 0, 0]
self._bbox_to_anchor = Bbox.from_bounds(*bbox)
self._bbox_to_anchor_transform = transform
self.stale = True
def get_window_extent(self, renderer):
'''
get the bounding box in display space.
'''
self._update_offset_func(renderer)
w, h, xd, yd = self.get_extent(renderer)
ox, oy = self.get_offset(w, h, xd, yd, renderer)
return Bbox.from_bounds(ox - xd, oy - yd, w, h)
def _update_offset_func(self, renderer, fontsize=None):
"""
Update the offset func which depends on the dpi of the
renderer (because of the padding).
"""
if fontsize is None:
fontsize = renderer.points_to_pixels(
self.prop.get_size_in_points())
def _offset(w, h, xd, yd, renderer, fontsize=fontsize, self=self):
bbox = Bbox.from_bounds(0, 0, w, h)
borderpad = self.borderpad * fontsize
bbox_to_anchor = self.get_bbox_to_anchor()
x0, y0 = self._get_anchored_bbox(self.loc,
bbox,
bbox_to_anchor,
borderpad)
return x0 + xd, y0 + yd
self.set_offset(_offset)
def update_frame(self, bbox, fontsize=None):
self.patch.set_bounds(bbox.x0, bbox.y0,
bbox.width, bbox.height)
if fontsize:
self.patch.set_mutation_scale(fontsize)
def draw(self, renderer):
"draw the artist"
if not self.get_visible():
return
fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
self._update_offset_func(renderer, fontsize)
if self._drawFrame:
# update the location and size of the legend
bbox = self.get_window_extent(renderer)
self.update_frame(bbox, fontsize)
self.patch.draw(renderer)
width, height, xdescent, ydescent = self.get_extent(renderer)
px, py = self.get_offset(width, height, xdescent, ydescent, renderer)
self.get_child().set_offset((px, py))
self.get_child().draw(renderer)
self.stale = False
def _get_anchored_bbox(self, loc, bbox, parentbbox, borderpad):
"""
return the position of the bbox anchored at the parentbbox
with the loc code, with the borderpad.
"""
assert loc in range(1, 11) # called only internally
BEST, UR, UL, LL, LR, R, CL, CR, LC, UC, C = xrange(11)
anchor_coefs = {UR: "NE",
UL: "NW",
LL: "SW",
LR: "SE",
R: "E",
CL: "W",
CR: "E",
LC: "S",
UC: "N",
C: "C"}
c = anchor_coefs[loc]
container = parentbbox.padded(-borderpad)
anchored_box = bbox.anchored(c, container=container)
return anchored_box.x0, anchored_box.y0
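# Usage sketch (illustration only, not part of the library): anchoring an
# arbitrary OffsetBox child in an axes corner.  The TextArea child and the
# pyplot figure are assumptions made purely for this example.
def _example_anchored_offsetbox():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    box = AnchoredOffsetbox('upper right', child=TextArea("anchored"),
                            pad=0.4, borderpad=0.5, frameon=True)
    ax.add_artist(box)
    return fig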
class AnchoredText(AnchoredOffsetbox):
"""
AnchoredOffsetbox with Text.
"""
def __init__(self, s, loc, pad=0.4, borderpad=0.5, prop=None, **kwargs):
"""
Parameters
----------
s : string
Text.
loc : str
Location code.
pad : float, optional
Pad between the text and the frame as fraction of the font
size.
borderpad : float, optional
Pad between the frame and the axes (or *bbox_to_anchor*).
prop : `matplotlib.font_manager.FontProperties`
Font properties.
Notes
-----
Other keyword parameters of `AnchoredOffsetbox` are also
allowed.
"""
if prop is None:
prop = {}
badkwargs = {'ha', 'horizontalalignment', 'va', 'verticalalignment'}
if badkwargs & set(prop):
warnings.warn("Mixing horizontalalignment or verticalalignment "
"with AnchoredText is not supported.")
self.txt = TextArea(s, textprops=prop, minimumdescent=False)
fp = self.txt._text.get_fontproperties()
super(AnchoredText, self).__init__(
loc, pad=pad, borderpad=borderpad, child=self.txt, prop=fp,
**kwargs)
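# Usage sketch (assumption, for illustration): the common pattern is to drop
# a small framed label into an axes corner and add it as an artist.
def _example_anchored_text():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    at = AnchoredText("panel (a)", loc='upper left',
                      prop=dict(size=10), frameon=True)
    ax.add_artist(at)
    return fig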
class OffsetImage(OffsetBox):
def __init__(self, arr,
zoom=1,
cmap=None,
norm=None,
interpolation=None,
origin=None,
filternorm=1,
filterrad=4.0,
resample=False,
dpi_cor=True,
**kwargs
):
OffsetBox.__init__(self)
self._dpi_cor = dpi_cor
self.image = BboxImage(bbox=self.get_window_extent,
cmap=cmap,
norm=norm,
interpolation=interpolation,
origin=origin,
filternorm=filternorm,
filterrad=filterrad,
resample=resample,
**kwargs
)
self._children = [self.image]
self.set_zoom(zoom)
self.set_data(arr)
def set_data(self, arr):
self._data = np.asarray(arr)
self.image.set_data(self._data)
self.stale = True
def get_data(self):
return self._data
def set_zoom(self, zoom):
self._zoom = zoom
self.stale = True
def get_zoom(self):
return self._zoom
# def set_axes(self, axes):
# self.image.set_axes(axes)
# martist.Artist.set_axes(self, axes)
# def set_offset(self, xy):
# """
# set offset of the container.
# Accept : tuple of x,y coordinate in display units.
# """
# self._offset = xy
# self.offset_transform.clear()
# self.offset_transform.translate(xy[0], xy[1])
def get_offset(self):
"""
return offset of the container.
"""
return self._offset
def get_children(self):
return [self.image]
def get_window_extent(self, renderer):
'''
get the bounding box in display space.
'''
w, h, xd, yd = self.get_extent(renderer)
ox, oy = self.get_offset()
return mtransforms.Bbox.from_bounds(ox - xd, oy - yd, w, h)
def get_extent(self, renderer):
if self._dpi_cor: # True, do correction
dpi_cor = renderer.points_to_pixels(1.)
else:
dpi_cor = 1.
zoom = self.get_zoom()
data = self.get_data()
ny, nx = data.shape[:2]
w, h = dpi_cor * nx * zoom, dpi_cor * ny * zoom
return w, h, 0, 0
def draw(self, renderer):
"""
Draw the children
"""
self.image.draw(renderer)
# bbox_artist(self, renderer, fill=False, props=dict(pad=0.))
self.stale = False
class AnnotationBbox(martist.Artist, _AnnotationBase):
"""
Annotation-like class, but with offsetbox instead of Text.
"""
zorder = 3
def __str__(self):
return "AnnotationBbox(%g,%g)" % (self.xy[0], self.xy[1])
@docstring.dedent_interpd
def __init__(self, offsetbox, xy,
xybox=None,
xycoords='data',
boxcoords=None,
frameon=True, pad=0.4, # BboxPatch
annotation_clip=None,
box_alignment=(0.5, 0.5),
bboxprops=None,
arrowprops=None,
fontsize=None,
**kwargs):
"""
*offsetbox* : OffsetBox instance
*xycoords* : same as Annotation but can be a tuple of two
strings which are interpreted as x and y coordinates.
*boxcoords* : similar to textcoords as Annotation but can be a
tuple of two strings which are interpreted as x and y
coordinates.
        *box_alignment* : a tuple of two floats for the horizontal and
           vertical alignment of the offset box w.r.t. the *boxcoords*.
           The lower-left corner is (0, 0) and the upper-right corner is
           (1, 1).
        Other parameters are identical to those of Annotation.
"""
martist.Artist.__init__(self, **kwargs)
_AnnotationBase.__init__(self,
xy,
xycoords=xycoords,
annotation_clip=annotation_clip)
self.offsetbox = offsetbox
self.arrowprops = arrowprops
self.set_fontsize(fontsize)
if xybox is None:
self.xybox = xy
else:
self.xybox = xybox
if boxcoords is None:
self.boxcoords = xycoords
else:
self.boxcoords = boxcoords
if arrowprops is not None:
self._arrow_relpos = self.arrowprops.pop("relpos", (0.5, 0.5))
self.arrow_patch = FancyArrowPatch((0, 0), (1, 1),
**self.arrowprops)
else:
self._arrow_relpos = None
self.arrow_patch = None
#self._fw, self._fh = 0., 0. # for alignment
self._box_alignment = box_alignment
# frame
self.patch = FancyBboxPatch(
xy=(0.0, 0.0), width=1., height=1.,
facecolor='w', edgecolor='k',
mutation_scale=self.prop.get_size_in_points(),
snap=True
)
self.patch.set_boxstyle("square", pad=pad)
if bboxprops:
self.patch.set(**bboxprops)
self._drawFrame = frameon
@property
def xyann(self):
return self.xybox
@xyann.setter
def xyann(self, xyann):
self.xybox = xyann
self.stale = True
@property
def anncoords(self):
return self.boxcoords
@anncoords.setter
def anncoords(self, coords):
self.boxcoords = coords
self.stale = True
def contains(self, event):
t, tinfo = self.offsetbox.contains(event)
#if self.arrow_patch is not None:
# a,ainfo=self.arrow_patch.contains(event)
# t = t or a
# self.arrow_patch is currently not checked as this can be a line - JJ
return t, tinfo
def get_children(self):
children = [self.offsetbox, self.patch]
if self.arrow_patch:
children.append(self.arrow_patch)
return children
def set_figure(self, fig):
if self.arrow_patch is not None:
self.arrow_patch.set_figure(fig)
self.offsetbox.set_figure(fig)
martist.Artist.set_figure(self, fig)
def set_fontsize(self, s=None):
"""
set fontsize in points
"""
if s is None:
s = rcParams["legend.fontsize"]
self.prop = FontProperties(size=s)
self.stale = True
def get_fontsize(self, s=None):
"""
return fontsize in points
"""
return self.prop.get_size_in_points()
def update_positions(self, renderer):
"""
Update the pixel positions of the annotated point and the text.
"""
xy_pixel = self._get_position_xy(renderer)
self._update_position_xybox(renderer, xy_pixel)
mutation_scale = renderer.points_to_pixels(self.get_fontsize())
self.patch.set_mutation_scale(mutation_scale)
if self.arrow_patch:
self.arrow_patch.set_mutation_scale(mutation_scale)
def _update_position_xybox(self, renderer, xy_pixel):
"""
Update the pixel positions of the annotation text and the arrow
patch.
"""
x, y = self.xybox
if isinstance(self.boxcoords, tuple):
xcoord, ycoord = self.boxcoords
x1, y1 = self._get_xy(renderer, x, y, xcoord)
x2, y2 = self._get_xy(renderer, x, y, ycoord)
ox0, oy0 = x1, y2
else:
ox0, oy0 = self._get_xy(renderer, x, y, self.boxcoords)
w, h, xd, yd = self.offsetbox.get_extent(renderer)
_fw, _fh = self._box_alignment
self.offsetbox.set_offset((ox0 - _fw * w + xd, oy0 - _fh * h + yd))
# update patch position
bbox = self.offsetbox.get_window_extent(renderer)
#self.offsetbox.set_offset((ox0-_fw*w, oy0-_fh*h))
self.patch.set_bounds(bbox.x0, bbox.y0,
bbox.width, bbox.height)
x, y = xy_pixel
ox1, oy1 = x, y
if self.arrowprops:
x0, y0 = x, y
d = self.arrowprops.copy()
# Use FancyArrowPatch if self.arrowprops has "arrowstyle" key.
# adjust the starting point of the arrow relative to
# the textbox.
            # TODO: Rotation needs to be accounted for.
relpos = self._arrow_relpos
ox0 = bbox.x0 + bbox.width * relpos[0]
oy0 = bbox.y0 + bbox.height * relpos[1]
# The arrow will be drawn from (ox0, oy0) to (ox1,
# oy1). It will be first clipped by patchA and patchB.
# Then it will be shrunk by shrinkA and shrinkB
# (in points). If patch A is not set, self.bbox_patch
# is used.
self.arrow_patch.set_positions((ox0, oy0), (ox1, oy1))
fs = self.prop.get_size_in_points()
mutation_scale = d.pop("mutation_scale", fs)
mutation_scale = renderer.points_to_pixels(mutation_scale)
self.arrow_patch.set_mutation_scale(mutation_scale)
patchA = d.pop("patchA", self.patch)
self.arrow_patch.set_patchA(patchA)
def draw(self, renderer):
"""
Draw the :class:`Annotation` object to the given *renderer*.
"""
if renderer is not None:
self._renderer = renderer
if not self.get_visible():
return
xy_pixel = self._get_position_xy(renderer)
if not self._check_xy(renderer, xy_pixel):
return
self.update_positions(renderer)
if self.arrow_patch is not None:
if self.arrow_patch.figure is None and self.figure is not None:
self.arrow_patch.figure = self.figure
self.arrow_patch.draw(renderer)
if self._drawFrame:
self.patch.draw(renderer)
self.offsetbox.draw(renderer)
self.stale = False
class DraggableBase(object):
"""
    Helper base class for a draggable artist (legend, offsetbox).
    The derived class must override the following two methods.
      def save_offset(self):
          pass
      def update_offset(self, dx, dy):
          pass
    *save_offset* is called when the object is picked for dragging and it
    is meant to save the reference position of the artist.
    *update_offset* is called during the dragging. dx and dy are the pixel
    offsets from the point where the mouse drag started.
    Optionally you may override the following two methods.
      def artist_picker(self, artist, evt):
          return self.ref_artist.contains(evt)
      def finalize_offset(self):
          pass
    *artist_picker* is the picker method that will be used.
    *finalize_offset* is called when the mouse is released. In the current
    implementation of DraggableLegend and DraggableAnnotation,
    *update_offset* places the artists simply in display coordinates, and
    *finalize_offset* recalculates their position in normalized axes
    coordinates and sets the relevant attribute.
"""
def __init__(self, ref_artist, use_blit=False):
self.ref_artist = ref_artist
self.got_artist = False
self.canvas = self.ref_artist.figure.canvas
self._use_blit = use_blit and self.canvas.supports_blit
c2 = self.canvas.mpl_connect('pick_event', self.on_pick)
c3 = self.canvas.mpl_connect('button_release_event', self.on_release)
ref_artist.set_picker(self.artist_picker)
self.cids = [c2, c3]
def on_motion(self, evt):
if self.got_artist:
dx = evt.x - self.mouse_x
dy = evt.y - self.mouse_y
self.update_offset(dx, dy)
self.canvas.draw()
def on_motion_blit(self, evt):
if self.got_artist:
dx = evt.x - self.mouse_x
dy = evt.y - self.mouse_y
self.update_offset(dx, dy)
self.canvas.restore_region(self.background)
self.ref_artist.draw(self.ref_artist.figure._cachedRenderer)
self.canvas.blit(self.ref_artist.figure.bbox)
def on_pick(self, evt):
if evt.artist == self.ref_artist:
self.mouse_x = evt.mouseevent.x
self.mouse_y = evt.mouseevent.y
self.got_artist = True
if self._use_blit:
self.ref_artist.set_animated(True)
self.canvas.draw()
self.background = self.canvas.copy_from_bbox(
self.ref_artist.figure.bbox)
self.ref_artist.draw(self.ref_artist.figure._cachedRenderer)
self.canvas.blit(self.ref_artist.figure.bbox)
self._c1 = self.canvas.mpl_connect('motion_notify_event',
self.on_motion_blit)
else:
self._c1 = self.canvas.mpl_connect('motion_notify_event',
self.on_motion)
self.save_offset()
def on_release(self, event):
if self.got_artist:
self.finalize_offset()
self.got_artist = False
self.canvas.mpl_disconnect(self._c1)
if self._use_blit:
self.ref_artist.set_animated(False)
def disconnect(self):
"""disconnect the callbacks"""
for cid in self.cids:
self.canvas.mpl_disconnect(cid)
try:
c1 = self._c1
except AttributeError:
pass
else:
self.canvas.mpl_disconnect(c1)
def artist_picker(self, artist, evt):
return self.ref_artist.contains(evt)
def save_offset(self):
pass
def update_offset(self, dx, dy):
pass
def finalize_offset(self):
pass
class DraggableOffsetBox(DraggableBase):
def __init__(self, ref_artist, offsetbox, use_blit=False):
DraggableBase.__init__(self, ref_artist, use_blit=use_blit)
self.offsetbox = offsetbox
def save_offset(self):
offsetbox = self.offsetbox
renderer = offsetbox.figure._cachedRenderer
w, h, xd, yd = offsetbox.get_extent(renderer)
offset = offsetbox.get_offset(w, h, xd, yd, renderer)
self.offsetbox_x, self.offsetbox_y = offset
self.offsetbox.set_offset(offset)
def update_offset(self, dx, dy):
loc_in_canvas = self.offsetbox_x + dx, self.offsetbox_y + dy
self.offsetbox.set_offset(loc_in_canvas)
def get_loc_in_canvas(self):
offsetbox = self.offsetbox
renderer = offsetbox.figure._cachedRenderer
w, h, xd, yd = offsetbox.get_extent(renderer)
ox, oy = offsetbox._offset
loc_in_canvas = (ox - xd, oy - yd)
return loc_in_canvas
class DraggableAnnotation(DraggableBase):
def __init__(self, annotation, use_blit=False):
DraggableBase.__init__(self, annotation, use_blit=use_blit)
self.annotation = annotation
def save_offset(self):
ann = self.annotation
self.ox, self.oy = ann.get_transform().transform(ann.xyann)
def update_offset(self, dx, dy):
ann = self.annotation
ann.xyann = ann.get_transform().inverted().transform(
(self.ox + dx, self.oy + dy))
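# Usage sketch (illustration only): wiring an existing annotation to the
# draggable helper above; dragging then happens through the
# pick/motion/release callbacks installed by DraggableBase.
def _example_draggable_annotation():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ann = ax.annotate("drag me", xy=(0.5, 0.5), xytext=(0.2, 0.8),
                      arrowprops=dict(arrowstyle="->"))
    dragger = DraggableAnnotation(ann, use_blit=False)
    return fig, dragger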
if __name__ == "__main__":
import matplotlib.pyplot as plt
fig = plt.figure(1)
fig.clf()
ax = plt.subplot(121)
#txt = ax.text(0.5, 0.5, "Test", size=30, ha="center", color="w")
kwargs = dict()
a = np.arange(256).reshape(16, 16) / 256.
myimage = OffsetImage(a,
zoom=2,
norm=None,
origin=None,
**kwargs
)
ax.add_artist(myimage)
myimage.set_offset((100, 100))
myimage2 = OffsetImage(a,
zoom=2,
norm=None,
origin=None,
**kwargs
)
ann = AnnotationBbox(myimage2, (0.5, 0.5),
xybox=(30, 30),
xycoords='data',
boxcoords="offset points",
frameon=True, pad=0.4, # BboxPatch
bboxprops=dict(boxstyle="round", fc="y"),
fontsize=None,
arrowprops=dict(arrowstyle="->"),
)
ax.add_artist(ann)
plt.draw()
plt.show()
| 55,538 | 29.650662 | 78 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/animation.py
|
# TODO:
# * Loop Delay is broken on GTKAgg. This is because source_remove() is not
# working as we want. PyGTK bug?
# * Documentation -- this will need a new section of the User's Guide.
# Both for Animations and just timers.
# - Also need to update http://www.scipy.org/Cookbook/Matplotlib/Animations
# * Blit
# * Currently broken with Qt4 for widgets that don't start on screen
# * Still a few edge cases that aren't working correctly
# * Can this integrate better with existing matplotlib animation artist flag?
# - If animated removes from default draw(), perhaps we could use this to
# simplify initial draw.
# * Example
# * Frameless animation - pure procedural with no loop
# * Need example that uses something like inotify or subprocess
# * Complex syncing examples
# * Movies
# * Can blit be enabled for movies?
# * Need to consider event sources to allow clicking through multiple figures
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange, zip
import abc
import contextlib
from io import BytesIO
import itertools
import logging
import os
import platform
import sys
import tempfile
import uuid
import numpy as np
from matplotlib._animation_data import (DISPLAY_TEMPLATE, INCLUDED_FRAMES,
JS_INCLUDE)
from matplotlib.compat import subprocess
from matplotlib import cbook, rcParams, rcParamsDefault, rc_context
if six.PY2:
from base64 import encodestring as encodebytes
else:
from base64 import encodebytes
_log = logging.getLogger(__name__)
# Process creation flag for subprocess to prevent it raising a terminal
# window. See for example:
# https://stackoverflow.com/questions/24130623/using-python-subprocess-popen-cant-prevent-exe-stopped-working-prompt
if platform.system() == 'Windows':
subprocess_creation_flags = CREATE_NO_WINDOW = 0x08000000
else:
# Apparently None won't work here
subprocess_creation_flags = 0
# Other potential writing methods:
# * http://pymedia.org/
# * libmng (produces swf) python wrappers: https://github.com/libming/libming
# * Wrap x264 API:
# (http://stackoverflow.com/questions/2940671/
# how-to-encode-series-of-images-into-h264-using-x264-api-c-c )
def adjusted_figsize(w, h, dpi, n):
'''Compute figure size so that pixels are a multiple of n
Parameters
----------
w, h : float
Size in inches
dpi : float
The dpi
n : int
The target multiple
Returns
-------
wnew, hnew : float
The new figure size in inches.
'''
# this maybe simplified if / when we adopt consistent rounding for
# pixel size across the whole library
def correct_roundoff(x, dpi, n):
if int(x*dpi) % n != 0:
if int(np.nextafter(x, np.inf)*dpi) % n == 0:
x = np.nextafter(x, np.inf)
elif int(np.nextafter(x, -np.inf)*dpi) % n == 0:
x = np.nextafter(x, -np.inf)
return x
wnew = int(w * dpi / n) * n / dpi
hnew = int(h * dpi / n) * n / dpi
return (correct_roundoff(wnew, dpi, n), correct_roundoff(hnew, dpi, n))
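# Worked example (illustration only): at dpi=100 with the h264 requirement
# n=2, a 3.2 x 2.41 inch figure would rasterize to 320 x 241 pixels; the odd
# height is nudged down so that
#     adjusted_figsize(3.2, 2.41, 100, 2) == (3.2, 2.4)   # i.e. 320 x 240 px
# (up to the floating point rounding handled by correct_roundoff above).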
# A registry for available MovieWriter classes
class MovieWriterRegistry(object):
'''Registry of available writer classes by human readable name.'''
def __init__(self):
self.avail = dict()
self._registered = dict()
self._dirty = False
def set_dirty(self):
"""Sets a flag to re-setup the writers."""
self._dirty = True
def register(self, name):
"""Decorator for registering a class under a name.
Example use::
@registry.register(name)
class Foo:
pass
"""
def wrapper(writerClass):
self._registered[name] = writerClass
if writerClass.isAvailable():
self.avail[name] = writerClass
return writerClass
return wrapper
def ensure_not_dirty(self):
"""If dirty, reasks the writers if they are available"""
if self._dirty:
self.reset_available_writers()
def reset_available_writers(self):
"""Reset the available state of all registered writers"""
self.avail = {}
for name, writerClass in self._registered.items():
if writerClass.isAvailable():
self.avail[name] = writerClass
self._dirty = False
def list(self):
'''Get a list of available MovieWriters.'''
self.ensure_not_dirty()
return list(self.avail)
def is_available(self, name):
'''Check if given writer is available by name.
Parameters
----------
name : str
Returns
-------
available : bool
'''
self.ensure_not_dirty()
return name in self.avail
def __getitem__(self, name):
self.ensure_not_dirty()
if not self.avail:
raise RuntimeError("No MovieWriters available!")
try:
return self.avail[name]
except KeyError:
raise RuntimeError(
'Requested MovieWriter ({}) not available'.format(name))
writers = MovieWriterRegistry()
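# Illustrative sketch (not part of the module): how the registry above is
# typically queried by name; the 'ffmpeg' key is only an example and may not
# be available on every system.
def _example_pick_writer(preferred='ffmpeg'):
    if writers.is_available(preferred):
        return writers[preferred]
    # fall back to the first writer that reports itself as available
    available = writers.list()
    return writers[available[0]] if available else None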
class AbstractMovieWriter(six.with_metaclass(abc.ABCMeta)):
'''
    Abstract base class for writing movies. Fundamentally, what a MovieWriter
    does is provide a way to grab frames by calling grab_frame().
setup() is called to start the process and finish() is called afterwards.
This class is set up to provide for writing movie frame data to a pipe.
saving() is provided as a context manager to facilitate this process as::
with moviewriter.saving(fig, outfile='myfile.mp4', dpi=100):
# Iterate over frames
moviewriter.grab_frame(**savefig_kwargs)
The use of the context manager ensures that setup() and finish() are
performed as necessary.
An instance of a concrete subclass of this class can be given as the
``writer`` argument of `Animation.save()`.
'''
@abc.abstractmethod
def setup(self, fig, outfile, dpi=None):
'''
Perform setup for writing the movie file.
Parameters
----------
fig: `matplotlib.figure.Figure` instance
The figure object that contains the information for frames
outfile: string
The filename of the resulting movie file
dpi: int, optional
The DPI (or resolution) for the file. This controls the size
in pixels of the resulting movie file. Default is ``fig.dpi``.
'''
@abc.abstractmethod
def grab_frame(self, **savefig_kwargs):
'''
Grab the image information from the figure and save as a movie frame.
All keyword arguments in savefig_kwargs are passed on to the `savefig`
command that saves the figure.
'''
@abc.abstractmethod
def finish(self):
'''Finish any processing for writing the movie.'''
@contextlib.contextmanager
def saving(self, fig, outfile, dpi, *args, **kwargs):
'''
Context manager to facilitate writing the movie file.
``*args, **kw`` are any parameters that should be passed to `setup`.
'''
# This particular sequence is what contextlib.contextmanager wants
self.setup(fig, outfile, dpi, *args, **kwargs)
try:
yield self
finally:
self.finish()
class MovieWriter(AbstractMovieWriter):
'''Base class for writing movies.
This class is set up to provide for writing movie frame data to a pipe.
See examples for how to use these classes.
Attributes
----------
frame_format : str
The format used in writing frame data, defaults to 'rgba'
fig : `~matplotlib.figure.Figure`
The figure to capture data from.
This must be provided by the sub-classes.
'''
def __init__(self, fps=5, codec=None, bitrate=None, extra_args=None,
metadata=None):
'''MovieWriter
Parameters
----------
fps: int
Framerate for movie.
codec: string or None, optional
The codec to use. If ``None`` (the default) the ``animation.codec``
rcParam is used.
bitrate: int or None, optional
The bitrate for the saved movie file, which is one way to control
the output file size and quality. The default value is ``None``,
which uses the ``animation.bitrate`` rcParam. A value of -1
implies that the bitrate should be determined automatically by the
underlying utility.
extra_args: list of strings or None, optional
A list of extra string arguments to be passed to the underlying
movie utility. The default is ``None``, which passes the additional
arguments in the ``animation.extra_args`` rcParam.
metadata: Dict[str, str] or None
A dictionary of keys and values for metadata to include in the
output file. Some keys that may be of use include:
title, artist, genre, subject, copyright, srcform, comment.
'''
self.fps = fps
self.frame_format = 'rgba'
if codec is None:
self.codec = rcParams['animation.codec']
else:
self.codec = codec
if bitrate is None:
self.bitrate = rcParams['animation.bitrate']
else:
self.bitrate = bitrate
if extra_args is None:
self.extra_args = list(rcParams[self.args_key])
else:
self.extra_args = extra_args
if metadata is None:
self.metadata = dict()
else:
self.metadata = metadata
@property
def frame_size(self):
'''A tuple ``(width, height)`` in pixels of a movie frame.'''
w, h = self.fig.get_size_inches()
return int(w * self.dpi), int(h * self.dpi)
def _adjust_frame_size(self):
if self.codec == 'h264':
wo, ho = self.fig.get_size_inches()
w, h = adjusted_figsize(wo, ho, self.dpi, 2)
if not (wo, ho) == (w, h):
self.fig.set_size_inches(w, h, forward=True)
_log.info('figure size (inches) has been adjusted '
'from %s x %s to %s x %s', wo, ho, w, h)
else:
w, h = self.fig.get_size_inches()
_log.debug('frame size in pixels is %s x %s', *self.frame_size)
return w, h
def setup(self, fig, outfile, dpi=None):
'''
Perform setup for writing the movie file.
Parameters
----------
fig : matplotlib.figure.Figure
The figure object that contains the information for frames
outfile : string
The filename of the resulting movie file
dpi : int, optional
The DPI (or resolution) for the file. This controls the size
in pixels of the resulting movie file. Default is fig.dpi.
'''
self.outfile = outfile
self.fig = fig
if dpi is None:
dpi = self.fig.dpi
self.dpi = dpi
self._w, self._h = self._adjust_frame_size()
# Run here so that grab_frame() can write the data to a pipe. This
# eliminates the need for temp files.
self._run()
def _run(self):
# Uses subprocess to call the program for assembling frames into a
# movie file. *args* returns the sequence of command line arguments
# from a few configuration options.
command = self._args()
output = subprocess.PIPE
_log.info('MovieWriter.run: running command: %s', command)
self._proc = subprocess.Popen(command, shell=False,
stdout=output, stderr=output,
stdin=subprocess.PIPE,
creationflags=subprocess_creation_flags)
def finish(self):
'''Finish any processing for writing the movie.'''
self.cleanup()
def grab_frame(self, **savefig_kwargs):
'''
Grab the image information from the figure and save as a movie frame.
All keyword arguments in savefig_kwargs are passed on to the `savefig`
command that saves the figure.
'''
_log.debug('MovieWriter.grab_frame: Grabbing frame.')
try:
# re-adjust the figure size in case it has been changed by the
# user. We must ensure that every frame is the same size or
# the movie will not save correctly.
self.fig.set_size_inches(self._w, self._h)
# Tell the figure to save its data to the sink, using the
# frame format and dpi.
self.fig.savefig(self._frame_sink(), format=self.frame_format,
dpi=self.dpi, **savefig_kwargs)
except (RuntimeError, IOError) as e:
out, err = self._proc.communicate()
_log.info('MovieWriter -- Error '
'running proc:\n%s\n%s' % (out, err))
raise IOError('Error saving animation to file (cause: {0}) '
'Stdout: {1} StdError: {2}. It may help to re-run '
'with logging level set to '
'DEBUG.'.format(e, out, err))
def _frame_sink(self):
'''Returns the place to which frames should be written.'''
return self._proc.stdin
def _args(self):
'''Assemble list of utility-specific command-line arguments.'''
        raise NotImplementedError("args needs to be implemented by subclass.")
def cleanup(self):
'''Clean-up and collect the process used to write the movie file.'''
out, err = self._proc.communicate()
self._frame_sink().close()
_log.debug('MovieWriter -- Command stdout:\n%s', out)
_log.debug('MovieWriter -- Command stderr:\n%s', err)
@classmethod
def bin_path(cls):
'''
Returns the binary path to the commandline tool used by a specific
subclass. This is a class method so that the tool can be looked for
before making a particular MovieWriter subclass available.
'''
return str(rcParams[cls.exec_key])
@classmethod
def isAvailable(cls):
'''
Check to see if a MovieWriter subclass is actually available by
running the commandline tool.
'''
bin_path = cls.bin_path()
if not bin_path:
return False
try:
p = subprocess.Popen(
bin_path,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
creationflags=subprocess_creation_flags)
return cls._handle_subprocess(p)
except OSError:
return False
@classmethod
def _handle_subprocess(cls, process):
process.communicate()
return True
class FileMovieWriter(MovieWriter):
'''`MovieWriter` for writing to individual files and stitching at the end.
This must be sub-classed to be useful.
'''
def __init__(self, *args, **kwargs):
MovieWriter.__init__(self, *args, **kwargs)
self.frame_format = rcParams['animation.frame_format']
def setup(self, fig, outfile, dpi=None, frame_prefix='_tmp',
clear_temp=True):
'''Perform setup for writing the movie file.
Parameters
----------
fig : matplotlib.figure.Figure
The figure to grab the rendered frames from.
outfile : str
The filename of the resulting movie file.
dpi : number, optional
The dpi of the output file. This, with the figure size,
controls the size in pixels of the resulting movie file.
Default is fig.dpi.
frame_prefix : str, optional
The filename prefix to use for temporary files. Defaults to
``'_tmp'``.
clear_temp : bool, optional
If the temporary files should be deleted after stitching
the final result. Setting this to ``False`` can be useful for
debugging. Defaults to ``True``.
'''
self.fig = fig
self.outfile = outfile
if dpi is None:
dpi = self.fig.dpi
self.dpi = dpi
self._adjust_frame_size()
self.clear_temp = clear_temp
self.temp_prefix = frame_prefix
self._frame_counter = 0 # used for generating sequential file names
self._temp_names = list()
self.fname_format_str = '%s%%07d.%s'
@property
def frame_format(self):
'''
Format (png, jpeg, etc.) to use for saving the frames, which can be
decided by the individual subclasses.
'''
return self._frame_format
@frame_format.setter
def frame_format(self, frame_format):
if frame_format in self.supported_formats:
self._frame_format = frame_format
else:
self._frame_format = self.supported_formats[0]
def _base_temp_name(self):
# Generates a template name (without number) given the frame format
# for extension and the prefix.
return self.fname_format_str % (self.temp_prefix, self.frame_format)
def _frame_sink(self):
# Creates a filename for saving using the basename and the current
# counter.
fname = self._base_temp_name() % self._frame_counter
# Save the filename so we can delete it later if necessary
self._temp_names.append(fname)
_log.debug('FileMovieWriter.frame_sink: saving frame %d to fname=%s',
self._frame_counter, fname)
self._frame_counter += 1 # Ensures each created name is 'unique'
# This file returned here will be closed once it's used by savefig()
# because it will no longer be referenced and will be gc-ed.
return open(fname, 'wb')
def grab_frame(self, **savefig_kwargs):
'''
Grab the image information from the figure and save as a movie frame.
All keyword arguments in savefig_kwargs are passed on to the `savefig`
command that saves the figure.
'''
# Overloaded to explicitly close temp file.
_log.debug('MovieWriter.grab_frame: Grabbing frame.')
try:
# Tell the figure to save its data to the sink, using the
# frame format and dpi.
with self._frame_sink() as myframesink:
self.fig.savefig(myframesink, format=self.frame_format,
dpi=self.dpi, **savefig_kwargs)
except RuntimeError:
out, err = self._proc.communicate()
_log.info('MovieWriter -- Error '
'running proc:\n%s\n%s' % (out, err))
raise
def finish(self):
# Call run here now that all frame grabbing is done. All temp files
# are available to be assembled.
self._run()
MovieWriter.finish(self) # Will call clean-up
# Check error code for creating file here, since we just run
# the process here, rather than having an open pipe.
if self._proc.returncode:
try:
stdout = [s.decode() for s in self._proc._stdout_buff]
stderr = [s.decode() for s in self._proc._stderr_buff]
_log.info("MovieWriter.finish: stdout: %s", stdout)
_log.info("MovieWriter.finish: stderr: %s", stderr)
except Exception as e:
pass
raise RuntimeError('Error creating movie, return code: {}'
.format(self._proc.returncode))
def cleanup(self):
MovieWriter.cleanup(self)
# Delete temporary files
if self.clear_temp:
_log.debug('MovieWriter: clearing temporary fnames=%s',
self._temp_names)
for fname in self._temp_names:
os.remove(fname)
@writers.register('pillow')
class PillowWriter(MovieWriter):
@classmethod
def isAvailable(cls):
try:
import PIL
except ImportError:
return False
return True
def __init__(self, *args, **kwargs):
if kwargs.get("extra_args") is None:
kwargs["extra_args"] = ()
super(PillowWriter, self).__init__(*args, **kwargs)
def setup(self, fig, outfile, dpi=None):
self._frames = []
self._outfile = outfile
self._dpi = dpi
self._fig = fig
def grab_frame(self, **savefig_kwargs):
from PIL import Image
buf = BytesIO()
self._fig.savefig(buf, **dict(savefig_kwargs, format="rgba"))
renderer = self._fig.canvas.get_renderer()
# Using frombuffer / getbuffer may be slightly more efficient, but
# Py3-only.
self._frames.append(Image.frombytes(
"RGBA",
(int(renderer.width), int(renderer.height)),
buf.getvalue()))
def finish(self):
self._frames[0].save(
self._outfile, save_all=True, append_images=self._frames[1:],
duration=int(1000 / self.fps))
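# Usage sketch (assumption): PillowWriter needs no external binary, so it is
# a convenient way to write an animated GIF frame by frame through the
# saving() context manager defined above.  Assumes an Agg-based backend so
# that the canvas supports RGBA output.
def _example_pillow_writer():
    import numpy as np
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    im = ax.imshow(np.zeros((8, 8)), vmin=0, vmax=1)
    writer = PillowWriter(fps=5)
    with writer.saving(fig, 'demo.gif', dpi=100):
        for _ in range(10):
            im.set_data(np.random.rand(8, 8))
            writer.grab_frame()
    return fig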
# Base class of ffmpeg information. Has the config keys and the common set
# of arguments that controls the *output* side of things.
class FFMpegBase(object):
'''Mixin class for FFMpeg output.
To be useful this must be multiply-inherited from with a
`MovieWriterBase` sub-class.
'''
exec_key = 'animation.ffmpeg_path'
args_key = 'animation.ffmpeg_args'
@property
def output_args(self):
args = ['-vcodec', self.codec]
# For h264, the default format is yuv444p, which is not compatible
# with quicktime (and others). Specifying yuv420p fixes playback on
        # iOS, as well as HTML5 video in firefox and safari (on both Win and
# OSX). Also fixes internet explorer. This is as of 2015/10/29.
if self.codec == 'h264' and '-pix_fmt' not in self.extra_args:
args.extend(['-pix_fmt', 'yuv420p'])
# The %dk adds 'k' as a suffix so that ffmpeg treats our bitrate as in
# kbps
if self.bitrate > 0:
args.extend(['-b', '%dk' % self.bitrate])
if self.extra_args:
args.extend(self.extra_args)
for k, v in six.iteritems(self.metadata):
args.extend(['-metadata', '%s=%s' % (k, v)])
return args + ['-y', self.outfile]
@classmethod
def _handle_subprocess(cls, process):
_, err = process.communicate()
# Ubuntu 12.04 ships a broken ffmpeg binary which we shouldn't use
# NOTE : when removed, remove the same method in AVConvBase.
if 'Libav' in err.decode():
return False
return True
# Combine FFMpeg options with pipe-based writing
@writers.register('ffmpeg')
class FFMpegWriter(FFMpegBase, MovieWriter):
'''Pipe-based ffmpeg writer.
Frames are streamed directly to ffmpeg via a pipe and written in a single
pass.
'''
def _args(self):
# Returns the command line parameters for subprocess to use
# ffmpeg to create a movie using a pipe.
args = [self.bin_path(), '-f', 'rawvideo', '-vcodec', 'rawvideo',
'-s', '%dx%d' % self.frame_size, '-pix_fmt', self.frame_format,
'-r', str(self.fps)]
# Logging is quieted because subprocess.PIPE has limited buffer size.
# If you have a lot of frames in your animation and set logging to
# DEBUG, you will have a buffer overrun.
if (_log.getEffectiveLevel() > logging.DEBUG):
args += ['-loglevel', 'quiet']
args += ['-i', 'pipe:'] + self.output_args
return args
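# Usage sketch (assumption): driving the pipe-based writer manually through
# AbstractMovieWriter.saving().  Requires a working ffmpeg binary at the path
# configured by the animation.ffmpeg_path rcParam; the output name is
# arbitrary and chosen for the example only.
def _example_ffmpeg_writer():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.set_xlim(0, 30)
    ax.set_ylim(0, 30)
    line, = ax.plot([], [])
    writer = FFMpegWriter(fps=15, metadata=dict(title='demo'))
    with writer.saving(fig, 'demo.mp4', dpi=100):
        for i in range(30):
            line.set_data(range(i), range(i))
            writer.grab_frame()
    return fig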
# Combine FFMpeg options with temp file-based writing
@writers.register('ffmpeg_file')
class FFMpegFileWriter(FFMpegBase, FileMovieWriter):
'''File-based ffmpeg writer.
Frames are written to temporary files on disk and then stitched
together at the end.
'''
supported_formats = ['png', 'jpeg', 'ppm', 'tiff', 'sgi', 'bmp',
'pbm', 'raw', 'rgba']
def _args(self):
# Returns the command line parameters for subprocess to use
# ffmpeg to create a movie using a collection of temp images
return [self.bin_path(), '-r', str(self.fps),
'-i', self._base_temp_name(),
'-vframes', str(self._frame_counter)] + self.output_args
# Base class of avconv information. AVConv has identical arguments to
# FFMpeg
class AVConvBase(FFMpegBase):
'''Mixin class for avconv output.
To be useful this must be multiply-inherited from with a
`MovieWriterBase` sub-class.
'''
exec_key = 'animation.avconv_path'
args_key = 'animation.avconv_args'
# NOTE : should be removed when the same method is removed in FFMpegBase.
@classmethod
def _handle_subprocess(cls, process):
return MovieWriter._handle_subprocess(process)
# Combine AVConv options with pipe-based writing
@writers.register('avconv')
class AVConvWriter(AVConvBase, FFMpegWriter):
'''Pipe-based avconv writer.
Frames are streamed directly to avconv via a pipe and written in a single
pass.
'''
# Combine AVConv options with file-based writing
@writers.register('avconv_file')
class AVConvFileWriter(AVConvBase, FFMpegFileWriter):
'''File-based avconv writer.
Frames are written to temporary files on disk and then stitched
together at the end.
'''
# Base class for animated GIFs with convert utility
class ImageMagickBase(object):
'''Mixin class for ImageMagick output.
To be useful this must be multiply-inherited from with a
`MovieWriterBase` sub-class.
'''
exec_key = 'animation.convert_path'
args_key = 'animation.convert_args'
@property
def delay(self):
return 100. / self.fps
@property
def output_args(self):
return [self.outfile]
@classmethod
def _init_from_registry(cls):
if sys.platform != 'win32' or rcParams[cls.exec_key] != 'convert':
return
from six.moves import winreg
for flag in (0, winreg.KEY_WOW64_32KEY, winreg.KEY_WOW64_64KEY):
try:
hkey = winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE,
'Software\\Imagemagick\\Current',
0, winreg.KEY_QUERY_VALUE | flag)
binpath = winreg.QueryValueEx(hkey, 'BinPath')[0]
winreg.CloseKey(hkey)
binpath += '\\convert.exe'
break
except Exception:
binpath = ''
rcParams[cls.exec_key] = rcParamsDefault[cls.exec_key] = binpath
@classmethod
def isAvailable(cls):
'''
Check to see if a ImageMagickWriter is actually available.
Done by first checking the windows registry (if applicable) and then
running the commandline tool.
'''
bin_path = cls.bin_path()
if bin_path == "convert":
cls._init_from_registry()
return super(ImageMagickBase, cls).isAvailable()
ImageMagickBase._init_from_registry()
# Note: the base classes need to be in that order to get
# isAvailable() from ImageMagickBase called and not the
# one from MovieWriter. The latter is then called by the
# former.
@writers.register('imagemagick')
class ImageMagickWriter(ImageMagickBase, MovieWriter):
'''Pipe-based animated gif.
Frames are streamed directly to ImageMagick via a pipe and written
in a single pass.
'''
def _args(self):
return ([self.bin_path(),
'-size', '%ix%i' % self.frame_size, '-depth', '8',
'-delay', str(self.delay), '-loop', '0',
'%s:-' % self.frame_format]
+ self.output_args)
# Note: the base classes need to be in that order to get
# isAvailable() from ImageMagickBase called and not the
# one from MovieWriter. The latter is then called by the
# former.
@writers.register('imagemagick_file')
class ImageMagickFileWriter(ImageMagickBase, FileMovieWriter):
'''File-based animated gif writer.
Frames are written to temporary files on disk and then stitched
together at the end.
'''
supported_formats = ['png', 'jpeg', 'ppm', 'tiff', 'sgi', 'bmp',
'pbm', 'raw', 'rgba']
def _args(self):
return ([self.bin_path(), '-delay', str(self.delay), '-loop', '0',
'%s*.%s' % (self.temp_prefix, self.frame_format)]
+ self.output_args)
# Taken directly from jakevdp's JSAnimation package at
# http://github.com/jakevdp/JSAnimation
def _included_frames(frame_list, frame_format):
"""frame_list should be a list of filenames"""
return INCLUDED_FRAMES.format(Nframes=len(frame_list),
frame_dir=os.path.dirname(frame_list[0]),
frame_format=frame_format)
def _embedded_frames(frame_list, frame_format):
"""frame_list should be a list of base64-encoded png files"""
template = ' frames[{0}] = "data:image/{1};base64,{2}"\n'
return "\n" + "".join(
template.format(i, frame_format, frame_data.replace('\n', '\\\n'))
for i, frame_data in enumerate(frame_list))
@writers.register('html')
class HTMLWriter(FileMovieWriter):
supported_formats = ['png', 'jpeg', 'tiff', 'svg']
args_key = 'animation.html_args'
@classmethod
def isAvailable(cls):
return True
def __init__(self, fps=30, codec=None, bitrate=None, extra_args=None,
metadata=None, embed_frames=False, default_mode='loop',
embed_limit=None):
self.embed_frames = embed_frames
self.default_mode = default_mode.lower()
# Save embed limit, which is given in MB
if embed_limit is None:
self._bytes_limit = rcParams['animation.embed_limit']
else:
self._bytes_limit = embed_limit
# Convert from MB to bytes
self._bytes_limit *= 1024 * 1024
if self.default_mode not in ['loop', 'once', 'reflect']:
self.default_mode = 'loop'
_log.warning("unrecognized default_mode: using 'loop'")
self._saved_frames = []
self._total_bytes = 0
self._hit_limit = False
super(HTMLWriter, self).__init__(fps, codec, bitrate,
extra_args, metadata)
def setup(self, fig, outfile, dpi, frame_dir=None):
root, ext = os.path.splitext(outfile)
if ext not in ['.html', '.htm']:
raise ValueError("outfile must be *.htm or *.html")
if not self.embed_frames:
if frame_dir is None:
frame_dir = root + '_frames'
if not os.path.exists(frame_dir):
os.makedirs(frame_dir)
frame_prefix = os.path.join(frame_dir, 'frame')
else:
frame_prefix = None
super(HTMLWriter, self).setup(fig, outfile, dpi,
frame_prefix, clear_temp=False)
def grab_frame(self, **savefig_kwargs):
if self.embed_frames:
# Just stop processing if we hit the limit
if self._hit_limit:
return
suffix = '.' + self.frame_format
f = BytesIO()
self.fig.savefig(f, format=self.frame_format,
dpi=self.dpi, **savefig_kwargs)
imgdata64 = encodebytes(f.getvalue()).decode('ascii')
self._total_bytes += len(imgdata64)
if self._total_bytes >= self._bytes_limit:
_log.warning(
"Animation size has reached %s bytes, exceeding the limit "
"of %s. If you're sure you want a larger animation "
"embedded, set the animation.embed_limit rc parameter to "
"a larger value (in MB). This and further frames will be "
"dropped.", self._total_bytes, self._bytes_limit)
self._hit_limit = True
else:
self._saved_frames.append(imgdata64)
else:
return super(HTMLWriter, self).grab_frame(**savefig_kwargs)
def _run(self):
# make a duck-typed subprocess stand in
# this is called by the MovieWriter base class, but not used here.
class ProcessStandin(object):
returncode = 0
def communicate(self):
return '', ''
self._proc = ProcessStandin()
# save the frames to an html file
if self.embed_frames:
fill_frames = _embedded_frames(self._saved_frames,
self.frame_format)
else:
# temp names is filled by FileMovieWriter
fill_frames = _included_frames(self._temp_names,
self.frame_format)
mode_dict = dict(once_checked='',
loop_checked='',
reflect_checked='')
mode_dict[self.default_mode + '_checked'] = 'checked'
interval = 1000 // self.fps
with open(self.outfile, 'w') as of:
of.write(JS_INCLUDE)
of.write(DISPLAY_TEMPLATE.format(id=uuid.uuid4().hex,
Nframes=len(self._temp_names),
fill_frames=fill_frames,
interval=interval,
**mode_dict))
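# Usage sketch (assumption): HTMLWriter can be driven like the other writers;
# with the default embed_frames=False the frames are written next to the HTML
# file in a '<name>_frames' directory referenced by the generated page.
def _example_html_writer():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    line, = ax.plot([0, 1], [0, 1])
    writer = HTMLWriter(fps=10)
    with writer.saving(fig, 'demo.html', dpi=80):
        for frac in (0.25, 0.5, 1.0):
            line.set_ydata([0, frac])
            writer.grab_frame()
    return fig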
class Animation(object):
'''This class wraps the creation of an animation using matplotlib.
It is only a base class which should be subclassed to provide
needed behavior.
This class is not typically used directly.
Parameters
----------
fig : matplotlib.figure.Figure
The figure object that is used to get draw, resize, and any
other needed events.
event_source : object, optional
A class that can run a callback when desired events
are generated, as well as be stopped and started.
Examples include timers (see :class:`TimedAnimation`) and file
system notifications.
blit : bool, optional
controls whether blitting is used to optimize drawing. Defaults
to ``False``.
See Also
--------
FuncAnimation, ArtistAnimation
'''
def __init__(self, fig, event_source=None, blit=False):
self._fig = fig
# Disables blitting for backends that don't support it. This
# allows users to request it if available, but still have a
# fallback that works if it is not.
self._blit = blit and fig.canvas.supports_blit
# These are the basics of the animation. The frame sequence represents
# information for each frame of the animation and depends on how the
# drawing is handled by the subclasses. The event source fires events
# that cause the frame sequence to be iterated.
self.frame_seq = self.new_frame_seq()
self.event_source = event_source
# Instead of starting the event source now, we connect to the figure's
# draw_event, so that we only start once the figure has been drawn.
self._first_draw_id = fig.canvas.mpl_connect('draw_event', self._start)
# Connect to the figure's close_event so that we don't continue to
# fire events and try to draw to a deleted figure.
self._close_id = self._fig.canvas.mpl_connect('close_event',
self._stop)
if self._blit:
self._setup_blit()
def _start(self, *args):
'''
Starts interactive animation. Adds the draw frame command to the GUI
handler, calls show to start the event loop.
'''
# First disconnect our draw event handler
self._fig.canvas.mpl_disconnect(self._first_draw_id)
self._first_draw_id = None # So we can check on save
# Now do any initial draw
self._init_draw()
# Add our callback for stepping the animation and
# actually start the event_source.
self.event_source.add_callback(self._step)
self.event_source.start()
def _stop(self, *args):
# On stop we disconnect all of our events.
if self._blit:
self._fig.canvas.mpl_disconnect(self._resize_id)
self._fig.canvas.mpl_disconnect(self._close_id)
self.event_source.remove_callback(self._step)
self.event_source = None
def save(self, filename, writer=None, fps=None, dpi=None, codec=None,
bitrate=None, extra_args=None, metadata=None, extra_anim=None,
savefig_kwargs=None):
'''Saves a movie file by drawing every frame.
Parameters
----------
filename : str
The output filename, e.g., :file:`mymovie.mp4`.
writer : :class:`MovieWriter` or str, optional
A `MovieWriter` instance to use or a key that identifies a
class to use, such as 'ffmpeg'. If ``None``, defaults to
:rc:`animation.writer`.
fps : number, optional
Frames per second in the movie. Defaults to ``None``, which will use
the animation's specified interval to set the frames per second.
dpi : number, optional
Controls the dots per inch for the movie frames. This combined with
the figure's size in inches controls the size of the movie. If
``None``, defaults to :rc:`savefig.dpi`.
codec : str, optional
The video codec to be used. Not all codecs are supported
            by a given :class:`MovieWriter`. If ``None``, defaults to
            :rc:`animation.codec`.
bitrate : number, optional
Specifies the number of bits used per second in the compressed
movie, in kilobits per second. A higher number means a higher
quality movie, but at the cost of increased file size. If ``None``,
defaults to :rc:`animation.bitrate`.
extra_args : list, optional
List of extra string arguments to be passed to the underlying movie
utility. If ``None``, defaults to :rc:`animation.extra_args`.
metadata : Dict[str, str], optional
Dictionary of keys and values for metadata to include in
the output file. Some keys that may be of use include:
title, artist, genre, subject, copyright, srcform, comment.
extra_anim : list, optional
Additional `Animation` objects that should be included
in the saved movie file. These need to be from the same
`matplotlib.figure.Figure` instance. Also, animation frames will
just be simply combined, so there should be a 1:1 correspondence
between the frames from the different animations.
savefig_kwargs : dict, optional
            A dictionary containing keyword arguments to be passed
            on to the `savefig` command which is called repeatedly to
            save the individual frames.
Notes
-----
fps, codec, bitrate, extra_args, metadata are used to
construct a :class:`MovieWriter` instance and can only be
passed if `writer` is a string. If they are passed as
non-`None` and ``writer`` is a :class:`MovieWriter`, a
`RuntimeError` will be raised.
'''
# If the writer is None, use the rc param to find the name of the one
# to use
if writer is None:
writer = rcParams['animation.writer']
elif (not isinstance(writer, six.string_types) and
any(arg is not None
for arg in (fps, codec, bitrate, extra_args, metadata))):
raise RuntimeError('Passing in values for arguments '
'fps, codec, bitrate, extra_args, or metadata '
'is not supported when writer is an existing '
'MovieWriter instance. These should instead be '
'passed as arguments when creating the '
'MovieWriter instance.')
if savefig_kwargs is None:
savefig_kwargs = {}
# Need to disconnect the first draw callback, since we'll be doing
# draws. Otherwise, we'll end up starting the animation.
if self._first_draw_id is not None:
self._fig.canvas.mpl_disconnect(self._first_draw_id)
reconnect_first_draw = True
else:
reconnect_first_draw = False
if fps is None and hasattr(self, '_interval'):
# Convert interval in ms to frames per second
fps = 1000. / self._interval
# Re-use the savefig DPI for ours if none is given
if dpi is None:
dpi = rcParams['savefig.dpi']
if dpi == 'figure':
dpi = self._fig.dpi
if codec is None:
codec = rcParams['animation.codec']
if bitrate is None:
bitrate = rcParams['animation.bitrate']
all_anim = [self]
if extra_anim is not None:
all_anim.extend(anim
for anim
in extra_anim if anim._fig is self._fig)
# If we have the name of a writer, instantiate an instance of the
# registered class.
if isinstance(writer, six.string_types):
if writer in writers.avail:
writer = writers[writer](fps, codec, bitrate,
extra_args=extra_args,
metadata=metadata)
else:
_log.warning("MovieWriter %s unavailable.", writer)
try:
writer = writers[writers.list()[0]](fps, codec, bitrate,
extra_args=extra_args,
metadata=metadata)
except IndexError:
raise ValueError("Cannot save animation: no writers are "
"available. Please install ffmpeg to "
"save animations.")
_log.info('Animation.save using %s', type(writer))
if 'bbox_inches' in savefig_kwargs:
_log.warning("Warning: discarding the 'bbox_inches' argument in "
"'savefig_kwargs' as it may cause frame size "
"to vary, which is inappropriate for animation.")
savefig_kwargs.pop('bbox_inches')
        # Create a new sequence of frames for saved data. This is different
        # from new_frame_seq() so that 'live' generated frame information can
        # be cached and saved later.
# TODO: Right now, after closing the figure, saving a movie won't work
# since GUI widgets are gone. Either need to remove extra code to
# allow for this non-existent use case or find a way to make it work.
with rc_context():
if rcParams['savefig.bbox'] == 'tight':
_log.info("Disabling savefig.bbox = 'tight', as it may cause "
"frame size to vary, which is inappropriate for "
"animation.")
rcParams['savefig.bbox'] = None
with writer.saving(self._fig, filename, dpi):
for anim in all_anim:
# Clear the initial frame
anim._init_draw()
for data in zip(*[a.new_saved_frame_seq() for a in all_anim]):
for anim, d in zip(all_anim, data):
# TODO: See if turning off blit is really necessary
anim._draw_next_frame(d, blit=False)
writer.grab_frame(**savefig_kwargs)
# Reconnect signal for first draw if necessary
if reconnect_first_draw:
self._first_draw_id = self._fig.canvas.mpl_connect('draw_event',
self._start)
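    # Usage sketch (illustration only): a concrete subclass such as
    # FuncAnimation is normally saved either with a registered writer name,
    #     anim.save('movie.mp4', writer='ffmpeg', fps=30, dpi=150)
    # or with a pre-configured writer instance (in which case fps/codec/etc.
    # must be set on the writer itself, not passed here),
    #     anim.save('movie.gif', writer=PillowWriter(fps=30))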
def _step(self, *args):
'''
Handler for getting events. By default, gets the next frame in the
sequence and hands the data off to be drawn.
'''
# Returns True to indicate that the event source should continue to
# call _step, until the frame sequence reaches the end of iteration,
# at which point False will be returned.
try:
framedata = next(self.frame_seq)
self._draw_next_frame(framedata, self._blit)
return True
except StopIteration:
return False
def new_frame_seq(self):
'''Creates a new sequence of frame information.'''
# Default implementation is just an iterator over self._framedata
return iter(self._framedata)
def new_saved_frame_seq(self):
'''Creates a new sequence of saved/cached frame information.'''
# Default is the same as the regular frame sequence
return self.new_frame_seq()
def _draw_next_frame(self, framedata, blit):
# Breaks down the drawing of the next frame into steps of pre- and
# post- draw, as well as the drawing of the frame itself.
self._pre_draw(framedata, blit)
self._draw_frame(framedata)
self._post_draw(framedata, blit)
def _init_draw(self):
# Initial draw to clear the frame. Also used by the blitting code
# when a clean base is required.
pass
def _pre_draw(self, framedata, blit):
# Perform any cleaning or whatnot before the drawing of the frame.
# This default implementation allows blit to clear the frame.
if blit:
self._blit_clear(self._drawn_artists, self._blit_cache)
def _draw_frame(self, framedata):
# Performs actual drawing of the frame.
raise NotImplementedError('Needs to be implemented by subclasses to'
' actually make an animation.')
def _post_draw(self, framedata, blit):
# After the frame is rendered, this handles the actual flushing of
# the draw, which can be a direct draw_idle() or make use of the
# blitting.
if blit and self._drawn_artists:
self._blit_draw(self._drawn_artists, self._blit_cache)
else:
self._fig.canvas.draw_idle()
# The rest of the code in this class is to facilitate easy blitting
def _blit_draw(self, artists, bg_cache):
# Handles blitted drawing, which renders only the artists given instead
# of the entire figure.
updated_ax = []
for a in artists:
# If we haven't cached the background for this axes object, do
# so now. This might not always be reliable, but it's an attempt
# to automate the process.
if a.axes not in bg_cache:
bg_cache[a.axes] = a.figure.canvas.copy_from_bbox(a.axes.bbox)
a.axes.draw_artist(a)
updated_ax.append(a.axes)
# After rendering all the needed artists, blit each axes individually.
for ax in set(updated_ax):
ax.figure.canvas.blit(ax.bbox)
def _blit_clear(self, artists, bg_cache):
# Get a list of the axes that need clearing from the artists that
# have been drawn. Grab the appropriate saved background from the
# cache and restore.
axes = set(a.axes for a in artists)
for a in axes:
if a in bg_cache:
a.figure.canvas.restore_region(bg_cache[a])
def _setup_blit(self):
# Setting up the blit requires: a cache of the background for the
# axes
self._blit_cache = dict()
self._drawn_artists = []
self._resize_id = self._fig.canvas.mpl_connect('resize_event',
self._handle_resize)
self._post_draw(None, self._blit)
def _handle_resize(self, *args):
# On resize, we need to disable the resize event handling so we don't
# get too many events. Also stop the animation events, so that
# we're paused. Reset the cache and re-init. Set up an event handler
# to catch once the draw has actually taken place.
self._fig.canvas.mpl_disconnect(self._resize_id)
self.event_source.stop()
self._blit_cache.clear()
self._init_draw()
self._resize_id = self._fig.canvas.mpl_connect('draw_event',
self._end_redraw)
def _end_redraw(self, evt):
# Now that the redraw has happened, do the post draw flushing and
# blit handling. Then re-enable all of the original events.
self._post_draw(None, False)
self.event_source.start()
self._fig.canvas.mpl_disconnect(self._resize_id)
self._resize_id = self._fig.canvas.mpl_connect('resize_event',
self._handle_resize)
def to_html5_video(self, embed_limit=None):
'''Returns animation as an HTML5 video tag.
This saves the animation as an h264 video, encoded in base64
directly into the HTML5 video tag. This respects the rc parameters
for the writer as well as the bitrate. This also makes use of the
``interval`` to control the speed, and uses the ``repeat``
parameter to decide whether to loop.
'''
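        # Usage sketch (assumes an IPython/Jupyter session; ``anim`` stands
        # for any Animation instance):
        #
        #     from IPython.display import HTML
        #     HTML(anim.to_html5_video())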
VIDEO_TAG = r'''<video {size} {options}>
<source type="video/mp4" src="data:video/mp4;base64,{video}">
Your browser does not support the video tag.
</video>'''
# Cache the rendering of the video as HTML
if not hasattr(self, '_base64_video'):
# Save embed limit, which is given in MB
if embed_limit is None:
embed_limit = rcParams['animation.embed_limit']
# Convert from MB to bytes
embed_limit *= 1024 * 1024
# First write the video to a tempfile. Set delete to False
# so we can re-open to read binary data.
with tempfile.NamedTemporaryFile(suffix='.m4v',
delete=False) as f:
# We create a writer manually so that we can get the
# appropriate size for the tag
Writer = writers[rcParams['animation.writer']]
writer = Writer(codec='h264',
bitrate=rcParams['animation.bitrate'],
fps=1000. / self._interval)
self.save(f.name, writer=writer)
# Now open and base64 encode
with open(f.name, 'rb') as video:
vid64 = encodebytes(video.read())
vid_len = len(vid64)
if vid_len >= embed_limit:
_log.warning(
"Animation movie is %s bytes, exceeding the limit of "
"%s. If you're sure you want a large animation "
"embedded, set the animation.embed_limit rc parameter "
"to a larger value (in MB).", vid_len, embed_limit)
else:
self._base64_video = vid64.decode('ascii')
self._video_size = 'width="{}" height="{}"'.format(
*writer.frame_size)
# Now we can remove
os.remove(f.name)
# If we exceeded the size, this attribute won't exist
if hasattr(self, '_base64_video'):
# Default HTML5 options are to autoplay and display video controls
options = ['controls', 'autoplay']
# If we're set to repeat, make it loop
if hasattr(self, 'repeat') and self.repeat:
options.append('loop')
return VIDEO_TAG.format(video=self._base64_video,
size=self._video_size,
options=' '.join(options))
else:
return 'Video too large to embed.'
def to_jshtml(self, fps=None, embed_frames=True, default_mode=None):
"""Generate HTML representation of the animation"""
if fps is None and hasattr(self, '_interval'):
# Convert interval in ms to frames per second
fps = 1000 / self._interval
        # If we're not given a default mode, choose one based on the value of
# the repeat attribute
if default_mode is None:
default_mode = 'loop' if self.repeat else 'once'
if hasattr(self, "_html_representation"):
return self._html_representation
else:
# Can't open a second time while opened on windows. So we avoid
# deleting when closed, and delete manually later.
with tempfile.NamedTemporaryFile(suffix='.html',
delete=False) as f:
self.save(f.name, writer=HTMLWriter(fps=fps,
embed_frames=embed_frames,
default_mode=default_mode))
# Re-open and get content
with open(f.name) as fobj:
html = fobj.read()
# Now we can delete
os.remove(f.name)
self._html_representation = html
return html
def _repr_html_(self):
'''IPython display hook for rendering.'''
fmt = rcParams['animation.html']
if fmt == 'html5':
return self.to_html5_video()
elif fmt == 'jshtml':
return self.to_jshtml()
class TimedAnimation(Animation):
''':class:`Animation` subclass for time-based animation.
A new frame is drawn every *interval* milliseconds.
Parameters
----------
fig : matplotlib.figure.Figure
The figure object that is used to get draw, resize, and any
other needed events.
interval : number, optional
Delay between frames in milliseconds. Defaults to 200.
repeat_delay : number, optional
        If the animation is repeated, adds a delay in milliseconds
before repeating the animation. Defaults to ``None``.
repeat : bool, optional
Controls whether the animation should repeat when the sequence
of frames is completed. Defaults to ``True``.
blit : bool, optional
Controls whether blitting is used to optimize drawing. Defaults
to ``False``.
'''
def __init__(self, fig, interval=200, repeat_delay=None, repeat=True,
event_source=None, *args, **kwargs):
# Store the timing information
self._interval = interval
self._repeat_delay = repeat_delay
self.repeat = repeat
# If we're not given an event source, create a new timer. This permits
# sharing timers between animation objects for syncing animations.
if event_source is None:
event_source = fig.canvas.new_timer()
event_source.interval = self._interval
Animation.__init__(self, fig, event_source=event_source,
*args, **kwargs)
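    # Sketch of the timer sharing mentioned in __init__ above; ``fig``,
    # ``update_a`` and ``update_b`` are illustrative names, not part of this
    # module:
    #
    #     timer = fig.canvas.new_timer(interval=50)
    #     anim_a = FuncAnimation(fig, update_a, event_source=timer)
    #     anim_b = FuncAnimation(fig, update_b, event_source=timer)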
def _step(self, *args):
'''
Handler for getting events.
'''
# Extends the _step() method for the Animation class. If
# Animation._step signals that it reached the end and we want to
# repeat, we refresh the frame sequence and return True. If
# _repeat_delay is set, change the event_source's interval to our loop
# delay and set the callback to one which will then set the interval
# back.
still_going = Animation._step(self, *args)
if not still_going and self.repeat:
self._init_draw()
self.frame_seq = self.new_frame_seq()
if self._repeat_delay:
self.event_source.remove_callback(self._step)
self.event_source.add_callback(self._loop_delay)
self.event_source.interval = self._repeat_delay
return True
else:
return Animation._step(self, *args)
else:
return still_going
def _stop(self, *args):
        # If we stop in the middle of a loop delay (which is relatively likely
        # given the potential pause here), remove the loop_delay callback as
        # well.
self.event_source.remove_callback(self._loop_delay)
Animation._stop(self)
def _loop_delay(self, *args):
# Reset the interval and change callbacks after the delay.
self.event_source.remove_callback(self._loop_delay)
self.event_source.interval = self._interval
self.event_source.add_callback(self._step)
Animation._step(self)
class ArtistAnimation(TimedAnimation):
'''Animation using a fixed set of `Artist` objects.
Before creating an instance, all plotting should have taken place
and the relevant artists saved.
Parameters
----------
fig : matplotlib.figure.Figure
The figure object that is used to get draw, resize, and any
other needed events.
artists : list
Each list entry a collection of artists that represent what
needs to be enabled on each frame. These will be disabled for
other frames.
interval : number, optional
Delay between frames in milliseconds. Defaults to 200.
repeat_delay : number, optional
        If the animation is repeated, adds a delay in milliseconds
before repeating the animation. Defaults to ``None``.
repeat : bool, optional
Controls whether the animation should repeat when the sequence
of frames is completed. Defaults to ``True``.
blit : bool, optional
Controls whether blitting is used to optimize drawing. Defaults
to ``False``.
'''
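    # Usage sketch (assumes a pyplot session; all artists are created up
    # front, as described above):
    #
    #     import numpy as np
    #     import matplotlib.pyplot as plt
    #
    #     fig, ax = plt.subplots()
    #     x = np.linspace(0, 2 * np.pi, 100)
    #     frames = []
    #     for phase in np.linspace(0, np.pi, 30):
    #         line, = ax.plot(x, np.sin(x + phase), color='C0')
    #         frames.append([line])
    #     anim = ArtistAnimation(fig, frames, interval=50, blit=True)
    #     plt.show()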
def __init__(self, fig, artists, *args, **kwargs):
# Internal list of artists drawn in the most recent frame.
self._drawn_artists = []
# Use the list of artists as the framedata, which will be iterated
# over by the machinery.
self._framedata = artists
TimedAnimation.__init__(self, fig, *args, **kwargs)
def _init_draw(self):
# Make all the artists involved in *any* frame invisible
figs = set()
for f in self.new_frame_seq():
for artist in f:
artist.set_visible(False)
artist.set_animated(self._blit)
# Assemble a list of unique figures that need flushing
if artist.get_figure() not in figs:
figs.add(artist.get_figure())
# Flush the needed figures
for fig in figs:
fig.canvas.draw_idle()
def _pre_draw(self, framedata, blit):
'''
Clears artists from the last frame.
'''
if blit:
# Let blit handle clearing
self._blit_clear(self._drawn_artists, self._blit_cache)
else:
# Otherwise, make all the artists from the previous frame invisible
for artist in self._drawn_artists:
artist.set_visible(False)
def _draw_frame(self, artists):
# Save the artists that were passed in as framedata for the other
# steps (esp. blitting) to use.
self._drawn_artists = artists
# Make all the artists from the current frame visible
for artist in artists:
artist.set_visible(True)
class FuncAnimation(TimedAnimation):
'''
Makes an animation by repeatedly calling a function ``func``.
Parameters
----------
fig : matplotlib.figure.Figure
The figure object that is used to get draw, resize, and any
other needed events.
func : callable
The function to call at each frame. The first argument will
be the next value in ``frames``. Any additional positional
arguments can be supplied via the ``fargs`` parameter.
The required signature is::
def func(frame, *fargs) -> iterable_of_artists:
frames : iterable, int, generator function, or None, optional
        Source of data to pass to ``func`` for each frame of the animation.
If an iterable, then simply use the values provided. If the
iterable has a length, it will override the ``save_count`` kwarg.
If an integer, then equivalent to passing ``range(frames)``
If a generator function, then must have the signature::
def gen_function() -> obj:
If ``None``, then equivalent to passing ``itertools.count``.
        In all of these cases, the values in *frames* are simply passed through
to the user-supplied *func* and thus can be of any type.
init_func : callable, optional
A function used to draw a clear frame. If not given, the
results of drawing from the first item in the frames sequence
will be used. This function will be called once before the
first frame.
If ``blit == True``, ``init_func`` must return an iterable of artists
to be re-drawn.
The required signature is::
def init_func() -> iterable_of_artists:
fargs : tuple or None, optional
Additional arguments to pass to each call to *func*.
save_count : int, optional
The number of values from *frames* to cache.
interval : number, optional
Delay between frames in milliseconds. Defaults to 200.
repeat_delay : number, optional
        If the animation is repeated, adds a delay in milliseconds
before repeating the animation. Defaults to ``None``.
repeat : bool, optional
Controls whether the animation should repeat when the sequence
of frames is completed. Defaults to ``True``.
blit : bool, optional
Controls whether blitting is used to optimize drawing. Defaults
to ``False``.
'''
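    # Usage sketch (assumes a pyplot session; ``update`` is an illustrative
    # frame callback):
    #
    #     import numpy as np
    #     import matplotlib.pyplot as plt
    #
    #     fig, ax = plt.subplots()
    #     x = np.linspace(0, 2 * np.pi, 200)
    #     line, = ax.plot(x, np.sin(x))
    #
    #     def update(frame):
    #         line.set_ydata(np.sin(x + frame / 10.))
    #         return line,
    #
    #     anim = FuncAnimation(fig, update, frames=100, interval=50, blit=True)
    #     plt.show()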
def __init__(self, fig, func, frames=None, init_func=None, fargs=None,
save_count=None, **kwargs):
if fargs:
self._args = fargs
else:
self._args = ()
self._func = func
# Amount of framedata to keep around for saving movies. This is only
# used if we don't know how many frames there will be: in the case
# of no generator or in the case of a callable.
self.save_count = save_count
# Set up a function that creates a new iterable when needed. If nothing
# is passed in for frames, just use itertools.count, which will just
# keep counting from 0. A callable passed in for frames is assumed to
# be a generator. An iterable will be used as is, and anything else
# will be treated as a number of frames.
if frames is None:
self._iter_gen = itertools.count
elif callable(frames):
self._iter_gen = frames
elif cbook.iterable(frames):
self._iter_gen = lambda: iter(frames)
if hasattr(frames, '__len__'):
self.save_count = len(frames)
else:
self._iter_gen = lambda: iter(xrange(frames))
self.save_count = frames
if self.save_count is None:
            # If save_count was not specified, default to caching 100 frames.
self.save_count = 100
else:
# itertools.islice returns an error when passed a numpy int instead
# of a native python int (http://bugs.python.org/issue30537).
# As a workaround, convert save_count to a native python int.
self.save_count = int(self.save_count)
self._init_func = init_func
# Needs to be initialized so the draw functions work without checking
self._save_seq = []
TimedAnimation.__init__(self, fig, **kwargs)
# Need to reset the saved seq, since right now it will contain data
# for a single frame from init, which is not what we want.
self._save_seq = []
def new_frame_seq(self):
# Use the generating function to generate a new frame sequence
return self._iter_gen()
def new_saved_frame_seq(self):
# Generate an iterator for the sequence of saved data. If there are
# no saved frames, generate a new frame sequence and take the first
# save_count entries in it.
if self._save_seq:
# While iterating we are going to update _save_seq
# so make a copy to safely iterate over
self._old_saved_seq = list(self._save_seq)
return iter(self._old_saved_seq)
else:
if self.save_count is not None:
return itertools.islice(self.new_frame_seq(), self.save_count)
else:
frame_seq = self.new_frame_seq()
def gen():
try:
for _ in range(100):
yield next(frame_seq)
except StopIteration:
pass
else:
cbook.warn_deprecated(
"2.2", "FuncAnimation.save has truncated your "
"animation to 100 frames. In the future, no such "
"truncation will occur; please pass 'save_count' "
"accordingly.")
return gen()
def _init_draw(self):
# Initialize the drawing either using the given init_func or by
# calling the draw function with the first item of the frame sequence.
# For blitting, the init_func should return a sequence of modified
# artists.
if self._init_func is None:
self._draw_frame(next(self.new_frame_seq()))
else:
self._drawn_artists = self._init_func()
if self._blit:
if self._drawn_artists is None:
raise RuntimeError('The init_func must return a '
'sequence of Artist objects.')
for a in self._drawn_artists:
a.set_animated(self._blit)
self._save_seq = []
def _draw_frame(self, framedata):
# Save the data for potential saving of movies.
self._save_seq.append(framedata)
# Make sure to respect save_count (keep only the last save_count
# around)
self._save_seq = self._save_seq[-self.save_count:]
# Call the func with framedata and args. If blitting is desired,
# func needs to return a sequence of any artists that were modified.
self._drawn_artists = self._func(framedata, *self._args)
if self._blit:
if self._drawn_artists is None:
raise RuntimeError('The animation function must return a '
'sequence of Artist objects.')
for a in self._drawn_artists:
a.set_animated(self._blit)
| 67,426 | 36.90163 | 116 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/colors.py |
"""
A module for converting numbers or color arguments to *RGB* or *RGBA*.
*RGB* and *RGBA* are sequences of, respectively, 3 or 4 floats in the
range 0-1.
This module includes functions and classes for color specification
conversions, and for mapping numbers to colors in a 1-D array of colors called
a colormap. Colormapping typically involves two steps: a data array is first
mapped onto the range 0-1 using an instance of :class:`Normalize` or of a
subclass; then this number in the 0-1 range is mapped to a color using an
instance of a subclass of :class:`Colormap`. Two are provided here:
:class:`LinearSegmentedColormap`, which is used to generate all the built-in
colormap instances, but is also useful for making custom colormaps, and
:class:`ListedColormap`, which is used for generating a custom colormap from a
list of color specifications.
The module also provides functions for checking whether an object can be
interpreted as a color (:func:`is_color_like`), for converting such an object
to an RGBA tuple (:func:`to_rgba`) or to an HTML-like hex string in the
`#rrggbb` format (:func:`to_hex`), and a sequence of colors to an `(n, 4)`
RGBA array (:func:`to_rgba_array`). Caching is used for efficiency.
Matplotlib recognizes the following formats to specify a color:
* an RGB or RGBA tuple of float values in ``[0, 1]`` (e.g., ``(0.1, 0.2, 0.5)``
or ``(0.1, 0.2, 0.5, 0.3)``);
* a hex RGB or RGBA string (e.g., ``'#0F0F0F'`` or ``'#0F0F0F0F'``);
* a string representation of a float value in ``[0, 1]`` inclusive for gray
level (e.g., ``'0.5'``);
* one of ``{'b', 'g', 'r', 'c', 'm', 'y', 'k', 'w'}``;
* a X11/CSS4 color name;
* a name from the `xkcd color survey <https://xkcd.com/color/rgb/>`__;
prefixed with ``'xkcd:'`` (e.g., ``'xkcd:sky blue'``);
* one of ``{'tab:blue', 'tab:orange', 'tab:green',
'tab:red', 'tab:purple', 'tab:brown', 'tab:pink',
'tab:gray', 'tab:olive', 'tab:cyan'}`` which are the Tableau Colors from the
'T10' categorical palette (which is the default color cycle);
* a "CN" color spec, i.e. `'C'` followed by a single digit, which is an index
into the default property cycle (``matplotlib.rcParams['axes.prop_cycle']``);
the indexing occurs at artist creation time and defaults to black if the
cycle does not include color.
All string specifications of color, other than "CN", are case-insensitive.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import zip
from collections import Sized
import itertools
import re
import warnings
import numpy as np
import matplotlib.cbook as cbook
from ._color_data import BASE_COLORS, TABLEAU_COLORS, CSS4_COLORS, XKCD_COLORS
class _ColorMapping(dict):
def __init__(self, mapping):
super(_ColorMapping, self).__init__(mapping)
self.cache = {}
def __setitem__(self, key, value):
super(_ColorMapping, self).__setitem__(key, value)
self.cache.clear()
def __delitem__(self, key):
super(_ColorMapping, self).__delitem__(key)
self.cache.clear()
_colors_full_map = {}
# Set by reverse priority order.
_colors_full_map.update(XKCD_COLORS)
_colors_full_map.update({k.replace('grey', 'gray'): v
for k, v in XKCD_COLORS.items()
if 'grey' in k})
_colors_full_map.update(CSS4_COLORS)
_colors_full_map.update(TABLEAU_COLORS)
_colors_full_map.update({k.replace('gray', 'grey'): v
for k, v in TABLEAU_COLORS.items()
if 'gray' in k})
_colors_full_map.update(BASE_COLORS)
_colors_full_map = _ColorMapping(_colors_full_map)
def get_named_colors_mapping():
"""Return the global mapping of names to named colors."""
return _colors_full_map
def _sanitize_extrema(ex):
if ex is None:
return ex
try:
ret = np.asscalar(ex)
except AttributeError:
ret = float(ex)
return ret
def _is_nth_color(c):
"""Return whether *c* can be interpreted as an item in the color cycle."""
return isinstance(c, six.string_types) and re.match(r"\AC[0-9]\Z", c)
def is_color_like(c):
"""Return whether *c* can be interpreted as an RGB(A) color."""
# Special-case nth color syntax because it cannot be parsed during
# setup.
if _is_nth_color(c):
return True
try:
to_rgba(c)
except ValueError:
return False
else:
return True
def same_color(c1, c2):
"""
Compare two colors to see if they are the same.
Parameters
----------
c1, c2 : Matplotlib colors
Returns
-------
bool
``True`` if *c1* and *c2* are the same color, otherwise ``False``.
"""
return (to_rgba_array(c1) == to_rgba_array(c2)).all()
def to_rgba(c, alpha=None):
"""
Convert *c* to an RGBA color.
Parameters
----------
c : Matplotlib color
alpha : scalar, optional
If *alpha* is not ``None``, it forces the alpha value, except if *c* is
``"none"`` (case-insensitive), which always maps to ``(0, 0, 0, 0)``.
Returns
-------
tuple
Tuple of ``(r, g, b, a)`` scalars.
"""
# Special-case nth color syntax because it should not be cached.
if _is_nth_color(c):
from matplotlib import rcParams
prop_cycler = rcParams['axes.prop_cycle']
colors = prop_cycler.by_key().get('color', ['k'])
c = colors[int(c[1]) % len(colors)]
try:
rgba = _colors_full_map.cache[c, alpha]
except (KeyError, TypeError): # Not in cache, or unhashable.
rgba = _to_rgba_no_colorcycle(c, alpha)
try:
_colors_full_map.cache[c, alpha] = rgba
except TypeError:
pass
return rgba
def _to_rgba_no_colorcycle(c, alpha=None):
"""Convert *c* to an RGBA color, with no support for color-cycle syntax.
If *alpha* is not ``None``, it forces the alpha value, except if *c* is
``"none"`` (case-insensitive), which always maps to ``(0, 0, 0, 0)``.
"""
orig_c = c
if isinstance(c, six.string_types):
if c.lower() == "none":
return (0., 0., 0., 0.)
# Named color.
try:
# This may turn c into a non-string, so we check again below.
c = _colors_full_map[c.lower()]
except KeyError:
pass
if isinstance(c, six.string_types):
# hex color with no alpha.
match = re.match(r"\A#[a-fA-F0-9]{6}\Z", c)
if match:
return (tuple(int(n, 16) / 255
for n in [c[1:3], c[3:5], c[5:7]])
+ (alpha if alpha is not None else 1.,))
# hex color with alpha.
match = re.match(r"\A#[a-fA-F0-9]{8}\Z", c)
if match:
color = [int(n, 16) / 255
for n in [c[1:3], c[3:5], c[5:7], c[7:9]]]
if alpha is not None:
color[-1] = alpha
return tuple(color)
# string gray.
try:
return (float(c),) * 3 + (alpha if alpha is not None else 1.,)
except ValueError:
pass
raise ValueError("Invalid RGBA argument: {!r}".format(orig_c))
# tuple color.
c = np.array(c)
if not np.can_cast(c.dtype, float, "same_kind") or c.ndim != 1:
# Test the dtype explicitly as `map(float, ...)`, `np.array(...,
# float)` and `np.array(...).astype(float)` all convert "0.5" to 0.5.
# Test dimensionality to reject single floats.
raise ValueError("Invalid RGBA argument: {!r}".format(orig_c))
# Return a tuple to prevent the cached value from being modified.
c = tuple(c.astype(float))
if len(c) not in [3, 4]:
raise ValueError("RGBA sequence should have length 3 or 4")
if len(c) == 3 and alpha is None:
alpha = 1
if alpha is not None:
c = c[:3] + (alpha,)
if any(elem < 0 or elem > 1 for elem in c):
raise ValueError("RGBA values should be within 0-1 range")
return c
def to_rgba_array(c, alpha=None):
"""Convert *c* to a (n, 4) array of RGBA colors.
If *alpha* is not ``None``, it forces the alpha value. If *c* is
``"none"`` (case-insensitive) or an empty list, an empty array is returned.
"""
# Special-case inputs that are already arrays, for performance. (If the
# array has the wrong kind or shape, raise the error during one-at-a-time
# conversion.)
if (isinstance(c, np.ndarray) and c.dtype.kind in "if"
and c.ndim == 2 and c.shape[1] in [3, 4]):
if c.shape[1] == 3:
result = np.column_stack([c, np.zeros(len(c))])
result[:, -1] = alpha if alpha is not None else 1.
elif c.shape[1] == 4:
result = c.copy()
if alpha is not None:
result[:, -1] = alpha
if np.any((result < 0) | (result > 1)):
raise ValueError("RGBA values should be within 0-1 range")
return result
# Handle single values.
# Note that this occurs *after* handling inputs that are already arrays, as
# `to_rgba(c, alpha)` (below) is expensive for such inputs, due to the need
# to format the array in the ValueError message(!).
if isinstance(c, six.string_types) and c.lower() == "none":
return np.zeros((0, 4), float)
try:
return np.array([to_rgba(c, alpha)], float)
except (ValueError, TypeError):
pass
# Convert one at a time.
result = np.empty((len(c), 4), float)
for i, cc in enumerate(c):
result[i] = to_rgba(cc, alpha)
return result
def to_rgb(c):
"""Convert *c* to an RGB color, silently dropping the alpha channel."""
return to_rgba(c)[:3]
def to_hex(c, keep_alpha=False):
"""Convert *c* to a hex color.
Uses the ``#rrggbb`` format if *keep_alpha* is False (the default),
``#rrggbbaa`` otherwise.
"""
c = to_rgba(c)
if not keep_alpha:
c = c[:3]
return "#" + "".join(format(int(np.round(val * 255)), "02x")
for val in c)
### Backwards-compatible color-conversion API
cnames = CSS4_COLORS
hexColorPattern = re.compile(r"\A#[a-fA-F0-9]{6}\Z")
rgb2hex = to_hex
hex2color = to_rgb
class ColorConverter(object):
"""
Provides methods for converting color specifications to *RGB* or *RGBA*
Caching is used for more efficient conversion upon repeated calls
with the same argument.
Ordinarily only the single instance instantiated in this module,
*colorConverter*, is needed.
"""
colors = _colors_full_map
cache = _colors_full_map.cache
@staticmethod
def to_rgb(arg):
"""
Returns an *RGB* tuple of three floats from 0-1.
*arg* can be an *RGB* or *RGBA* sequence or a string in any of
several forms:
1) a letter from the set 'rgbcmykw'
2) a hex color string, like '#00FFFF'
3) a standard name, like 'aqua'
4) a string representation of a float, like '0.4',
indicating gray on a 0-1 scale
if *arg* is *RGBA*, the *A* will simply be discarded.
"""
return to_rgb(arg)
@staticmethod
def to_rgba(arg, alpha=None):
"""
Returns an *RGBA* tuple of four floats from 0-1.
For acceptable values of *arg*, see :meth:`to_rgb`.
In addition, if *arg* is "none" (case-insensitive),
then (0,0,0,0) will be returned.
If *arg* is an *RGBA* sequence and *alpha* is not *None*,
*alpha* will replace the original *A*.
"""
return to_rgba(arg, alpha)
@staticmethod
def to_rgba_array(arg, alpha=None):
"""
Returns a numpy array of *RGBA* tuples.
Accepts a single mpl color spec or a sequence of specs.
Special case to handle "no color": if *c* is "none" (case-insensitive),
then an empty array will be returned. Same for an empty list.
"""
return to_rgba_array(arg, alpha)
colorConverter = ColorConverter()
### End of backwards-compatible color-conversion API
def makeMappingArray(N, data, gamma=1.0):
"""Create an *N* -element 1-d lookup table
*data* represented by a list of x,y0,y1 mapping correspondences.
Each element in this list represents how a value between 0 and 1
(inclusive) represented by x is mapped to a corresponding value
between 0 and 1 (inclusive). The two values of y are to allow
for discontinuous mapping functions (say as might be found in a
    sawtooth), where y0 represents the value of y for values of x less
    than or equal to the given x, and y1 is the value to be used for x
    greater than the given x. The list must start with x=0, end with x=1, and
all values of x must be in increasing order. Values between
the given mapping points are determined by simple linear interpolation.
Alternatively, data can be a function mapping values between 0 - 1
to 0 - 1.
The function returns an array "result" where ``result[x*(N-1)]``
gives the closest value for values of x between 0 and 1.
"""
if callable(data):
xind = np.linspace(0, 1, N) ** gamma
lut = np.clip(np.array(data(xind), dtype=float), 0, 1)
return lut
try:
adata = np.array(data)
except Exception:
raise TypeError("data must be convertible to an array")
shape = adata.shape
if len(shape) != 2 or shape[1] != 3:
raise ValueError("data must be nx3 format")
x = adata[:, 0]
y0 = adata[:, 1]
y1 = adata[:, 2]
if x[0] != 0. or x[-1] != 1.0:
raise ValueError(
"data mapping points must start with x=0 and end with x=1")
if (np.diff(x) < 0).any():
raise ValueError("data mapping points must have x in increasing order")
# begin generation of lookup table
x = x * (N - 1)
lut = np.zeros((N,), float)
xind = (N - 1) * np.linspace(0, 1, N) ** gamma
ind = np.searchsorted(x, xind)[1:-1]
distance = (xind[1:-1] - x[ind - 1]) / (x[ind] - x[ind - 1])
lut[1:-1] = distance * (y0[ind] - y1[ind - 1]) + y1[ind - 1]
lut[0] = y1[0]
lut[-1] = y0[-1]
# ensure that the lut is confined to values between 0 and 1 by clipping it
return np.clip(lut, 0.0, 1.0)
class Colormap(object):
"""
Baseclass for all scalar to RGBA mappings.
Typically Colormap instances are used to convert data values (floats) from
the interval ``[0, 1]`` to the RGBA color that the respective Colormap
represents. For scaling of data into the ``[0, 1]`` interval see
:class:`matplotlib.colors.Normalize`. It is worth noting that
:class:`matplotlib.cm.ScalarMappable` subclasses make heavy use of this
``data->normalize->map-to-color`` processing chain.
"""
def __init__(self, name, N=256):
"""
Parameters
----------
name : str
The name of the colormap.
N : int
The number of rgb quantization levels.
"""
self.name = name
self.N = int(N) # ensure that N is always int
self._rgba_bad = (0.0, 0.0, 0.0, 0.0) # If bad, don't paint anything.
self._rgba_under = None
self._rgba_over = None
self._i_under = self.N
self._i_over = self.N + 1
self._i_bad = self.N + 2
self._isinit = False
#: When this colormap exists on a scalar mappable and colorbar_extend
#: is not False, colorbar creation will pick up ``colorbar_extend`` as
#: the default value for the ``extend`` keyword in the
#: :class:`matplotlib.colorbar.Colorbar` constructor.
self.colorbar_extend = False
def __call__(self, X, alpha=None, bytes=False):
"""
Parameters
----------
X : scalar, ndarray
The data value(s) to convert to RGBA.
For floats, X should be in the interval ``[0.0, 1.0]`` to
return the RGBA values ``X*100`` percent along the Colormap line.
For integers, X should be in the interval ``[0, Colormap.N)`` to
return RGBA values *indexed* from the Colormap with index ``X``.
alpha : float, None
Alpha must be a scalar between 0 and 1, or None.
bytes : bool
If False (default), the returned RGBA values will be floats in the
interval ``[0, 1]`` otherwise they will be uint8s in the interval
``[0, 255]``.
Returns
-------
Tuple of RGBA values if X is scalar, otherwise an array of
RGBA values with a shape of ``X.shape + (4, )``.
"""
# See class docstring for arg/kwarg documentation.
if not self._isinit:
self._init()
mask_bad = None
if not cbook.iterable(X):
vtype = 'scalar'
xa = np.array([X])
else:
vtype = 'array'
xma = np.ma.array(X, copy=True) # Copy here to avoid side effects.
mask_bad = xma.mask # Mask will be used below.
xa = xma.filled() # Fill to avoid infs, etc.
del xma
# Calculations with native byteorder are faster, and avoid a
# bug that otherwise can occur with putmask when the last
# argument is a numpy scalar.
if not xa.dtype.isnative:
xa = xa.byteswap().newbyteorder()
if xa.dtype.kind == "f":
xa *= self.N
# Negative values are out of range, but astype(int) would truncate
# them towards zero.
xa[xa < 0] = -1
# xa == 1 (== N after multiplication) is not out of range.
xa[xa == self.N] = self.N - 1
# Avoid converting large positive values to negative integers.
np.clip(xa, -1, self.N, out=xa)
xa = xa.astype(int)
# Set the over-range indices before the under-range;
# otherwise the under-range values get converted to over-range.
xa[xa > self.N - 1] = self._i_over
xa[xa < 0] = self._i_under
if mask_bad is not None:
if mask_bad.shape == xa.shape:
np.copyto(xa, self._i_bad, where=mask_bad)
elif mask_bad:
xa.fill(self._i_bad)
if bytes:
lut = (self._lut * 255).astype(np.uint8)
else:
lut = self._lut.copy() # Don't let alpha modify original _lut.
if alpha is not None:
alpha = min(alpha, 1.0) # alpha must be between 0 and 1
alpha = max(alpha, 0.0)
if bytes:
alpha = int(alpha * 255)
if (lut[-1] == 0).all():
lut[:-1, -1] = alpha
# All zeros is taken as a flag for the default bad
# color, which is no color--fully transparent. We
# don't want to override this.
else:
lut[:, -1] = alpha
# If the bad value is set to have a color, then we
# override its alpha just as for any other value.
rgba = np.empty(shape=xa.shape + (4,), dtype=lut.dtype)
lut.take(xa, axis=0, mode='clip', out=rgba)
if vtype == 'scalar':
rgba = tuple(rgba[0, :])
return rgba
def __copy__(self):
"""Create new object with the same class, update attributes
"""
cls = self.__class__
cmapobject = cls.__new__(cls)
cmapobject.__dict__.update(self.__dict__)
if self._isinit:
cmapobject._lut = np.copy(self._lut)
return cmapobject
def set_bad(self, color='k', alpha=None):
"""Set color to be used for masked values.
"""
self._rgba_bad = colorConverter.to_rgba(color, alpha)
if self._isinit:
self._set_extremes()
def set_under(self, color='k', alpha=None):
"""Set color to be used for low out-of-range values.
Requires norm.clip = False
"""
self._rgba_under = colorConverter.to_rgba(color, alpha)
if self._isinit:
self._set_extremes()
def set_over(self, color='k', alpha=None):
"""Set color to be used for high out-of-range values.
Requires norm.clip = False
"""
self._rgba_over = colorConverter.to_rgba(color, alpha)
if self._isinit:
self._set_extremes()
def _set_extremes(self):
if self._rgba_under:
self._lut[self._i_under] = self._rgba_under
else:
self._lut[self._i_under] = self._lut[0]
if self._rgba_over:
self._lut[self._i_over] = self._rgba_over
else:
self._lut[self._i_over] = self._lut[self.N - 1]
self._lut[self._i_bad] = self._rgba_bad
def _init(self):
"""Generate the lookup table, self._lut"""
raise NotImplementedError("Abstract class only")
def is_gray(self):
if not self._isinit:
self._init()
return (np.all(self._lut[:, 0] == self._lut[:, 1]) and
np.all(self._lut[:, 0] == self._lut[:, 2]))
def _resample(self, lutsize):
"""
Return a new color map with *lutsize* entries.
"""
raise NotImplementedError()
def reversed(self, name=None):
"""
Make a reversed instance of the Colormap.
.. note :: Function not implemented for base class.
Parameters
----------
name : str, optional
The name for the reversed colormap. If it's None the
name will be the name of the parent colormap + "_r".
Notes
-----
See :meth:`LinearSegmentedColormap.reversed` and
:meth:`ListedColormap.reversed`
"""
raise NotImplementedError()
class LinearSegmentedColormap(Colormap):
"""Colormap objects based on lookup tables using linear segments.
The lookup table is generated using linear interpolation for each
primary color, with the 0-1 domain divided into any number of
segments.
"""
def __init__(self, name, segmentdata, N=256, gamma=1.0):
"""Create color map from linear mapping segments
segmentdata argument is a dictionary with a red, green and blue
entries. Each entry should be a list of *x*, *y0*, *y1* tuples,
forming rows in a table. Entries for alpha are optional.
Example: suppose you want red to increase from 0 to 1 over
the bottom half, green to do the same over the middle half,
and blue over the top half. Then you would use::
cdict = {'red': [(0.0, 0.0, 0.0),
(0.5, 1.0, 1.0),
(1.0, 1.0, 1.0)],
'green': [(0.0, 0.0, 0.0),
(0.25, 0.0, 0.0),
(0.75, 1.0, 1.0),
(1.0, 1.0, 1.0)],
'blue': [(0.0, 0.0, 0.0),
(0.5, 0.0, 0.0),
(1.0, 1.0, 1.0)]}
Each row in the table for a given color is a sequence of
*x*, *y0*, *y1* tuples. In each sequence, *x* must increase
monotonically from 0 to 1. For any input value *z* falling
between *x[i]* and *x[i+1]*, the output value of a given color
will be linearly interpolated between *y1[i]* and *y0[i+1]*::
row i: x y0 y1
/
/
row i+1: x y0 y1
Hence y0 in the first row and y1 in the last row are never used.
.. seealso::
:meth:`LinearSegmentedColormap.from_list`
Static method; factory function for generating a
smoothly-varying LinearSegmentedColormap.
:func:`makeMappingArray`
For information about making a mapping array.
"""
# True only if all colors in map are identical; needed for contouring.
self.monochrome = False
Colormap.__init__(self, name, N)
self._segmentdata = segmentdata
self._gamma = gamma
def _init(self):
self._lut = np.ones((self.N + 3, 4), float)
self._lut[:-3, 0] = makeMappingArray(
self.N, self._segmentdata['red'], self._gamma)
self._lut[:-3, 1] = makeMappingArray(
self.N, self._segmentdata['green'], self._gamma)
self._lut[:-3, 2] = makeMappingArray(
self.N, self._segmentdata['blue'], self._gamma)
if 'alpha' in self._segmentdata:
self._lut[:-3, 3] = makeMappingArray(
self.N, self._segmentdata['alpha'], 1)
self._isinit = True
self._set_extremes()
def set_gamma(self, gamma):
"""
Set a new gamma value and regenerate color map.
"""
self._gamma = gamma
self._init()
@staticmethod
def from_list(name, colors, N=256, gamma=1.0):
"""
Make a linear segmented colormap with *name* from a sequence
of *colors* which evenly transitions from colors[0] at val=0
to colors[-1] at val=1. *N* is the number of rgb quantization
levels.
Alternatively, a list of (value, color) tuples can be given
to divide the range unevenly.
"""
if not cbook.iterable(colors):
raise ValueError('colors must be iterable')
if (isinstance(colors[0], Sized) and len(colors[0]) == 2
and not isinstance(colors[0], six.string_types)):
# List of value, color pairs
vals, colors = zip(*colors)
else:
vals = np.linspace(0, 1, len(colors))
cdict = dict(red=[], green=[], blue=[], alpha=[])
for val, color in zip(vals, colors):
r, g, b, a = colorConverter.to_rgba(color)
cdict['red'].append((val, r, r))
cdict['green'].append((val, g, g))
cdict['blue'].append((val, b, b))
cdict['alpha'].append((val, a, a))
return LinearSegmentedColormap(name, cdict, N, gamma)
def _resample(self, lutsize):
"""
Return a new color map with *lutsize* entries.
"""
return LinearSegmentedColormap(self.name, self._segmentdata, lutsize)
def reversed(self, name=None):
"""
Make a reversed instance of the Colormap.
Parameters
----------
name : str, optional
The name for the reversed colormap. If it's None the
name will be the name of the parent colormap + "_r".
Returns
-------
LinearSegmentedColormap
The reversed colormap.
"""
if name is None:
name = self.name + "_r"
# Function factory needed to deal with 'late binding' issue.
def factory(dat):
def func_r(x):
return dat(1.0 - x)
return func_r
data_r = dict()
for key, data in six.iteritems(self._segmentdata):
if callable(data):
data_r[key] = factory(data)
else:
new_data = [(1.0 - x, y1, y0) for x, y0, y1 in reversed(data)]
data_r[key] = new_data
return LinearSegmentedColormap(name, data_r, self.N, self._gamma)
class ListedColormap(Colormap):
"""Colormap object generated from a list of colors.
This may be most useful when indexing directly into a colormap,
but it can also be used to generate special colormaps for ordinary
mapping.
"""
def __init__(self, colors, name='from_list', N=None):
"""
Make a colormap from a list of colors.
*colors*
a list of matplotlib color specifications,
or an equivalent Nx3 or Nx4 floating point array
(*N* rgb or rgba values)
*name*
a string to identify the colormap
*N*
the number of entries in the map. The default is *None*,
in which case there is one colormap entry for each
element in the list of colors. If::
N < len(colors)
the list will be truncated at *N*. If::
N > len(colors)
the list will be extended by repetition.
"""
self.monochrome = False # True only if all colors in map are
# identical; needed for contouring.
if N is None:
self.colors = colors
N = len(colors)
else:
if isinstance(colors, six.string_types):
self.colors = [colors] * N
self.monochrome = True
elif cbook.iterable(colors):
if len(colors) == 1:
self.monochrome = True
self.colors = list(
itertools.islice(itertools.cycle(colors), N))
else:
try:
gray = float(colors)
except TypeError:
pass
else:
self.colors = [gray] * N
self.monochrome = True
Colormap.__init__(self, name, N)
def _init(self):
rgba = colorConverter.to_rgba_array(self.colors)
self._lut = np.zeros((self.N + 3, 4), float)
self._lut[:-3] = rgba
self._isinit = True
self._set_extremes()
def _resample(self, lutsize):
"""
Return a new color map with *lutsize* entries.
"""
colors = self(np.linspace(0, 1, lutsize))
return ListedColormap(colors, name=self.name)
def reversed(self, name=None):
"""
Make a reversed instance of the Colormap.
Parameters
----------
name : str, optional
The name for the reversed colormap. If it's None the
name will be the name of the parent colormap + "_r".
Returns
-------
ListedColormap
A reversed instance of the colormap.
"""
if name is None:
name = self.name + "_r"
colors_r = list(reversed(self.colors))
return ListedColormap(colors_r, name=name, N=self.N)
class Normalize(object):
"""
A class which, when called, can normalize data into
the ``[0.0, 1.0]`` interval.
"""
def __init__(self, vmin=None, vmax=None, clip=False):
"""
If *vmin* or *vmax* is not given, they are initialized from the
minimum and maximum value respectively of the first input
processed. That is, *__call__(A)* calls *autoscale_None(A)*.
If *clip* is *True* and the given value falls outside the range,
the returned value will be 0 or 1, whichever is closer.
Returns 0 if::
vmin==vmax
Works with scalars or arrays, including masked arrays. If
*clip* is *True*, masked values are set to 1; otherwise they
remain masked. Clipping silently defeats the purpose of setting
the over, under, and masked colors in the colormap, so it is
likely to lead to surprises; therefore the default is
*clip* = *False*.
"""
self.vmin = _sanitize_extrema(vmin)
self.vmax = _sanitize_extrema(vmax)
self.clip = clip
@staticmethod
def process_value(value):
"""
Homogenize the input *value* for easy and efficient normalization.
*value* can be a scalar or sequence.
Returns *result*, *is_scalar*, where *result* is a
masked array matching *value*. Float dtypes are preserved;
integer types with two bytes or smaller are converted to
np.float32, and larger types are converted to np.float64.
Preserving float32 when possible, and using in-place operations,
can greatly improve speed for large arrays.
Experimental; we may want to add an option to force the
use of float32.
"""
is_scalar = not cbook.iterable(value)
if is_scalar:
value = [value]
dtype = np.min_scalar_type(value)
if np.issubdtype(dtype, np.integer) or dtype.type is np.bool_:
# bool_/int8/int16 -> float32; int32/int64 -> float64
dtype = np.promote_types(dtype, np.float32)
# ensure data passed in as an ndarray subclass are interpreted as
# an ndarray. See issue #6622.
mask = np.ma.getmask(value)
data = np.asarray(np.ma.getdata(value))
result = np.ma.array(data, mask=mask, dtype=dtype, copy=True)
return result, is_scalar
def __call__(self, value, clip=None):
"""
Normalize *value* data in the ``[vmin, vmax]`` interval into
the ``[0.0, 1.0]`` interval and return it. *clip* defaults
to *self.clip* (which defaults to *False*). If not already
initialized, *vmin* and *vmax* are initialized using
*autoscale_None(value)*.
"""
if clip is None:
clip = self.clip
result, is_scalar = self.process_value(value)
self.autoscale_None(result)
# Convert at least to float, without losing precision.
(vmin,), _ = self.process_value(self.vmin)
(vmax,), _ = self.process_value(self.vmax)
if vmin == vmax:
result.fill(0) # Or should it be all masked? Or 0.5?
elif vmin > vmax:
raise ValueError("minvalue must be less than or equal to maxvalue")
else:
if clip:
mask = np.ma.getmask(result)
result = np.ma.array(np.clip(result.filled(vmax), vmin, vmax),
mask=mask)
# ma division is very slow; we can take a shortcut
resdat = result.data
resdat -= vmin
resdat /= (vmax - vmin)
result = np.ma.array(resdat, mask=result.mask, copy=False)
# Agg cannot handle float128. We actually only need 32-bit of
# precision, but on Windows, `np.dtype(np.longdouble) == np.float64`,
# so casting to float32 would lose precision on float64s as well.
if result.dtype == np.longdouble:
result = result.astype(np.float64)
if is_scalar:
result = result[0]
return result
def inverse(self, value):
if not self.scaled():
raise ValueError("Not invertible until scaled")
(vmin,), _ = self.process_value(self.vmin)
(vmax,), _ = self.process_value(self.vmax)
if cbook.iterable(value):
val = np.ma.asarray(value)
return vmin + val * (vmax - vmin)
else:
return vmin + value * (vmax - vmin)
def autoscale(self, A):
"""
Set *vmin*, *vmax* to min, max of *A*.
"""
A = np.asanyarray(A)
self.vmin = A.min()
self.vmax = A.max()
def autoscale_None(self, A):
"""autoscale only None-valued vmin or vmax."""
A = np.asanyarray(A)
if self.vmin is None and A.size:
self.vmin = A.min()
if self.vmax is None and A.size:
self.vmax = A.max()
def scaled(self):
'return true if vmin and vmax set'
return (self.vmin is not None and self.vmax is not None)
class LogNorm(Normalize):
"""
Normalize a given value to the 0-1 range on a log scale
"""
def __call__(self, value, clip=None):
if clip is None:
clip = self.clip
result, is_scalar = self.process_value(value)
result = np.ma.masked_less_equal(result, 0, copy=False)
self.autoscale_None(result)
vmin, vmax = self.vmin, self.vmax
if vmin > vmax:
raise ValueError("minvalue must be less than or equal to maxvalue")
elif vmin <= 0:
raise ValueError("values must all be positive")
elif vmin == vmax:
result.fill(0)
else:
if clip:
mask = np.ma.getmask(result)
result = np.ma.array(np.clip(result.filled(vmax), vmin, vmax),
mask=mask)
# in-place equivalent of above can be much faster
resdat = result.data
mask = result.mask
if mask is np.ma.nomask:
mask = (resdat <= 0)
else:
mask |= resdat <= 0
np.copyto(resdat, 1, where=mask)
np.log(resdat, resdat)
resdat -= np.log(vmin)
resdat /= (np.log(vmax) - np.log(vmin))
result = np.ma.array(resdat, mask=mask, copy=False)
if is_scalar:
result = result[0]
return result
def inverse(self, value):
if not self.scaled():
raise ValueError("Not invertible until scaled")
vmin, vmax = self.vmin, self.vmax
if cbook.iterable(value):
val = np.ma.asarray(value)
return vmin * np.ma.power((vmax / vmin), val)
else:
return vmin * pow((vmax / vmin), value)
def autoscale(self, A):
"""
Set *vmin*, *vmax* to min, max of *A*.
"""
A = np.ma.masked_less_equal(A, 0, copy=False)
self.vmin = np.ma.min(A)
self.vmax = np.ma.max(A)
def autoscale_None(self, A):
"""autoscale only None-valued vmin or vmax."""
if self.vmin is not None and self.vmax is not None:
return
A = np.ma.masked_less_equal(A, 0, copy=False)
if self.vmin is None and A.size:
self.vmin = A.min()
if self.vmax is None and A.size:
self.vmax = A.max()
class SymLogNorm(Normalize):
"""
The symmetrical logarithmic scale is logarithmic in both the
positive and negative directions from the origin.
Since the values close to zero tend toward infinity, there is a
need to have a range around zero that is linear. The parameter
*linthresh* allows the user to specify the size of this range
(-*linthresh*, *linthresh*).
"""
def __init__(self, linthresh, linscale=1.0,
vmin=None, vmax=None, clip=False):
"""
*linthresh*:
The range within which the plot is linear (to
avoid having the plot go to infinity around zero).
*linscale*:
This allows the linear range (-*linthresh* to *linthresh*)
to be stretched relative to the logarithmic range. Its
value is the number of decades to use for each half of the
linear range. For example, when *linscale* == 1.0 (the
default), the space used for the positive and negative
halves of the linear range will be equal to one decade in
the logarithmic range. Defaults to 1.
"""
Normalize.__init__(self, vmin, vmax, clip)
self.linthresh = float(linthresh)
self._linscale_adj = (linscale / (1.0 - np.e ** -1))
if vmin is not None and vmax is not None:
self._transform_vmin_vmax()
def __call__(self, value, clip=None):
if clip is None:
clip = self.clip
result, is_scalar = self.process_value(value)
self.autoscale_None(result)
vmin, vmax = self.vmin, self.vmax
if vmin > vmax:
raise ValueError("minvalue must be less than or equal to maxvalue")
elif vmin == vmax:
result.fill(0)
else:
if clip:
mask = np.ma.getmask(result)
result = np.ma.array(np.clip(result.filled(vmax), vmin, vmax),
mask=mask)
# in-place equivalent of above can be much faster
resdat = self._transform(result.data)
resdat -= self._lower
resdat /= (self._upper - self._lower)
if is_scalar:
result = result[0]
return result
def _transform(self, a):
"""
Inplace transformation.
"""
masked = np.abs(a) > self.linthresh
sign = np.sign(a[masked])
log = (self._linscale_adj + np.log(np.abs(a[masked]) / self.linthresh))
log *= sign * self.linthresh
a[masked] = log
a[~masked] *= self._linscale_adj
return a
def _inv_transform(self, a):
"""
Inverse inplace Transformation.
"""
masked = np.abs(a) > (self.linthresh * self._linscale_adj)
sign = np.sign(a[masked])
exp = np.exp(sign * a[masked] / self.linthresh - self._linscale_adj)
exp *= sign * self.linthresh
a[masked] = exp
a[~masked] /= self._linscale_adj
return a
def _transform_vmin_vmax(self):
"""
Calculates vmin and vmax in the transformed system.
"""
vmin, vmax = self.vmin, self.vmax
arr = np.array([vmax, vmin]).astype(float)
self._upper, self._lower = self._transform(arr)
def inverse(self, value):
if not self.scaled():
raise ValueError("Not invertible until scaled")
val = np.ma.asarray(value)
val = val * (self._upper - self._lower) + self._lower
return self._inv_transform(val)
def autoscale(self, A):
"""
Set *vmin*, *vmax* to min, max of *A*.
"""
self.vmin = np.ma.min(A)
self.vmax = np.ma.max(A)
self._transform_vmin_vmax()
def autoscale_None(self, A):
"""autoscale only None-valued vmin or vmax."""
if self.vmin is not None and self.vmax is not None:
pass
A = np.asanyarray(A)
if self.vmin is None and A.size:
self.vmin = A.min()
if self.vmax is None and A.size:
self.vmax = A.max()
self._transform_vmin_vmax()
class PowerNorm(Normalize):
"""
Normalize a given value to the ``[0, 1]`` interval with a power-law
scaling. This will clip any negative data points to 0.
"""
def __init__(self, gamma, vmin=None, vmax=None, clip=False):
Normalize.__init__(self, vmin, vmax, clip)
self.gamma = gamma
def __call__(self, value, clip=None):
if clip is None:
clip = self.clip
result, is_scalar = self.process_value(value)
self.autoscale_None(result)
gamma = self.gamma
vmin, vmax = self.vmin, self.vmax
if vmin > vmax:
raise ValueError("minvalue must be less than or equal to maxvalue")
elif vmin == vmax:
result.fill(0)
else:
res_mask = result.data < 0
if clip:
mask = np.ma.getmask(result)
result = np.ma.array(np.clip(result.filled(vmax), vmin, vmax),
mask=mask)
resdat = result.data
resdat -= vmin
np.power(resdat, gamma, resdat)
resdat /= (vmax - vmin) ** gamma
result = np.ma.array(resdat, mask=result.mask, copy=False)
result[res_mask] = 0
if is_scalar:
result = result[0]
return result
def inverse(self, value):
if not self.scaled():
raise ValueError("Not invertible until scaled")
gamma = self.gamma
vmin, vmax = self.vmin, self.vmax
if cbook.iterable(value):
val = np.ma.asarray(value)
return np.ma.power(val, 1. / gamma) * (vmax - vmin) + vmin
else:
return pow(value, 1. / gamma) * (vmax - vmin) + vmin
def autoscale(self, A):
"""
Set *vmin*, *vmax* to min, max of *A*.
"""
self.vmin = np.ma.min(A)
if self.vmin < 0:
self.vmin = 0
warnings.warn("Power-law scaling on negative values is "
"ill-defined, clamping to 0.")
self.vmax = np.ma.max(A)
def autoscale_None(self, A):
"""autoscale only None-valued vmin or vmax."""
A = np.asanyarray(A)
if self.vmin is None and A.size:
self.vmin = A.min()
if self.vmin < 0:
self.vmin = 0
warnings.warn("Power-law scaling on negative values is "
"ill-defined, clamping to 0.")
if self.vmax is None and A.size:
self.vmax = A.max()
class BoundaryNorm(Normalize):
"""
Generate a colormap index based on discrete intervals.
Unlike :class:`Normalize` or :class:`LogNorm`,
:class:`BoundaryNorm` maps values to integers instead of to the
interval 0-1.
Mapping to the 0-1 interval could have been done via
piece-wise linear interpolation, but using integers seems
simpler, and reduces the number of conversions back and forth
between integer and floating point.
"""
def __init__(self, boundaries, ncolors, clip=False):
"""
Parameters
----------
boundaries : array-like
Monotonically increasing sequence of boundaries
ncolors : int
Number of colors in the colormap to be used
clip : bool, optional
If clip is ``True``, out of range values are mapped to 0 if they
are below ``boundaries[0]`` or mapped to ncolors - 1 if they are
above ``boundaries[-1]``.
If clip is ``False``, out of range values are mapped to -1 if
they are below ``boundaries[0]`` or mapped to ncolors if they are
above ``boundaries[-1]``. These are then converted to valid indices
by :meth:`Colormap.__call__`.
Notes
-----
*boundaries* defines the edges of bins, and data falling within a bin
is mapped to the color with the same index.
If the number of bins doesn't equal *ncolors*, the color is chosen
by linear interpolation of the bin number onto color numbers.
"""
self.clip = clip
self.vmin = boundaries[0]
self.vmax = boundaries[-1]
self.boundaries = np.asarray(boundaries)
self.N = len(self.boundaries)
self.Ncmap = ncolors
if self.N - 1 == self.Ncmap:
self._interp = False
else:
self._interp = True
def __call__(self, value, clip=None):
if clip is None:
clip = self.clip
xx, is_scalar = self.process_value(value)
mask = np.ma.getmaskarray(xx)
xx = np.atleast_1d(xx.filled(self.vmax + 1))
if clip:
np.clip(xx, self.vmin, self.vmax, out=xx)
max_col = self.Ncmap - 1
else:
max_col = self.Ncmap
iret = np.zeros(xx.shape, dtype=np.int16)
for i, b in enumerate(self.boundaries):
iret[xx >= b] = i
if self._interp:
scalefac = (self.Ncmap - 1) / (self.N - 2)
iret = (iret * scalefac).astype(np.int16)
iret[xx < self.vmin] = -1
iret[xx >= self.vmax] = max_col
ret = np.ma.array(iret, mask=mask)
if is_scalar:
ret = int(ret[0]) # assume python scalar
return ret
def inverse(self, value):
"""
Raises
------
ValueError
BoundaryNorm is not invertible, so calling this method will always
raise an error
"""
        raise ValueError("BoundaryNorm is not invertible")
class NoNorm(Normalize):
"""
Dummy replacement for Normalize, for the case where we
want to use indices directly in a
:class:`~matplotlib.cm.ScalarMappable` .
"""
def __call__(self, value, clip=None):
return value
def inverse(self, value):
return value
def rgb_to_hsv(arr):
"""
convert float rgb values (in the range [0, 1]), in a numpy array to hsv
values.
Parameters
----------
arr : (..., 3) array-like
All values must be in the range [0, 1]
Returns
-------
hsv : (..., 3) ndarray
Colors converted to hsv values in range [0, 1]
"""
# make sure it is an ndarray
arr = np.asarray(arr)
# check length of the last dimension, should be _some_ sort of rgb
if arr.shape[-1] != 3:
raise ValueError("Last dimension of input array must be 3; "
"shape {} was found.".format(arr.shape))
in_ndim = arr.ndim
if arr.ndim == 1:
arr = np.array(arr, ndmin=2)
# make sure we don't have an int image
arr = arr.astype(np.promote_types(arr.dtype, np.float32))
out = np.zeros_like(arr)
arr_max = arr.max(-1)
ipos = arr_max > 0
delta = arr.ptp(-1)
s = np.zeros_like(delta)
s[ipos] = delta[ipos] / arr_max[ipos]
ipos = delta > 0
# red is max
idx = (arr[..., 0] == arr_max) & ipos
out[idx, 0] = (arr[idx, 1] - arr[idx, 2]) / delta[idx]
# green is max
idx = (arr[..., 1] == arr_max) & ipos
out[idx, 0] = 2. + (arr[idx, 2] - arr[idx, 0]) / delta[idx]
# blue is max
idx = (arr[..., 2] == arr_max) & ipos
out[idx, 0] = 4. + (arr[idx, 0] - arr[idx, 1]) / delta[idx]
out[..., 0] = (out[..., 0] / 6.0) % 1.0
out[..., 1] = s
out[..., 2] = arr_max
if in_ndim == 1:
out.shape = (3,)
return out
def hsv_to_rgb(hsv):
"""
convert hsv values in a numpy array to rgb values
all values assumed to be in range [0, 1]
Parameters
----------
hsv : (..., 3) array-like
All values assumed to be in range [0, 1]
Returns
-------
rgb : (..., 3) ndarray
Colors converted to RGB values in range [0, 1]
"""
hsv = np.asarray(hsv)
# check length of the last dimension, should be _some_ sort of rgb
if hsv.shape[-1] != 3:
raise ValueError("Last dimension of input array must be 3; "
"shape {shp} was found.".format(shp=hsv.shape))
# if we got passed a 1D array, try to treat as
# a single color and reshape as needed
in_ndim = hsv.ndim
if in_ndim == 1:
hsv = np.array(hsv, ndmin=2)
# make sure we don't have an int image
hsv = hsv.astype(np.promote_types(hsv.dtype, np.float32))
h = hsv[..., 0]
s = hsv[..., 1]
v = hsv[..., 2]
r = np.empty_like(h)
g = np.empty_like(h)
b = np.empty_like(h)
i = (h * 6.0).astype(int)
f = (h * 6.0) - i
p = v * (1.0 - s)
q = v * (1.0 - s * f)
t = v * (1.0 - s * (1.0 - f))
idx = i % 6 == 0
r[idx] = v[idx]
g[idx] = t[idx]
b[idx] = p[idx]
idx = i == 1
r[idx] = q[idx]
g[idx] = v[idx]
b[idx] = p[idx]
idx = i == 2
r[idx] = p[idx]
g[idx] = v[idx]
b[idx] = t[idx]
idx = i == 3
r[idx] = p[idx]
g[idx] = q[idx]
b[idx] = v[idx]
idx = i == 4
r[idx] = t[idx]
g[idx] = p[idx]
b[idx] = v[idx]
idx = i == 5
r[idx] = v[idx]
g[idx] = p[idx]
b[idx] = q[idx]
idx = s == 0
r[idx] = v[idx]
g[idx] = v[idx]
b[idx] = v[idx]
    # Equivalent to `np.stack([r, g, b], axis=-1)` (numpy >= 1.10).
rgb = np.concatenate([r[..., None], g[..., None], b[..., None]], -1)
if in_ndim == 1:
rgb.shape = (3,)
return rgb
def _vector_magnitude(arr):
# things that don't work here:
# * np.linalg.norm
# - doesn't broadcast in numpy 1.7
# - drops the mask from ma.array
# * using keepdims - broken on ma.array until 1.11.2
# * using sum - discards mask on ma.array unless entire vector is masked
sum_sq = 0
for i in range(arr.shape[-1]):
sum_sq += np.square(arr[..., i, np.newaxis])
return np.sqrt(sum_sq)
def _vector_dot(a, b):
# things that don't work here:
# * a.dot(b) - fails on masked arrays until 1.10
# * np.ma.dot(a, b) - doesn't mask enough things
# * np.ma.dot(a, b, strict=True) - returns a maskedarray with no mask
dot = 0
for i in range(a.shape[-1]):
dot += a[..., i] * b[..., i]
return dot
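# --- Illustrative sketch (not part of the original module) ---
# The two private helpers above exist mainly so that masked-array masks
# survive the computation.  This sketch assumes only numpy and the helpers
# defined above; the demo function name is invented.
def _demo_vector_helpers():
    import numpy as np
    v = np.ma.masked_invalid([[3.0, 4.0, 0.0],
                              [np.nan, 1.0, 1.0]])
    mag = _vector_magnitude(v)   # (2, 1) array: [[5.0], [masked]]
    dot = _vector_dot(v, v)      # (2,) array: [25.0, masked]
    return mag, dot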
class LightSource(object):
"""
Create a light source coming from the specified azimuth and elevation.
Angles are in degrees, with the azimuth measured
clockwise from north and elevation up from the zero plane of the surface.
The :meth:`shade` is used to produce "shaded" rgb values for a data array.
    The :meth:`shade_rgb` method can be used to combine an rgb image with an
    elevation map.
The :meth:`hillshade` produces an illumination map of a surface.
"""
def __init__(self, azdeg=315, altdeg=45, hsv_min_val=0, hsv_max_val=1,
hsv_min_sat=1, hsv_max_sat=0):
"""
        Specify the azimuth (measured clockwise from north) and altitude
(measured up from the plane of the surface) of the light source
in degrees.
Parameters
----------
azdeg : number, optional
The azimuth (0-360, degrees clockwise from North) of the light
source. Defaults to 315 degrees (from the northwest).
altdeg : number, optional
The altitude (0-90, degrees up from horizontal) of the light
source. Defaults to 45 degrees from horizontal.
Notes
-----
For backwards compatibility, the parameters *hsv_min_val*,
*hsv_max_val*, *hsv_min_sat*, and *hsv_max_sat* may be supplied at
initialization as well. However, these parameters will only be used if
"blend_mode='hsv'" is passed into :meth:`shade` or :meth:`shade_rgb`.
See the documentation for :meth:`blend_hsv` for more details.
"""
self.azdeg = azdeg
self.altdeg = altdeg
self.hsv_min_val = hsv_min_val
self.hsv_max_val = hsv_max_val
self.hsv_min_sat = hsv_min_sat
self.hsv_max_sat = hsv_max_sat
@property
def direction(self):
""" The unit vector direction towards the light source """
# Azimuth is in degrees clockwise from North. Convert to radians
# counterclockwise from East (mathematical notation).
az = np.radians(90 - self.azdeg)
alt = np.radians(self.altdeg)
return np.array([
np.cos(az) * np.cos(alt),
np.sin(az) * np.cos(alt),
np.sin(alt)
])
def hillshade(self, elevation, vert_exag=1, dx=1, dy=1, fraction=1.):
"""
Calculates the illumination intensity for a surface using the defined
azimuth and elevation for the light source.
This computes the normal vectors for the surface, and then passes them
on to `shade_normals`
Parameters
----------
elevation : array-like
A 2d array (or equivalent) of the height values used to generate an
illumination map
vert_exag : number, optional
The amount to exaggerate the elevation values by when calculating
illumination. This can be used either to correct for differences in
units between the x-y coordinate system and the elevation
coordinate system (e.g. decimal degrees vs meters) or to exaggerate
or de-emphasize topographic effects.
dx : number, optional
The x-spacing (columns) of the input *elevation* grid.
dy : number, optional
The y-spacing (rows) of the input *elevation* grid.
fraction : number, optional
Increases or decreases the contrast of the hillshade. Values
greater than one will cause intermediate values to move closer to
full illumination or shadow (and clipping any values that move
beyond 0 or 1). Note that this is not visually or mathematically
the same as vertical exaggeration.
Returns
-------
intensity : ndarray
A 2d array of illumination values between 0-1, where 0 is
completely in shadow and 1 is completely illuminated.
"""
# Because most image and raster GIS data has the first row in the array
# as the "top" of the image, dy is implicitly negative. This is
        # consistent with what `imshow` assumes, as well.
dy = -dy
# compute the normal vectors from the partial derivatives
e_dy, e_dx = np.gradient(vert_exag * elevation, dy, dx)
        # .view keeps subclasses of ndarray (e.g. masked arrays) intact
normal = np.empty(elevation.shape + (3,)).view(type(elevation))
normal[..., 0] = -e_dx
normal[..., 1] = -e_dy
normal[..., 2] = 1
normal /= _vector_magnitude(normal)
return self.shade_normals(normal, fraction)
def shade_normals(self, normals, fraction=1.):
"""
Calculates the illumination intensity for the normal vectors of a
surface using the defined azimuth and elevation for the light source.
Imagine an artificial sun placed at infinity in some azimuth and
elevation position illuminating our surface. The parts of the surface
that slope toward the sun should brighten while those sides facing away
should become darker.
Parameters
----------
        normals : array-like
            An (..., 3) array of unit normal vectors for the surface.
        fraction : number, optional
Increases or decreases the contrast of the hillshade. Values
greater than one will cause intermediate values to move closer to
full illumination or shadow (and clipping any values that move
beyond 0 or 1). Note that this is not visually or mathematically
the same as vertical exaggeration.
Returns
-------
intensity : ndarray
A 2d array of illumination values between 0-1, where 0 is
completely in shadow and 1 is completely illuminated.
"""
intensity = _vector_dot(normals, self.direction)
# Apply contrast stretch
imin, imax = intensity.min(), intensity.max()
intensity *= fraction
# Rescale to 0-1, keeping range before contrast stretch
# If constant slope, keep relative scaling (i.e. flat should be 0.5,
# fully occluded 0, etc.)
if (imax - imin) > 1e-6:
# Strictly speaking, this is incorrect. Negative values should be
# clipped to 0 because they're fully occluded. However, rescaling
# in this manner is consistent with the previous implementation and
# visually appears better than a "hard" clip.
intensity -= imin
intensity /= (imax - imin)
intensity = np.clip(intensity, 0, 1, intensity)
return intensity
def shade(self, data, cmap, norm=None, blend_mode='overlay', vmin=None,
vmax=None, vert_exag=1, dx=1, dy=1, fraction=1, **kwargs):
"""
Combine colormapped data values with an illumination intensity map
(a.k.a. "hillshade") of the values.
Parameters
----------
data : array-like
A 2d array (or equivalent) of the height values used to generate a
shaded map.
cmap : `~matplotlib.colors.Colormap` instance
The colormap used to color the *data* array. Note that this must be
a `~matplotlib.colors.Colormap` instance. For example, rather than
passing in `cmap='gist_earth'`, use
`cmap=plt.get_cmap('gist_earth')` instead.
norm : `~matplotlib.colors.Normalize` instance, optional
The normalization used to scale values before colormapping. If
None, the input will be linearly scaled between its min and max.
blend_mode : {'hsv', 'overlay', 'soft'} or callable, optional
The type of blending used to combine the colormapped data
values with the illumination intensity. Default is
"overlay". Note that for most topographic surfaces,
"overlay" or "soft" appear more visually realistic. If a
user-defined function is supplied, it is expected to
combine an MxNx3 RGB array of floats (ranging 0 to 1) with
an MxNx1 hillshade array (also 0 to 1). (Call signature
`func(rgb, illum, **kwargs)`) Additional kwargs supplied
to this function will be passed on to the *blend_mode*
function.
vmin : scalar or None, optional
The minimum value used in colormapping *data*. If *None* the
minimum value in *data* is used. If *norm* is specified, then this
argument will be ignored.
vmax : scalar or None, optional
The maximum value used in colormapping *data*. If *None* the
maximum value in *data* is used. If *norm* is specified, then this
argument will be ignored.
vert_exag : number, optional
The amount to exaggerate the elevation values by when calculating
illumination. This can be used either to correct for differences in
units between the x-y coordinate system and the elevation
coordinate system (e.g. decimal degrees vs meters) or to exaggerate
or de-emphasize topography.
dx : number, optional
The x-spacing (columns) of the input *elevation* grid.
dy : number, optional
The y-spacing (rows) of the input *elevation* grid.
fraction : number, optional
Increases or decreases the contrast of the hillshade. Values
greater than one will cause intermediate values to move closer to
full illumination or shadow (and clipping any values that move
beyond 0 or 1). Note that this is not visually or mathematically
the same as vertical exaggeration.
Additional kwargs are passed on to the *blend_mode* function.
Returns
-------
rgba : ndarray
An MxNx4 array of floats ranging between 0-1.
"""
if vmin is None:
vmin = data.min()
if vmax is None:
vmax = data.max()
if norm is None:
norm = Normalize(vmin=vmin, vmax=vmax)
rgb0 = cmap(norm(data))
rgb1 = self.shade_rgb(rgb0, elevation=data, blend_mode=blend_mode,
vert_exag=vert_exag, dx=dx, dy=dy,
fraction=fraction, **kwargs)
# Don't overwrite the alpha channel, if present.
rgb0[..., :3] = rgb1[..., :3]
return rgb0
def shade_rgb(self, rgb, elevation, fraction=1., blend_mode='hsv',
vert_exag=1, dx=1, dy=1, **kwargs):
"""
        Take the input RGB array (ny*nx*3) and adjust its color values to
        give the impression of a shaded relief map with a specified light
        source, using the elevation (ny*nx).
        A new RGB array (ny*nx*3) is returned.
Parameters
----------
rgb : array-like
An MxNx3 RGB array, assumed to be in the range of 0 to 1.
elevation : array-like
A 2d array (or equivalent) of the height values used to generate a
shaded map.
fraction : number
Increases or decreases the contrast of the hillshade. Values
greater than one will cause intermediate values to move closer to
full illumination or shadow (and clipping any values that move
beyond 0 or 1). Note that this is not visually or mathematically
the same as vertical exaggeration.
blend_mode : {'hsv', 'overlay', 'soft'} or callable, optional
The type of blending used to combine the colormapped data values
with the illumination intensity. For backwards compatibility, this
defaults to "hsv". Note that for most topographic surfaces,
"overlay" or "soft" appear more visually realistic. If a
user-defined function is supplied, it is expected to combine an
MxNx3 RGB array of floats (ranging 0 to 1) with an MxNx1 hillshade
array (also 0 to 1). (Call signature `func(rgb, illum, **kwargs)`)
Additional kwargs supplied to this function will be passed on to
the *blend_mode* function.
vert_exag : number, optional
The amount to exaggerate the elevation values by when calculating
illumination. This can be used either to correct for differences in
units between the x-y coordinate system and the elevation
coordinate system (e.g. decimal degrees vs meters) or to exaggerate
or de-emphasize topography.
dx : number, optional
The x-spacing (columns) of the input *elevation* grid.
dy : number, optional
The y-spacing (rows) of the input *elevation* grid.
Additional kwargs are passed on to the *blend_mode* function.
Returns
-------
shaded_rgb : ndarray
An MxNx3 array of floats ranging between 0-1.
"""
# Calculate the "hillshade" intensity.
intensity = self.hillshade(elevation, vert_exag, dx, dy, fraction)
intensity = intensity[..., np.newaxis]
# Blend the hillshade and rgb data using the specified mode
lookup = {
'hsv': self.blend_hsv,
'soft': self.blend_soft_light,
'overlay': self.blend_overlay,
}
if blend_mode in lookup:
blend = lookup[blend_mode](rgb, intensity, **kwargs)
else:
try:
blend = blend_mode(rgb, intensity, **kwargs)
except TypeError:
raise ValueError('"blend_mode" must be callable or one of {}'
                                 .format(sorted(lookup)))
# Only apply result where hillshade intensity isn't masked
if hasattr(intensity, 'mask'):
mask = intensity.mask[..., 0]
for i in range(3):
blend[..., i][mask] = rgb[..., i][mask]
return blend
def blend_hsv(self, rgb, intensity, hsv_max_sat=None, hsv_max_val=None,
hsv_min_val=None, hsv_min_sat=None):
"""
Take the input data array, convert to HSV values in the given colormap,
then adjust those color values to give the impression of a shaded
relief map with a specified light source. RGBA values are returned,
which can then be used to plot the shaded image with imshow.
The color of the resulting image will be darkened by moving the (s,v)
values (in hsv colorspace) toward (hsv_min_sat, hsv_min_val) in the
        shaded regions, or lightened by sliding (s,v) toward (hsv_max_sat,
        hsv_max_val) in regions that are illuminated. The default extremes are
        chosen so that completely shaded points are nearly black (s = 1, v = 0)
and completely illuminated points are nearly white (s = 0, v = 1).
Parameters
----------
rgb : ndarray
An MxNx3 RGB array of floats ranging from 0 to 1 (color image).
intensity : ndarray
An MxNx1 array of floats ranging from 0 to 1 (grayscale image).
hsv_max_sat : number, optional
The maximum saturation value that the *intensity* map can shift the
output image to. Defaults to 1.
hsv_min_sat : number, optional
The minimum saturation value that the *intensity* map can shift the
output image to. Defaults to 0.
hsv_max_val : number, optional
The maximum value ("v" in "hsv") that the *intensity* map can shift
the output image to. Defaults to 1.
hsv_min_val: number, optional
The minimum value ("v" in "hsv") that the *intensity* map can shift
the output image to. Defaults to 0.
Returns
-------
rgb : ndarray
An MxNx3 RGB array representing the combined images.
"""
# Backward compatibility...
if hsv_max_sat is None:
hsv_max_sat = self.hsv_max_sat
if hsv_max_val is None:
hsv_max_val = self.hsv_max_val
if hsv_min_sat is None:
hsv_min_sat = self.hsv_min_sat
if hsv_min_val is None:
hsv_min_val = self.hsv_min_val
        # Expects a 2D intensity array scaled between -1 and 1...
intensity = intensity[..., 0]
intensity = 2 * intensity - 1
# convert to rgb, then rgb to hsv
hsv = rgb_to_hsv(rgb[:, :, 0:3])
# modify hsv values to simulate illumination.
hsv[:, :, 1] = np.where(np.logical_and(np.abs(hsv[:, :, 1]) > 1.e-10,
intensity > 0),
((1. - intensity) * hsv[:, :, 1] +
intensity * hsv_max_sat),
hsv[:, :, 1])
hsv[:, :, 2] = np.where(intensity > 0,
((1. - intensity) * hsv[:, :, 2] +
intensity * hsv_max_val),
hsv[:, :, 2])
hsv[:, :, 1] = np.where(np.logical_and(np.abs(hsv[:, :, 1]) > 1.e-10,
intensity < 0),
((1. + intensity) * hsv[:, :, 1] -
intensity * hsv_min_sat),
hsv[:, :, 1])
hsv[:, :, 2] = np.where(intensity < 0,
((1. + intensity) * hsv[:, :, 2] -
intensity * hsv_min_val),
hsv[:, :, 2])
hsv[:, :, 1:] = np.where(hsv[:, :, 1:] < 0., 0, hsv[:, :, 1:])
hsv[:, :, 1:] = np.where(hsv[:, :, 1:] > 1., 1, hsv[:, :, 1:])
# convert modified hsv back to rgb.
return hsv_to_rgb(hsv)
def blend_soft_light(self, rgb, intensity):
"""
Combines an rgb image with an intensity map using "soft light"
blending. Uses the "pegtop" formula.
Parameters
----------
rgb : ndarray
An MxNx3 RGB array of floats ranging from 0 to 1 (color image).
intensity : ndarray
An MxNx1 array of floats ranging from 0 to 1 (grayscale image).
Returns
-------
rgb : ndarray
An MxNx3 RGB array representing the combined images.
"""
return 2 * intensity * rgb + (1 - 2 * intensity) * rgb**2
def blend_overlay(self, rgb, intensity):
"""
Combines an rgb image with an intensity map using "overlay" blending.
Parameters
----------
rgb : ndarray
An MxNx3 RGB array of floats ranging from 0 to 1 (color image).
intensity : ndarray
An MxNx1 array of floats ranging from 0 to 1 (grayscale image).
Returns
-------
rgb : ndarray
An MxNx3 RGB array representing the combined images.
"""
low = 2 * intensity * rgb
high = 1 - 2 * (1 - intensity) * (1 - rgb)
return np.where(rgb <= 0.5, low, high)
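# --- Illustrative usage sketch (not part of the original module) ---
# Hillshading a small synthetic elevation grid with the LightSource class
# defined above.  `get_cmap` comes from matplotlib.cm; the grid, the demo
# function name and the parameter values are made up for illustration.
def _demo_lightsource():
    import numpy as np
    from matplotlib.cm import get_cmap
    y, x = np.mgrid[0:64, 0:64]
    elevation = np.sin(x / 10.0) * np.cos(y / 10.0)   # synthetic terrain
    ls = LightSource(azdeg=315, altdeg=45)
    # Pure illumination map, values in [0, 1].
    intensity = ls.hillshade(elevation, vert_exag=10)
    # Colormapped data blended with the illumination ("overlay" mode).
    rgba = ls.shade(elevation, cmap=get_cmap('gist_earth'),
                    blend_mode='overlay', vert_exag=10)
    return intensity, rgba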
def from_levels_and_colors(levels, colors, extend='neither'):
"""
A helper routine to generate a cmap and a norm instance which
behave similar to contourf's levels and colors arguments.
Parameters
----------
levels : sequence of numbers
The quantization levels used to construct the :class:`BoundaryNorm`.
        Values ``v`` are quantized to level ``i`` if
``lev[i] <= v < lev[i+1]``.
colors : sequence of colors
The fill color to use for each level. If `extend` is "neither" there
        must be ``n_levels - 1`` colors. For an `extend` of "min" or "max" add
one extra color, and for an `extend` of "both" add two colors.
extend : {'neither', 'min', 'max', 'both'}, optional
The behaviour when a value falls out of range of the given levels.
See :func:`~matplotlib.pyplot.contourf` for details.
Returns
-------
(cmap, norm) : tuple containing a :class:`Colormap` and a \
:class:`Normalize` instance
"""
colors_i0 = 0
colors_i1 = None
if extend == 'both':
colors_i0 = 1
colors_i1 = -1
extra_colors = 2
elif extend == 'min':
colors_i0 = 1
extra_colors = 1
elif extend == 'max':
colors_i1 = -1
extra_colors = 1
elif extend == 'neither':
extra_colors = 0
else:
raise ValueError('Unexpected value for extend: {0!r}'.format(extend))
n_data_colors = len(levels) - 1
n_expected_colors = n_data_colors + extra_colors
if len(colors) != n_expected_colors:
raise ValueError('With extend == {0!r} and n_levels == {1!r} expected'
' n_colors == {2!r}. Got {3!r}.'
''.format(extend, len(levels), n_expected_colors,
len(colors)))
cmap = ListedColormap(colors[colors_i0:colors_i1], N=n_data_colors)
if extend in ['min', 'both']:
cmap.set_under(colors[0])
else:
cmap.set_under('none')
if extend in ['max', 'both']:
cmap.set_over(colors[-1])
else:
cmap.set_over('none')
cmap.colorbar_extend = extend
norm = BoundaryNorm(levels, ncolors=n_data_colors)
return cmap, norm
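# --- Illustrative usage sketch (not part of the original module) ---
# Building a discrete colormap/norm pair with `from_levels_and_colors`.
# With extend='both', two extra colors are required for the under/over bins;
# the level and color choices below are arbitrary examples.
def _demo_from_levels_and_colors():
    levels = [0, 1, 2, 4, 8]
    colors = ['navy', 'blue', 'cyan', 'yellow', 'orange', 'red']
    cmap, norm = from_levels_and_colors(levels, colors, extend='both')
    # Four in-range bins; 'navy' is used below 0 and 'red' above 8.
    return cmap, norm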
| 73,071 | 35.031558 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/dates.py |
"""
Matplotlib provides sophisticated date plotting capabilities, standing on the
shoulders of python :mod:`datetime`, the add-on modules :mod:`pytz` and
:mod:`dateutil`.
.. _date-format:
Matplotlib date format
----------------------
Matplotlib represents dates using floating point numbers specifying the number
of days since 0001-01-01 UTC, plus 1. For example, 0001-01-01, 06:00 is 1.25,
not 0.25. Values < 1, i.e. dates before 0001-01-01 UTC are not supported.
There are a number of helper functions to convert between :mod:`datetime`
objects and Matplotlib dates:
.. currentmodule:: matplotlib.dates
.. autosummary::
:nosignatures:
date2num
num2date
num2timedelta
epoch2num
num2epoch
mx2num
drange
.. note::
Like Python's datetime, mpl uses the Gregorian calendar for all
conversions between dates and floating point numbers. This practice
is not universal, and calendar differences can cause confusing
differences between what Python and mpl give as the number of days
since 0001-01-01 and what other software and databases yield. For
example, the US Naval Observatory uses a calendar that switches
from Julian to Gregorian in October, 1582. Hence, using their
calculator, the number of days between 0001-01-01 and 2006-04-01 is
732403, whereas using the Gregorian calendar via the datetime
module we find::
In [1]: date(2006, 4, 1).toordinal() - date(1, 1, 1).toordinal()
Out[1]: 732401
All the Matplotlib date converters, tickers and formatters are timezone aware.
If no explicit timezone is provided, the rcParam ``timezone`` is assumed. If
you want to use a custom time zone, pass a :class:`pytz.timezone` instance
with the tz keyword argument to :func:`num2date`, :func:`.plot_date`, and any
custom date tickers or locators you create.
See `pytz <http://pythonhosted.org/pytz/>`_ for information on :mod:`pytz` and
timezone handling.
A wide range of specific and general purpose date tick locators and
formatters are provided in this module. See
:mod:`matplotlib.ticker` for general information on tick locators
and formatters. These are described below.
The `dateutil module <https://dateutil.readthedocs.io/en/stable/>`_ provides
additional code to handle date ticking, making it easy to place ticks
on any kinds of dates. See examples below.
Date tickers
------------
Most of the date tickers can locate single or multiple values. For
example::
# import constants for the days of the week
from matplotlib.dates import MO, TU, WE, TH, FR, SA, SU
# tick on mondays every week
loc = WeekdayLocator(byweekday=MO, tz=tz)
# tick on mondays and saturdays
loc = WeekdayLocator(byweekday=(MO, SA))
In addition, most of the constructors take an interval argument::
# tick on mondays every second week
loc = WeekdayLocator(byweekday=MO, interval=2)
The rrule locator allows completely general date ticking::
# tick every 5th easter
rule = rrulewrapper(YEARLY, byeaster=1, interval=5)
loc = RRuleLocator(rule)
Here are all the date tickers:
* :class:`MicrosecondLocator`: locate microseconds
* :class:`SecondLocator`: locate seconds
* :class:`MinuteLocator`: locate minutes
* :class:`HourLocator`: locate hours
* :class:`DayLocator`: locate specified days of the month
* :class:`WeekdayLocator`: Locate days of the week, e.g., MO, TU
* :class:`MonthLocator`: locate months, e.g., 7 for july
* :class:`YearLocator`: locate years that are multiples of base
* :class:`RRuleLocator`: locate using a
:class:`matplotlib.dates.rrulewrapper`. The
:class:`rrulewrapper` is a simple wrapper around a
:class:`dateutil.rrule` (`dateutil
<https://dateutil.readthedocs.io/en/stable/>`_) which allow almost
arbitrary date tick specifications. See `rrule example
<../gallery/ticks_and_spines/date_demo_rrule.html>`_.
* :class:`AutoDateLocator`: On autoscale, this class picks the best
:class:`DateLocator` (e.g., :class:`RRuleLocator`)
to set the view limits and the tick
locations. If called with ``interval_multiples=True`` it will
make ticks line up with sensible multiples of the tick intervals. E.g.
if the interval is 4 hours, it will pick hours 0, 4, 8, etc as ticks.
This behaviour is not guaranteed by default.
Date formatters
---------------
Here are all the date formatters:
* :class:`AutoDateFormatter`: attempts to figure out the best format
to use. This is most useful when used with the :class:`AutoDateLocator`.
* :class:`DateFormatter`: use :func:`strftime` format strings
* :class:`IndexDateFormatter`: date plots with implicit *x*
indexing.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import zip
import re
import time
import math
import datetime
import functools
import warnings
import logging
from dateutil.rrule import (rrule, MO, TU, WE, TH, FR, SA, SU, YEARLY,
MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY,
SECONDLY)
from dateutil.relativedelta import relativedelta
import dateutil.parser
import numpy as np
import matplotlib
from matplotlib import rcParams
import matplotlib.units as units
import matplotlib.cbook as cbook
import matplotlib.ticker as ticker
_log = logging.getLogger(__name__)
__all__ = ('date2num', 'num2date', 'num2timedelta', 'drange', 'epoch2num',
'num2epoch', 'mx2num', 'DateFormatter',
'IndexDateFormatter', 'AutoDateFormatter', 'DateLocator',
'RRuleLocator', 'AutoDateLocator', 'YearLocator',
'MonthLocator', 'WeekdayLocator',
'DayLocator', 'HourLocator', 'MinuteLocator',
'SecondLocator', 'MicrosecondLocator',
'rrule', 'MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU',
'YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY',
'HOURLY', 'MINUTELY', 'SECONDLY', 'MICROSECONDLY', 'relativedelta',
'seconds', 'minutes', 'hours', 'weeks')
# Make a simple UTC instance so we don't always have to import
# pytz. From the python datetime library docs:
class _UTC(datetime.tzinfo):
"""UTC"""
def utcoffset(self, dt):
return datetime.timedelta(0)
def tzname(self, dt):
return str("UTC")
def dst(self, dt):
return datetime.timedelta(0)
UTC = _UTC()
def _get_rc_timezone():
"""
    Retrieve the preferred timezone from the rcParams dictionary.
"""
s = matplotlib.rcParams['timezone']
if s == 'UTC':
return UTC
import pytz
return pytz.timezone(s)
"""
Time-related constants.
"""
EPOCH_OFFSET = float(datetime.datetime(1970, 1, 1).toordinal())
JULIAN_OFFSET = 1721424.5 # Julian date at 0001-01-01
MICROSECONDLY = SECONDLY + 1
HOURS_PER_DAY = 24.
MIN_PER_HOUR = 60.
SEC_PER_MIN = 60.
MONTHS_PER_YEAR = 12.
DAYS_PER_WEEK = 7.
DAYS_PER_MONTH = 30.
DAYS_PER_YEAR = 365.0
MINUTES_PER_DAY = MIN_PER_HOUR * HOURS_PER_DAY
SEC_PER_HOUR = SEC_PER_MIN * MIN_PER_HOUR
SEC_PER_DAY = SEC_PER_HOUR * HOURS_PER_DAY
SEC_PER_WEEK = SEC_PER_DAY * DAYS_PER_WEEK
MUSECONDS_PER_DAY = 1e6 * SEC_PER_DAY
MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY = (
MO, TU, WE, TH, FR, SA, SU)
WEEKDAYS = (MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY)
def _to_ordinalf(dt):
"""
Convert :mod:`datetime` or :mod:`date` to the Gregorian date as UTC float
days, preserving hours, minutes, seconds and microseconds. Return value
is a :func:`float`.
"""
# Convert to UTC
tzi = getattr(dt, 'tzinfo', None)
if tzi is not None:
dt = dt.astimezone(UTC)
tzi = UTC
base = float(dt.toordinal())
# If it's sufficiently datetime-like, it will have a `date()` method
cdate = getattr(dt, 'date', lambda: None)()
if cdate is not None:
# Get a datetime object at midnight UTC
midnight_time = datetime.time(0, tzinfo=tzi)
rdt = datetime.datetime.combine(cdate, midnight_time)
# Append the seconds as a fraction of a day
base += (dt - rdt).total_seconds() / SEC_PER_DAY
return base
# a version of _to_ordinalf that can operate on numpy arrays
_to_ordinalf_np_vectorized = np.vectorize(_to_ordinalf)
def _dt64_to_ordinalf(d):
"""
Convert `numpy.datetime64` or an ndarray of those types to Gregorian
date as UTC float. Roundoff is via float64 precision. Practically:
    microseconds for dates between 290301 BC and 294241 AD, milliseconds for
larger dates (see `numpy.datetime64`). Nanoseconds aren't possible
because we do times compared to ``0001-01-01T00:00:00`` (plus one day).
"""
# the "extra" ensures that we at least allow the dynamic range out to
# seconds. That should get out to +/-2e11 years.
extra = d - d.astype('datetime64[s]')
extra = extra.astype('timedelta64[ns]')
t0 = np.datetime64('0001-01-01T00:00:00').astype('datetime64[s]')
dt = (d.astype('datetime64[s]') - t0).astype(np.float64)
dt += extra.astype(np.float64) / 1.0e9
dt = dt / SEC_PER_DAY + 1.0
NaT_int = np.datetime64('NaT').astype(np.int64)
d_int = d.astype(np.int64)
try:
dt[d_int == NaT_int] = np.nan
except TypeError:
if d_int == NaT_int:
dt = np.nan
return dt
def _from_ordinalf(x, tz=None):
"""
Convert Gregorian float of the date, preserving hours, minutes,
seconds and microseconds. Return value is a `.datetime`.
The input date *x* is a float in ordinal days at UTC, and the output will
be the specified `.datetime` object corresponding to that time in
timezone *tz*, or if *tz* is ``None``, in the timezone specified in
:rc:`timezone`.
"""
if tz is None:
tz = _get_rc_timezone()
ix, remainder = divmod(x, 1)
ix = int(ix)
if ix < 1:
raise ValueError('Cannot convert {} to a date. This often happens if '
'non-datetime values are passed to an axis that '
'expects datetime objects.'.format(ix))
dt = datetime.datetime.fromordinal(ix).replace(tzinfo=UTC)
# Since the input date `x` float is unable to preserve microsecond
# precision of time representation in non-antique years, the
# resulting datetime is rounded to the nearest multiple of
# `musec_prec`. A value of 20 is appropriate for current dates.
musec_prec = 20
remainder_musec = int(round(remainder * MUSECONDS_PER_DAY / musec_prec)
* musec_prec)
# For people trying to plot with full microsecond precision, enable
# an early-year workaround
if x < 30 * 365:
remainder_musec = int(round(remainder * MUSECONDS_PER_DAY))
# add hours, minutes, seconds, microseconds
dt += datetime.timedelta(microseconds=remainder_musec)
return dt.astimezone(tz)
# a version of _from_ordinalf that can operate on numpy arrays
_from_ordinalf_np_vectorized = np.vectorize(_from_ordinalf)
class strpdate2num(object):
"""
Use this class to parse date strings to matplotlib datenums when
you know the date format string of the date you are parsing.
"""
def __init__(self, fmt):
""" fmt: any valid strptime format is supported """
self.fmt = fmt
def __call__(self, s):
"""s : string to be converted
return value: a date2num float
"""
return date2num(datetime.datetime(*time.strptime(s, self.fmt)[:6]))
class bytespdate2num(strpdate2num):
"""
Use this class to parse date strings to matplotlib datenums when
you know the date format string of the date you are parsing. See
:file:`examples/misc/load_converter.py`.
"""
def __init__(self, fmt, encoding='utf-8'):
"""
Args:
fmt: any valid strptime format is supported
encoding: encoding to use on byte input (default: 'utf-8')
"""
super(bytespdate2num, self).__init__(fmt)
self.encoding = encoding
def __call__(self, b):
"""
Args:
b: byte input to be converted
Returns:
A date2num float
"""
s = b.decode(self.encoding)
return super(bytespdate2num, self).__call__(s)
# a version of dateutil.parser.parse that can operate on numpy arrays
_dateutil_parser_parse_np_vectorized = np.vectorize(dateutil.parser.parse)
def datestr2num(d, default=None):
"""
Convert a date string to a datenum using
:func:`dateutil.parser.parse`.
Parameters
----------
d : string or sequence of strings
The dates to convert.
default : datetime instance, optional
The default date to use when fields are missing in *d*.
"""
if isinstance(d, six.string_types):
dt = dateutil.parser.parse(d, default=default)
return date2num(dt)
else:
if default is not None:
d = [dateutil.parser.parse(s, default=default) for s in d]
d = np.asarray(d)
if not d.size:
return d
return date2num(_dateutil_parser_parse_np_vectorized(d))
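# --- Illustrative usage sketch (not part of the original module) ---
# Converting date strings to Matplotlib date floats, either by letting
# dateutil guess the format (`datestr2num`) or with an explicit strptime
# format (`strpdate2num`).  The demo function name and sample date are
# invented for illustration.
def _demo_string_conversion():
    d1 = datestr2num('2003-12-25')                  # dateutil-parsed
    d2 = strpdate2num('%Y-%m-%d')('2003-12-25')     # explicit format
    assert d1 == d2
    return d1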
def date2num(d):
"""
Convert datetime objects to Matplotlib dates.
Parameters
----------
d : `datetime.datetime` or `numpy.datetime64` or sequences of these
Returns
-------
float or sequence of floats
Number of days (fraction part represents hours, minutes, seconds, ms)
since 0001-01-01 00:00:00 UTC, plus one.
Notes
-----
The addition of one here is a historical artifact. Also, note that the
Gregorian calendar is assumed; this is not universal practice.
For details see the module docstring.
"""
if hasattr(d, "values"):
# this unpacks pandas series or dataframes...
d = d.values
if ((isinstance(d, np.ndarray) and np.issubdtype(d.dtype, np.datetime64))
or isinstance(d, np.datetime64)):
return _dt64_to_ordinalf(d)
if not cbook.iterable(d):
return _to_ordinalf(d)
else:
d = np.asarray(d)
if not d.size:
return d
return _to_ordinalf_np_vectorized(d)
def julian2num(j):
"""
Convert a Julian date (or sequence) to a Matplotlib date (or sequence).
Parameters
----------
j : float or sequence of floats
Julian date(s)
Returns
-------
float or sequence of floats
Matplotlib date(s)
"""
if cbook.iterable(j):
j = np.asarray(j)
return j - JULIAN_OFFSET
def num2julian(n):
"""
Convert a Matplotlib date (or sequence) to a Julian date (or sequence).
Parameters
----------
n : float or sequence of floats
Matplotlib date(s)
Returns
-------
float or sequence of floats
Julian date(s)
"""
if cbook.iterable(n):
n = np.asarray(n)
return n + JULIAN_OFFSET
def num2date(x, tz=None):
"""
Convert Matplotlib dates to `~datetime.datetime` objects.
Parameters
----------
x : float or sequence of floats
Number of days (fraction part represents hours, minutes, seconds)
since 0001-01-01 00:00:00 UTC, plus one.
tz : string, optional
Timezone of *x* (defaults to rcparams ``timezone``).
Returns
-------
`~datetime.datetime` or sequence of `~datetime.datetime`
Dates are returned in timezone *tz*.
If *x* is a sequence, a sequence of :class:`datetime` objects will
be returned.
Notes
-----
The addition of one here is a historical artifact. Also, note that the
Gregorian calendar is assumed; this is not universal practice.
For details, see the module docstring.
"""
if tz is None:
tz = _get_rc_timezone()
if not cbook.iterable(x):
return _from_ordinalf(x, tz)
else:
x = np.asarray(x)
if not x.size:
return x
return _from_ordinalf_np_vectorized(x, tz).tolist()
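# --- Illustrative usage sketch (not part of the original module) ---
# Round-tripping a datetime through the Matplotlib float-date representation
# used in this module (days since 0001-01-01 UTC, plus one).  The sample
# date and the demo function name are invented for illustration.
def _demo_date_roundtrip():
    import datetime
    dt = datetime.datetime(2006, 4, 1, 6, 0, tzinfo=UTC)
    d = date2num(dt)           # ordinal 732402 plus 0.25 for 06:00
    back = num2date(d, tz=UTC)
    assert (back.year, back.month, back.day, back.hour) == (2006, 4, 1, 6)
    return d, back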
def _ordinalf_to_timedelta(x):
return datetime.timedelta(days=x)
_ordinalf_to_timedelta_np_vectorized = np.vectorize(_ordinalf_to_timedelta)
def num2timedelta(x):
"""
Convert number of days to a `~datetime.timedelta` object.
If *x* is a sequence, a sequence of `~datetime.timedelta` objects will
be returned.
Parameters
----------
x : float, sequence of floats
Number of days. The fraction part represents hours, minutes, seconds.
Returns
-------
`datetime.timedelta` or list[`datetime.timedelta`]
"""
if not cbook.iterable(x):
return _ordinalf_to_timedelta(x)
else:
x = np.asarray(x)
if not x.size:
return x
return _ordinalf_to_timedelta_np_vectorized(x).tolist()
def drange(dstart, dend, delta):
"""
Return a sequence of equally spaced Matplotlib dates.
The dates start at *dstart* and reach up to, but not including *dend*.
They are spaced by *delta*.
Parameters
----------
dstart, dend : `~datetime.datetime`
The date limits.
delta : `datetime.timedelta`
Spacing of the dates.
Returns
-------
drange : `numpy.array`
        A list of floats representing Matplotlib dates.
"""
f1 = date2num(dstart)
f2 = date2num(dend)
step = delta.total_seconds() / SEC_PER_DAY
# calculate the difference between dend and dstart in times of delta
num = int(np.ceil((f2 - f1) / step))
# calculate end of the interval which will be generated
dinterval_end = dstart + num * delta
# ensure, that an half open interval will be generated [dstart, dend)
if dinterval_end >= dend:
        # if the endpoint is greater than dend, just subtract one delta
dinterval_end -= delta
num -= 1
f2 = date2num(dinterval_end) # new float-endpoint
return np.linspace(f1, f2, num + 1)
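# --- Illustrative usage sketch (not part of the original module) ---
# `drange` produces a half-open range [dstart, dend) of equally spaced
# Matplotlib dates; here, one float per 6-hour step across two days.  The
# sample dates and the demo function name are invented for illustration.
def _demo_drange():
    import datetime
    dstart = datetime.datetime(2004, 2, 1)
    dend = datetime.datetime(2004, 2, 3)
    step = datetime.timedelta(hours=6)
    dates = drange(dstart, dend, step)
    # 8 values: 2004-02-01 00:00 up to (and including) 2004-02-02 18:00.
    assert len(dates) == 8
    return dates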
### date tickers and formatters ###
class DateFormatter(ticker.Formatter):
"""
    Format a tick value (a Matplotlib date, i.e. floating point days as
    described in the module docstring) using a :func:`strftime` format string.
Python only supports :mod:`datetime` :func:`strftime` formatting
for years greater than 1900. Thanks to Andrew Dalke, Dalke
Scientific Software who contributed the :func:`strftime` code
below to include dates earlier than this year.
"""
illegal_s = re.compile(r"((^|[^%])(%%)*%s)")
def __init__(self, fmt, tz=None):
"""
*fmt* is a :func:`strftime` format string; *tz* is the
:class:`tzinfo` instance.
"""
if tz is None:
tz = _get_rc_timezone()
self.fmt = fmt
self.tz = tz
def __call__(self, x, pos=0):
if x == 0:
raise ValueError('DateFormatter found a value of x=0, which is '
'an illegal date. This usually occurs because '
'you have not informed the axis that it is '
'plotting dates, e.g., with ax.xaxis_date()')
dt = num2date(x, self.tz)
return self.strftime(dt, self.fmt)
def set_tzinfo(self, tz):
self.tz = tz
def _replace_common_substr(self, s1, s2, sub1, sub2, replacement):
"""Helper function for replacing substrings sub1 and sub2
located at the same indexes in strings s1 and s2 respectively,
with the string replacement. It is expected that sub1 and sub2
have the same length. Returns the pair s1, s2 after the
substitutions.
"""
# Find common indexes of substrings sub1 in s1 and sub2 in s2
# and make substitutions inplace. Because this is inplace,
# it is okay if len(replacement) != len(sub1), len(sub2).
i = 0
while True:
j = s1.find(sub1, i)
if j == -1:
break
i = j + 1
if s2[j:j + len(sub2)] != sub2:
continue
s1 = s1[:j] + replacement + s1[j + len(sub1):]
s2 = s2[:j] + replacement + s2[j + len(sub2):]
return s1, s2
def strftime_pre_1900(self, dt, fmt=None):
"""Call time.strftime for years before 1900 by rolling
forward a multiple of 28 years.
*fmt* is a :func:`strftime` format string.
Dalke: I hope I did this math right. Every 28 years the
calendar repeats, except through century leap years excepting
the 400 year leap years. But only if you're using the Gregorian
calendar.
"""
if fmt is None:
fmt = self.fmt
# Since python's time module's strftime implementation does not
# support %f microsecond (but the datetime module does), use a
# regular expression substitution to replace instances of %f.
# Note that this can be useful since python's floating-point
# precision representation for datetime causes precision to be
# more accurate closer to year 0 (around the year 2000, precision
# can be at 10s of microseconds).
fmt = re.sub(r'((^|[^%])(%%)*)%f',
r'\g<1>{0:06d}'.format(dt.microsecond), fmt)
year = dt.year
# For every non-leap year century, advance by
# 6 years to get into the 28-year repeat cycle
delta = 2000 - year
off = 6 * (delta // 100 + delta // 400)
year = year + off
# Move to between the years 1973 and 2000
year1 = year + ((2000 - year) // 28) * 28
year2 = year1 + 28
timetuple = dt.timetuple()
# Generate timestamp string for year and year+28
s1 = time.strftime(fmt, (year1,) + timetuple[1:])
s2 = time.strftime(fmt, (year2,) + timetuple[1:])
# Replace instances of respective years (both 2-digit and 4-digit)
# that are located at the same indexes of s1, s2 with dt's year.
# Note that C++'s strftime implementation does not use padded
# zeros or padded whitespace for %y or %Y for years before 100, but
# uses padded zeros for %x. (For example, try the runnable examples
# with .tm_year in the interval [-1900, -1800] on
# http://en.cppreference.com/w/c/chrono/strftime.) For ease of
# implementation, we always use padded zeros for %y, %Y, and %x.
s1, s2 = self._replace_common_substr(s1, s2,
"{0:04d}".format(year1),
"{0:04d}".format(year2),
"{0:04d}".format(dt.year))
s1, s2 = self._replace_common_substr(s1, s2,
"{0:02d}".format(year1 % 100),
"{0:02d}".format(year2 % 100),
"{0:02d}".format(dt.year % 100))
return cbook.unicode_safe(s1)
def strftime(self, dt, fmt=None):
"""
Refer to documentation for :meth:`datetime.datetime.strftime`
*fmt* is a :meth:`datetime.datetime.strftime` format string.
Warning: For years before 1900, depending upon the current
locale it is possible that the year displayed with %x might
be incorrect. For years before 100, %y and %Y will yield
zero-padded strings.
"""
if fmt is None:
fmt = self.fmt
fmt = self.illegal_s.sub(r"\1", fmt)
fmt = fmt.replace("%s", "s")
if dt.year >= 1900:
# Note: in python 3.3 this is okay for years >= 1000,
# refer to http://bugs.python.org/issue1777412
return cbook.unicode_safe(dt.strftime(fmt))
return self.strftime_pre_1900(dt, fmt)
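# --- Illustrative usage sketch (not part of the original module) ---
# Formatting a Matplotlib date value directly with DateFormatter.  On a real
# plot the Axis calls the formatter; here it is invoked by hand.  The sample
# date and the demo function name are invented for illustration.
def _demo_dateformatter():
    import datetime
    fmt = DateFormatter('%Y-%m-%d %H:%M', tz=UTC)
    x = date2num(datetime.datetime(1995, 6, 15, 12, 30, tzinfo=UTC))
    label = fmt(x)             # -> '1995-06-15 12:30'
    return label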
class IndexDateFormatter(ticker.Formatter):
"""
Use with :class:`~matplotlib.ticker.IndexLocator` to cycle format
strings by index.
"""
def __init__(self, t, fmt, tz=None):
"""
*t* is a sequence of dates (floating point days). *fmt* is a
:func:`strftime` format string.
"""
if tz is None:
tz = _get_rc_timezone()
self.t = t
self.fmt = fmt
self.tz = tz
def __call__(self, x, pos=0):
'Return the label for time *x* at position *pos*'
ind = int(np.round(x))
if ind >= len(self.t) or ind <= 0:
return ''
dt = num2date(self.t[ind], self.tz)
return cbook.unicode_safe(dt.strftime(self.fmt))
class AutoDateFormatter(ticker.Formatter):
"""
This class attempts to figure out the best format to use. This is
most useful when used with the :class:`AutoDateLocator`.
The AutoDateFormatter has a scale dictionary that maps the scale
of the tick (the distance in days between one major tick) and a
format string. The default looks like this::
self.scaled = {
            DAYS_PER_YEAR: rcParams['date.autoformatter.year'],
            DAYS_PER_MONTH: rcParams['date.autoformatter.month'],
            1.0: rcParams['date.autoformatter.day'],
            1. / HOURS_PER_DAY: rcParams['date.autoformatter.hour'],
            1. / (MINUTES_PER_DAY): rcParams['date.autoformatter.minute'],
            1. / (SEC_PER_DAY): rcParams['date.autoformatter.second'],
            1. / (MUSECONDS_PER_DAY): rcParams['date.autoformatter.microsecond'],
}
    The algorithm picks the smallest key in the dictionary that is >= the
    current scale and uses the corresponding format string. You can customize
    this dictionary by doing::
>>> locator = AutoDateLocator()
>>> formatter = AutoDateFormatter(locator)
>>> formatter.scaled[1/(24.*60.)] = '%M:%S' # only show min and sec
A custom :class:`~matplotlib.ticker.FuncFormatter` can also be used.
The following example shows how to use a custom format function to strip
trailing zeros from decimal seconds and adds the date to the first
ticklabel::
>>> def my_format_function(x, pos=None):
... x = matplotlib.dates.num2date(x)
... if pos == 0:
... fmt = '%D %H:%M:%S.%f'
... else:
... fmt = '%H:%M:%S.%f'
... label = x.strftime(fmt)
... label = label.rstrip("0")
... label = label.rstrip(".")
... return label
>>> from matplotlib.ticker import FuncFormatter
>>> formatter.scaled[1/(24.*60.)] = FuncFormatter(my_format_function)
"""
# This can be improved by providing some user-level direction on
# how to choose the best format (precedence, etc...)
# Perhaps a 'struct' that has a field for each time-type where a
# zero would indicate "don't show" and a number would indicate
# "show" with some sort of priority. Same priorities could mean
# show all with the same priority.
# Or more simply, perhaps just a format string for each
# possibility...
def __init__(self, locator, tz=None, defaultfmt='%Y-%m-%d'):
"""
Autoformat the date labels. The default format is the one to use
if none of the values in ``self.scaled`` are greater than the unit
returned by ``locator._get_unit()``.
"""
self._locator = locator
self._tz = tz
self.defaultfmt = defaultfmt
self._formatter = DateFormatter(self.defaultfmt, tz)
self.scaled = {DAYS_PER_YEAR: rcParams['date.autoformatter.year'],
DAYS_PER_MONTH: rcParams['date.autoformatter.month'],
1.0: rcParams['date.autoformatter.day'],
1. / HOURS_PER_DAY: rcParams['date.autoformatter.hour'],
1. / (MINUTES_PER_DAY):
rcParams['date.autoformatter.minute'],
1. / (SEC_PER_DAY):
rcParams['date.autoformatter.second'],
1. / (MUSECONDS_PER_DAY):
rcParams['date.autoformatter.microsecond']}
def __call__(self, x, pos=None):
locator_unit_scale = float(self._locator._get_unit())
# Pick the first scale which is greater than the locator unit.
fmt = next((fmt for scale, fmt in sorted(self.scaled.items())
if scale >= locator_unit_scale),
self.defaultfmt)
if isinstance(fmt, six.string_types):
self._formatter = DateFormatter(fmt, self._tz)
result = self._formatter(x, pos)
elif callable(fmt):
result = fmt(x, pos)
else:
raise TypeError('Unexpected type passed to {0!r}.'.format(self))
return result
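# --- Illustrative usage sketch (not part of the original module) ---
# The usual pairing on a date axis: an AutoDateLocator chooses the tick
# positions and an AutoDateFormatter picks a matching format string.  The
# *ax* argument is assumed to be a Matplotlib Axes whose x-axis holds date
# values; the demo function name is invented for illustration.
def _demo_autodate_pair(ax):
    locator = AutoDateLocator()
    formatter = AutoDateFormatter(locator)
    formatter.scaled[1. / HOURS_PER_DAY] = '%H:%M'   # customize hour-scale labels
    ax.xaxis.set_major_locator(locator)
    ax.xaxis.set_major_formatter(formatter)
    return locator, formatter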
class rrulewrapper(object):
def __init__(self, freq, tzinfo=None, **kwargs):
kwargs['freq'] = freq
self._base_tzinfo = tzinfo
self._update_rrule(**kwargs)
def set(self, **kwargs):
self._construct.update(kwargs)
self._update_rrule(**self._construct)
def _update_rrule(self, **kwargs):
tzinfo = self._base_tzinfo
# rrule does not play nicely with time zones - especially pytz time
# zones, it's best to use naive zones and attach timezones once the
# datetimes are returned
if 'dtstart' in kwargs:
dtstart = kwargs['dtstart']
if dtstart.tzinfo is not None:
if tzinfo is None:
tzinfo = dtstart.tzinfo
else:
dtstart = dtstart.astimezone(tzinfo)
kwargs['dtstart'] = dtstart.replace(tzinfo=None)
if 'until' in kwargs:
until = kwargs['until']
if until.tzinfo is not None:
if tzinfo is not None:
until = until.astimezone(tzinfo)
else:
raise ValueError('until cannot be aware if dtstart '
'is naive and tzinfo is None')
kwargs['until'] = until.replace(tzinfo=None)
self._construct = kwargs.copy()
self._tzinfo = tzinfo
self._rrule = rrule(**self._construct)
def _attach_tzinfo(self, dt, tzinfo):
# pytz zones are attached by "localizing" the datetime
if hasattr(tzinfo, 'localize'):
return tzinfo.localize(dt, is_dst=True)
return dt.replace(tzinfo=tzinfo)
def _aware_return_wrapper(self, f, returns_list=False):
"""Decorator function that allows rrule methods to handle tzinfo."""
# This is only necessary if we're actually attaching a tzinfo
if self._tzinfo is None:
return f
# All datetime arguments must be naive. If they are not naive, they are
# converted to the _tzinfo zone before dropping the zone.
def normalize_arg(arg):
if isinstance(arg, datetime.datetime) and arg.tzinfo is not None:
if arg.tzinfo is not self._tzinfo:
arg = arg.astimezone(self._tzinfo)
return arg.replace(tzinfo=None)
return arg
def normalize_args(args, kwargs):
args = tuple(normalize_arg(arg) for arg in args)
kwargs = {kw: normalize_arg(arg) for kw, arg in kwargs.items()}
return args, kwargs
# There are two kinds of functions we care about - ones that return
# dates and ones that return lists of dates.
if not returns_list:
def inner_func(*args, **kwargs):
args, kwargs = normalize_args(args, kwargs)
dt = f(*args, **kwargs)
return self._attach_tzinfo(dt, self._tzinfo)
else:
def inner_func(*args, **kwargs):
args, kwargs = normalize_args(args, kwargs)
dts = f(*args, **kwargs)
return [self._attach_tzinfo(dt, self._tzinfo) for dt in dts]
return functools.wraps(f)(inner_func)
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
f = getattr(self._rrule, name)
if name in {'after', 'before'}:
return self._aware_return_wrapper(f)
elif name in {'xafter', 'xbefore', 'between'}:
return self._aware_return_wrapper(f, returns_list=True)
else:
return f
def __setstate__(self, state):
self.__dict__.update(state)
class DateLocator(ticker.Locator):
"""
Determines the tick locations when plotting dates.
This class is subclassed by other Locators and
is not meant to be used on its own.
"""
hms0d = {'byhour': 0, 'byminute': 0, 'bysecond': 0}
def __init__(self, tz=None):
"""
*tz* is a :class:`tzinfo` instance.
"""
if tz is None:
tz = _get_rc_timezone()
self.tz = tz
def set_tzinfo(self, tz):
"""
Set time zone info.
"""
self.tz = tz
def datalim_to_dt(self):
"""
Convert axis data interval to datetime objects.
"""
dmin, dmax = self.axis.get_data_interval()
if dmin > dmax:
dmin, dmax = dmax, dmin
if dmin < 1:
raise ValueError('datalim minimum {} is less than 1 and '
'is an invalid Matplotlib date value. This often '
'happens if you pass a non-datetime '
'value to an axis that has datetime units'
.format(dmin))
return num2date(dmin, self.tz), num2date(dmax, self.tz)
def viewlim_to_dt(self):
"""
Converts the view interval to datetime objects.
"""
vmin, vmax = self.axis.get_view_interval()
if vmin > vmax:
vmin, vmax = vmax, vmin
if vmin < 1:
raise ValueError('view limit minimum {} is less than 1 and '
'is an invalid Matplotlib date value. This '
'often happens if you pass a non-datetime '
'value to an axis that has datetime units'
.format(vmin))
return num2date(vmin, self.tz), num2date(vmax, self.tz)
def _get_unit(self):
"""
Return how many days a unit of the locator is; used for
intelligent autoscaling.
"""
return 1
def _get_interval(self):
"""
Return the number of units for each tick.
"""
return 1
def nonsingular(self, vmin, vmax):
"""
Given the proposed upper and lower extent, adjust the range
if it is too close to being singular (i.e. a range of ~0).
"""
unit = self._get_unit()
interval = self._get_interval()
if abs(vmax - vmin) < 1e-6:
vmin -= 2 * unit * interval
vmax += 2 * unit * interval
return vmin, vmax
class RRuleLocator(DateLocator):
# use the dateutil rrule instance
def __init__(self, o, tz=None):
DateLocator.__init__(self, tz)
self.rule = o
def __call__(self):
# if no data have been set, this will tank with a ValueError
try:
dmin, dmax = self.viewlim_to_dt()
except ValueError:
return []
return self.tick_values(dmin, dmax)
def tick_values(self, vmin, vmax):
delta = relativedelta(vmax, vmin)
# We need to cap at the endpoints of valid datetime
try:
start = vmin - delta
except (ValueError, OverflowError):
start = _from_ordinalf(1.0)
try:
stop = vmax + delta
except (ValueError, OverflowError):
# The magic number!
stop = _from_ordinalf(3652059.9999999)
self.rule.set(dtstart=start, until=stop)
dates = self.rule.between(vmin, vmax, True)
if len(dates) == 0:
return date2num([vmin, vmax])
return self.raise_if_exceeds(date2num(dates))
def _get_unit(self):
"""
Return how many days a unit of the locator is; used for
intelligent autoscaling.
"""
freq = self.rule._rrule._freq
return self.get_unit_generic(freq)
@staticmethod
def get_unit_generic(freq):
if freq == YEARLY:
return DAYS_PER_YEAR
elif freq == MONTHLY:
return DAYS_PER_MONTH
elif freq == WEEKLY:
return DAYS_PER_WEEK
elif freq == DAILY:
return 1.0
elif freq == HOURLY:
return 1.0 / HOURS_PER_DAY
elif freq == MINUTELY:
return 1.0 / MINUTES_PER_DAY
elif freq == SECONDLY:
return 1.0 / SEC_PER_DAY
else:
# error
return -1 # or should this just return '1'?
def _get_interval(self):
return self.rule._rrule._interval
def autoscale(self):
"""
Set the view limits to include the data range.
"""
dmin, dmax = self.datalim_to_dt()
delta = relativedelta(dmax, dmin)
# We need to cap at the endpoints of valid datetime
try:
start = dmin - delta
except ValueError:
start = _from_ordinalf(1.0)
try:
stop = dmax + delta
except ValueError:
# The magic number!
stop = _from_ordinalf(3652059.9999999)
self.rule.set(dtstart=start, until=stop)
dmin, dmax = self.datalim_to_dt()
vmin = self.rule.before(dmin, True)
if not vmin:
vmin = dmin
vmax = self.rule.after(dmax, True)
if not vmax:
vmax = dmax
vmin = date2num(vmin)
vmax = date2num(vmax)
return self.nonsingular(vmin, vmax)
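# --- Illustrative usage sketch (not part of the original module) ---
# Completely general ticking via rrulewrapper + RRuleLocator, mirroring the
# "every 5th Easter" example from the module docstring.  The *ax* argument is
# assumed to be a Matplotlib Axes holding date values; the demo function name
# is invented for illustration.
def _demo_rrule_locator(ax):
    rule = rrulewrapper(YEARLY, byeaster=1, interval=5)   # tick every 5th Easter
    loc = RRuleLocator(rule)
    ax.xaxis.set_major_locator(loc)
    return loc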
class AutoDateLocator(DateLocator):
"""
On autoscale, this class picks the best
:class:`DateLocator` to set the view limits and the tick
locations.
"""
def __init__(self, tz=None, minticks=5, maxticks=None,
interval_multiples=False):
"""
*minticks* is the minimum number of ticks desired, which is used to
select the type of ticking (yearly, monthly, etc.).
*maxticks* is the maximum number of ticks desired, which controls
any interval between ticks (ticking every other, every 3, etc.).
For really fine-grained control, this can be a dictionary mapping
individual rrule frequency constants (YEARLY, MONTHLY, etc.)
to their own maximum number of ticks. This can be used to keep
the number of ticks appropriate to the format chosen in
:class:`AutoDateFormatter`. Any frequency not specified in this
dictionary is given a default value.
*tz* is a :class:`tzinfo` instance.
*interval_multiples* is a boolean that indicates whether ticks
should be chosen to be multiple of the interval. This will lock
ticks to 'nicer' locations. For example, this will force the
ticks to be at hours 0,6,12,18 when hourly ticking is done at
6 hour intervals.
The AutoDateLocator has an interval dictionary that maps the
frequency of the tick (a constant from dateutil.rrule) and a
multiple allowed for that ticking. The default looks like this::
self.intervald = {
YEARLY : [1, 2, 4, 5, 10, 20, 40, 50, 100, 200, 400, 500,
1000, 2000, 4000, 5000, 10000],
MONTHLY : [1, 2, 3, 4, 6],
DAILY : [1, 2, 3, 7, 14],
HOURLY : [1, 2, 3, 4, 6, 12],
MINUTELY: [1, 5, 10, 15, 30],
SECONDLY: [1, 5, 10, 15, 30],
MICROSECONDLY: [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000,
5000, 10000, 20000, 50000, 100000, 200000, 500000,
1000000],
}
The interval is used to specify multiples that are appropriate for
the frequency of ticking. For instance, every 7 days is sensible
for daily ticks, but for minutes/seconds, 15 or 30 make sense.
You can customize this dictionary by doing::
locator = AutoDateLocator()
locator.intervald[HOURLY] = [3] # only show every 3 hours
"""
DateLocator.__init__(self, tz)
self._locator = YearLocator()
self._freq = YEARLY
self._freqs = [YEARLY, MONTHLY, DAILY, HOURLY, MINUTELY,
SECONDLY, MICROSECONDLY]
self.minticks = minticks
self.maxticks = {YEARLY: 11, MONTHLY: 12, DAILY: 11, HOURLY: 12,
MINUTELY: 11, SECONDLY: 11, MICROSECONDLY: 8}
if maxticks is not None:
try:
self.maxticks.update(maxticks)
except TypeError:
# Assume we were given an integer. Use this as the maximum
# number of ticks for every frequency and create a
# dictionary for this
self.maxticks = dict.fromkeys(self._freqs, maxticks)
self.interval_multiples = interval_multiples
self.intervald = {
YEARLY: [1, 2, 4, 5, 10, 20, 40, 50, 100, 200, 400, 500,
1000, 2000, 4000, 5000, 10000],
MONTHLY: [1, 2, 3, 4, 6],
DAILY: [1, 2, 3, 7, 14, 21],
HOURLY: [1, 2, 3, 4, 6, 12],
MINUTELY: [1, 5, 10, 15, 30],
SECONDLY: [1, 5, 10, 15, 30],
MICROSECONDLY: [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000,
5000, 10000, 20000, 50000, 100000, 200000, 500000,
1000000]}
self._byranges = [None, range(1, 13), range(1, 32),
range(0, 24), range(0, 60), range(0, 60), None]
def __call__(self):
'Return the locations of the ticks'
self.refresh()
return self._locator()
def tick_values(self, vmin, vmax):
return self.get_locator(vmin, vmax).tick_values(vmin, vmax)
def nonsingular(self, vmin, vmax):
# whatever is thrown at us, we can scale the unit.
        # But by default, expand a singular date range to a ~4 year period.
if vmin == vmax:
vmin = vmin - DAYS_PER_YEAR * 2
vmax = vmax + DAYS_PER_YEAR * 2
return vmin, vmax
def set_axis(self, axis):
DateLocator.set_axis(self, axis)
self._locator.set_axis(axis)
def refresh(self):
'Refresh internal information based on current limits.'
dmin, dmax = self.viewlim_to_dt()
self._locator = self.get_locator(dmin, dmax)
def _get_unit(self):
if self._freq in [MICROSECONDLY]:
return 1. / MUSECONDS_PER_DAY
else:
return RRuleLocator.get_unit_generic(self._freq)
def autoscale(self):
'Try to choose the view limits intelligently.'
dmin, dmax = self.datalim_to_dt()
self._locator = self.get_locator(dmin, dmax)
return self._locator.autoscale()
def get_locator(self, dmin, dmax):
'Pick the best locator based on a distance.'
delta = relativedelta(dmax, dmin)
tdelta = dmax - dmin
# take absolute difference
if dmin > dmax:
delta = -delta
tdelta = -tdelta
# The following uses a mix of calls to relativedelta and timedelta
# methods because there is incomplete overlap in the functionality of
# these similar functions, and it's best to avoid doing our own math
# whenever possible.
numYears = float(delta.years)
numMonths = numYears * MONTHS_PER_YEAR + delta.months
numDays = tdelta.days # Avoids estimates of days/month, days/year
numHours = numDays * HOURS_PER_DAY + delta.hours
numMinutes = numHours * MIN_PER_HOUR + delta.minutes
numSeconds = np.floor(tdelta.total_seconds())
numMicroseconds = np.floor(tdelta.total_seconds() * 1e6)
nums = [numYears, numMonths, numDays, numHours, numMinutes,
numSeconds, numMicroseconds]
use_rrule_locator = [True] * 6 + [False]
# Default setting of bymonth, etc. to pass to rrule
# [unused (for year), bymonth, bymonthday, byhour, byminute,
# bysecond, unused (for microseconds)]
byranges = [None, 1, 1, 0, 0, 0, None]
# Loop over all the frequencies and try to find one that gives at
# least a minticks tick positions. Once this is found, look for
        # an interval from a list specific to that frequency that gives no
# more than maxticks tick positions. Also, set up some ranges
# (bymonth, etc.) as appropriate to be passed to rrulewrapper.
for i, (freq, num) in enumerate(zip(self._freqs, nums)):
# If this particular frequency doesn't give enough ticks, continue
if num < self.minticks:
# Since we're not using this particular frequency, set
# the corresponding by_ to None so the rrule can act as
# appropriate
byranges[i] = None
continue
# Find the first available interval that doesn't give too many
# ticks
for interval in self.intervald[freq]:
if num <= interval * (self.maxticks[freq] - 1):
break
else:
# We went through the whole loop without breaking, default to
# the last interval in the list and raise a warning
warnings.warn('AutoDateLocator was unable to pick an '
'appropriate interval for this date range. '
'It may be necessary to add an interval value '
"to the AutoDateLocator's intervald dictionary."
' Defaulting to {0}.'.format(interval))
# Set some parameters as appropriate
self._freq = freq
if self._byranges[i] and self.interval_multiples:
byranges[i] = self._byranges[i][::interval]
interval = 1
else:
byranges[i] = self._byranges[i]
break
else:
raise ValueError('No sensible date limit could be found in the '
'AutoDateLocator.')
if (freq == YEARLY) and self.interval_multiples:
locator = YearLocator(interval)
elif use_rrule_locator[i]:
_, bymonth, bymonthday, byhour, byminute, bysecond, _ = byranges
rrule = rrulewrapper(self._freq, interval=interval,
dtstart=dmin, until=dmax,
bymonth=bymonth, bymonthday=bymonthday,
byhour=byhour, byminute=byminute,
bysecond=bysecond)
locator = RRuleLocator(rrule, self.tz)
else:
locator = MicrosecondLocator(interval, tz=self.tz)
if dmin.year > 20 and interval < 1000:
                _log.warning('Plotting microsecond time intervals is not'
' well supported. Please see the'
' MicrosecondLocator documentation'
' for details.')
locator.set_axis(self.axis)
if self.axis is not None:
locator.set_view_interval(*self.axis.get_view_interval())
locator.set_data_interval(*self.axis.get_data_interval())
return locator
class YearLocator(DateLocator):
"""
Make ticks on a given day of each year that is a multiple of base.
Examples::
# Tick every year on Jan 1st
locator = YearLocator()
# Tick every 5 years on July 4th
locator = YearLocator(5, month=7, day=4)
"""
def __init__(self, base=1, month=1, day=1, tz=None):
"""
Mark years that are multiple of base on a given month and day
(default jan 1).
"""
DateLocator.__init__(self, tz)
self.base = ticker.Base(base)
self.replaced = {'month': month,
'day': day,
'hour': 0,
'minute': 0,
'second': 0,
'tzinfo': tz
}
def __call__(self):
# if no data have been set, this will tank with a ValueError
try:
dmin, dmax = self.viewlim_to_dt()
except ValueError:
return []
return self.tick_values(dmin, dmax)
def tick_values(self, vmin, vmax):
ymin = self.base.le(vmin.year)
ymax = self.base.ge(vmax.year)
ticks = [vmin.replace(year=ymin, **self.replaced)]
while True:
dt = ticks[-1]
if dt.year >= ymax:
return date2num(ticks)
year = dt.year + self.base.get_base()
ticks.append(dt.replace(year=year, **self.replaced))
def autoscale(self):
"""
Set the view limits to include the data range.
"""
dmin, dmax = self.datalim_to_dt()
ymin = self.base.le(dmin.year)
ymax = self.base.ge(dmax.year)
vmin = dmin.replace(year=ymin, **self.replaced)
vmax = dmax.replace(year=ymax, **self.replaced)
vmin = date2num(vmin)
vmax = date2num(vmax)
return self.nonsingular(vmin, vmax)
class MonthLocator(RRuleLocator):
"""
Make ticks on occurrences of each month, e.g., 1, 3, 12.
"""
def __init__(self, bymonth=None, bymonthday=1, interval=1, tz=None):
"""
Mark every month in *bymonth*; *bymonth* can be an int or
sequence. Default is ``range(1,13)``, i.e. every month.
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second occurrence.
"""
if bymonth is None:
bymonth = range(1, 13)
elif isinstance(bymonth, np.ndarray):
# This fixes a bug in dateutil <= 2.3 which prevents the use of
# numpy arrays in (among other things) the bymonthday, byweekday
# and bymonth parameters.
bymonth = [x.item() for x in bymonth.astype(int)]
rule = rrulewrapper(MONTHLY, bymonth=bymonth, bymonthday=bymonthday,
interval=interval, **self.hms0d)
RRuleLocator.__init__(self, rule, tz)
class WeekdayLocator(RRuleLocator):
"""
Make ticks on occurrences of each weekday.
"""
def __init__(self, byweekday=1, interval=1, tz=None):
"""
Mark every weekday in *byweekday*; *byweekday* can be a number or
sequence.
Elements of *byweekday* must be one of MO, TU, WE, TH, FR, SA,
SU, the constants from :mod:`dateutil.rrule`, which have been
imported into the :mod:`matplotlib.dates` namespace.
*interval* specifies the number of weeks to skip. For example,
``interval=2`` plots every second week.
"""
if isinstance(byweekday, np.ndarray):
# This fixes a bug in dateutil <= 2.3 which prevents the use of
# numpy arrays in (among other things) the bymonthday, byweekday
# and bymonth parameters.
            byweekday = [x.item() for x in byweekday.astype(int)]
rule = rrulewrapper(DAILY, byweekday=byweekday,
interval=interval, **self.hms0d)
RRuleLocator.__init__(self, rule, tz)
class DayLocator(RRuleLocator):
"""
Make ticks on occurrences of each day of the month. For example,
1, 15, 30.
"""
def __init__(self, bymonthday=None, interval=1, tz=None):
"""
Mark every day in *bymonthday*; *bymonthday* can be an int or
sequence.
Default is to tick every day of the month: ``bymonthday=range(1,32)``
"""
if not interval == int(interval) or interval < 1:
raise ValueError("interval must be an integer greater than 0")
if bymonthday is None:
bymonthday = range(1, 32)
elif isinstance(bymonthday, np.ndarray):
# This fixes a bug in dateutil <= 2.3 which prevents the use of
# numpy arrays in (among other things) the bymonthday, byweekday
# and bymonth parameters.
bymonthday = [x.item() for x in bymonthday.astype(int)]
rule = rrulewrapper(DAILY, bymonthday=bymonthday,
interval=interval, **self.hms0d)
RRuleLocator.__init__(self, rule, tz)
class HourLocator(RRuleLocator):
"""
Make ticks on occurrences of each hour.
"""
def __init__(self, byhour=None, interval=1, tz=None):
"""
Mark every hour in *byhour*; *byhour* can be an int or sequence.
Default is to tick every hour: ``byhour=range(24)``
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second occurrence.
"""
if byhour is None:
byhour = range(24)
rule = rrulewrapper(HOURLY, byhour=byhour, interval=interval,
byminute=0, bysecond=0)
RRuleLocator.__init__(self, rule, tz)
class MinuteLocator(RRuleLocator):
"""
Make ticks on occurrences of each minute.
"""
def __init__(self, byminute=None, interval=1, tz=None):
"""
Mark every minute in *byminute*; *byminute* can be an int or
sequence. Default is to tick every minute: ``byminute=range(60)``
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second occurrence.
"""
if byminute is None:
byminute = range(60)
rule = rrulewrapper(MINUTELY, byminute=byminute, interval=interval,
bysecond=0)
RRuleLocator.__init__(self, rule, tz)
class SecondLocator(RRuleLocator):
"""
Make ticks on occurrences of each second.
"""
def __init__(self, bysecond=None, interval=1, tz=None):
"""
Mark every second in *bysecond*; *bysecond* can be an int or
sequence. Default is to tick every second: ``bysecond = range(60)``
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second occurrence.
"""
if bysecond is None:
bysecond = range(60)
rule = rrulewrapper(SECONDLY, bysecond=bysecond, interval=interval)
RRuleLocator.__init__(self, rule, tz)
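# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged example of combining one of the fixed-frequency locators above
# with a DateFormatter; the helper name and *ax* argument are assumptions.
def _example_hourly_axis(ax):
    """Tick *ax* every 6 hours and label the ticks as HH:MM (illustration)."""
    ax.xaxis.set_major_locator(HourLocator(interval=6))
    ax.xaxis.set_major_formatter(DateFormatter('%H:%M'))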
class MicrosecondLocator(DateLocator):
"""
Make ticks on regular intervals of one or more microsecond(s).
.. note::
Due to the floating point representation of time in days since
0001-01-01 UTC (plus 1), plotting data with microsecond time
resolution does not work well with current dates.
If you want microsecond resolution time plots, it is strongly
recommended to use floating point seconds, not datetime-like
time representation.
If you really must use datetime.datetime() or similar and still
need microsecond precision, your only chance is to use very
early years; using year 0001 is recommended.
"""
def __init__(self, interval=1, tz=None):
"""
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second microsecond.
"""
self._interval = interval
self._wrapped_locator = ticker.MultipleLocator(interval)
self.tz = tz
def set_axis(self, axis):
self._wrapped_locator.set_axis(axis)
return DateLocator.set_axis(self, axis)
def set_view_interval(self, vmin, vmax):
self._wrapped_locator.set_view_interval(vmin, vmax)
return DateLocator.set_view_interval(self, vmin, vmax)
def set_data_interval(self, vmin, vmax):
self._wrapped_locator.set_data_interval(vmin, vmax)
return DateLocator.set_data_interval(self, vmin, vmax)
def __call__(self):
# if no data have been set, this will tank with a ValueError
try:
dmin, dmax = self.viewlim_to_dt()
except ValueError:
return []
return self.tick_values(dmin, dmax)
def tick_values(self, vmin, vmax):
nmin, nmax = date2num((vmin, vmax))
nmin *= MUSECONDS_PER_DAY
nmax *= MUSECONDS_PER_DAY
ticks = self._wrapped_locator.tick_values(nmin, nmax)
ticks = [tick / MUSECONDS_PER_DAY for tick in ticks]
return ticks
def _get_unit(self):
"""
Return how many days a unit of the locator is; used for
intelligent autoscaling.
"""
return 1. / MUSECONDS_PER_DAY
def _get_interval(self):
"""
Return the number of units for each tick.
"""
return self._interval
def _close_to_dt(d1, d2, epsilon=5):
"""
Assert that datetimes *d1* and *d2* are within *epsilon* microseconds.
"""
delta = d2 - d1
mus = abs(delta.total_seconds() * 1e6)
assert mus < epsilon
def _close_to_num(o1, o2, epsilon=5):
"""
Assert that float ordinals *o1* and *o2* are within *epsilon*
microseconds.
"""
delta = abs((o2 - o1) * MUSECONDS_PER_DAY)
assert delta < epsilon
def epoch2num(e):
"""
Convert an epoch or sequence of epochs to the new date format,
that is days since 0001.
"""
return EPOCH_OFFSET + np.asarray(e) / SEC_PER_DAY
def num2epoch(d):
"""
Convert days since 0001 to epoch. *d* can be a number or sequence.
"""
return (np.asarray(d) - EPOCH_OFFSET) * SEC_PER_DAY
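# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged round-trip check for the two converters above: an epoch value
# mapped to Matplotlib's day ordinal and back should be unchanged up to
# floating-point error.
def _example_epoch_roundtrip(epoch_seconds=1e9):
    """Return (ordinal_days, recovered_epoch) for *epoch_seconds*."""
    ordinal = epoch2num(epoch_seconds)
    return ordinal, num2epoch(ordinal)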
def mx2num(mxdates):
"""
Convert mx :class:`datetime` instance (or sequence of mx
instances) to the new date format.
"""
scalar = False
if not cbook.iterable(mxdates):
scalar = True
mxdates = [mxdates]
ret = epoch2num([m.ticks() for m in mxdates])
if scalar:
return ret[0]
else:
return ret
def date_ticker_factory(span, tz=None, numticks=5):
"""
Create a date locator with *numticks* (approx) and a date formatter
for *span* in days. Return value is (locator, formatter).
"""
if span == 0:
span = 1 / HOURS_PER_DAY
mins = span * MINUTES_PER_DAY
hrs = span * HOURS_PER_DAY
days = span
wks = span / DAYS_PER_WEEK
months = span / DAYS_PER_MONTH # Approx
years = span / DAYS_PER_YEAR # Approx
if years > numticks:
locator = YearLocator(int(years / numticks), tz=tz) # define
fmt = '%Y'
elif months > numticks:
locator = MonthLocator(tz=tz)
fmt = '%b %Y'
elif wks > numticks:
locator = WeekdayLocator(tz=tz)
fmt = '%a, %b %d'
elif days > numticks:
locator = DayLocator(interval=int(math.ceil(days / numticks)), tz=tz)
fmt = '%b %d'
elif hrs > numticks:
locator = HourLocator(interval=int(math.ceil(hrs / numticks)), tz=tz)
fmt = '%H:%M\n%b %d'
elif mins > numticks:
locator = MinuteLocator(interval=int(math.ceil(mins / numticks)),
tz=tz)
fmt = '%H:%M:%S'
else:
locator = MinuteLocator(tz=tz)
fmt = '%H:%M:%S'
formatter = DateFormatter(fmt, tz=tz)
return locator, formatter
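# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged example of date_ticker_factory(): for a given span in days it
# hands back a locator/formatter pair that can be installed on an axis.
# The helper name and *ax* argument are illustrative assumptions.
def _example_install_factory_ticker(ax, span_days=90.0):
    """Install a locator/formatter chosen for *span_days* (illustration)."""
    locator, formatter = date_ticker_factory(span_days)
    ax.xaxis.set_major_locator(locator)
    ax.xaxis.set_major_formatter(formatter)
    return locator, formatter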
def seconds(s):
"""
Return seconds as days.
"""
return s / SEC_PER_DAY
def minutes(m):
"""
Return minutes as days.
"""
return m / MINUTES_PER_DAY
def hours(h):
"""
Return hours as days.
"""
return h / HOURS_PER_DAY
def weeks(w):
"""
Return weeks as days.
"""
return w * DAYS_PER_WEEK
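# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# The four helpers above express durations in axis units (days); e.g. a
# 90-minute offset is minutes(90) == 0.0625 days.  Hedged demo only.
def _example_duration_in_days():
    """Return 90 minutes expressed in days (0.0625)."""
    return minutes(90)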
class DateConverter(units.ConversionInterface):
"""
Converter for datetime.date and datetime.datetime data,
or for date/time data represented as it would be converted
by :func:`date2num`.
The 'unit' tag for such data is None or a tzinfo instance.
"""
@staticmethod
def axisinfo(unit, axis):
"""
Return the :class:`~matplotlib.units.AxisInfo` for *unit*.
*unit* is a tzinfo instance or None.
The *axis* argument is required but not used.
"""
tz = unit
majloc = AutoDateLocator(tz=tz)
majfmt = AutoDateFormatter(majloc, tz=tz)
datemin = datetime.date(2000, 1, 1)
datemax = datetime.date(2010, 1, 1)
return units.AxisInfo(majloc=majloc, majfmt=majfmt, label='',
default_limits=(datemin, datemax))
@staticmethod
def convert(value, unit, axis):
"""
If *value* is not already a number or sequence of numbers,
convert it with :func:`date2num`.
The *unit* and *axis* arguments are not used.
"""
return date2num(value)
@staticmethod
def default_units(x, axis):
"""
Return the tzinfo instance of *x* or of its first element, or None
"""
if isinstance(x, np.ndarray):
x = x.ravel()
try:
x = cbook.safe_first_element(x)
except (TypeError, StopIteration):
pass
try:
return x.tzinfo
except AttributeError:
pass
return None
units.registry[np.datetime64] = DateConverter()
units.registry[datetime.date] = DateConverter()
units.registry[datetime.datetime] = DateConverter()
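# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# With the registrations above, axes that receive datetime data look up
# DateConverter automatically.  The hedged helper below shows the manual
# equivalent of that conversion path.
def _example_manual_unit_conversion():
    """Convert two datetimes the way the units framework would."""
    values = [datetime.datetime(2018, 1, 1), datetime.datetime(2018, 1, 2)]
    unit = DateConverter.default_units(values, axis=None)
    return DateConverter.convert(values, unit, axis=None)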
| 61,509 | 32.41119 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/afm.py
|
"""
This is a python interface to Adobe Font Metrics Files. Although a
number of other python implementations exist, and may be more complete
than this, it was decided not to go with them because they were
either:
1) copyrighted or used a non-BSD compatible license
2) had too many dependencies and a free standing lib was needed
3) Did more than needed and it was easier to write afresh rather than
figure out how to get just what was needed.
It is pretty easy to use, and requires only built-in python libs:
>>> from matplotlib import rcParams
>>> import os.path
>>> afm_fname = os.path.join(rcParams['datapath'],
... 'fonts', 'afm', 'ptmr8a.afm')
>>>
>>> from matplotlib.afm import AFM
>>> with open(afm_fname, 'rb') as fh:
... afm = AFM(fh)
>>> afm.string_width_height('What the heck?')
(6220.0, 694)
>>> afm.get_fontname()
'Times-Roman'
>>> afm.get_kern_dist('A', 'f')
0
>>> afm.get_kern_dist('A', 'y')
-92.0
>>> afm.get_bbox_char('!')
[130, -9, 238, 676]
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import map
import sys
import re
from ._mathtext_data import uni2type1
# Convert string to a python type
# some afm files have floats where we are expecting ints -- there is
# probably a better way to handle this (support floats, round rather
# than truncate). But I don't know what the best approach is now and
# this change to _to_int should at least prevent mpl from crashing on
# these JDH (2009-11-06)
def _to_int(x):
return int(float(x))
_to_float = float
def _to_str(x):
return x.decode('utf8')
def _to_list_of_ints(s):
s = s.replace(b',', b' ')
return [_to_int(val) for val in s.split()]
def _to_list_of_floats(s):
return [_to_float(val) for val in s.split()]
def _to_bool(s):
if s.lower().strip() in (b'false', b'0', b'no'):
return False
else:
return True
def _sanity_check(fh):
"""
Check if the file at least looks like AFM.
If not, raise :exc:`RuntimeError`.
"""
# Remember the file position in case the caller wants to
# do something else with the file.
pos = fh.tell()
try:
line = next(fh)
finally:
fh.seek(pos, 0)
# AFM spec, Section 4: The StartFontMetrics keyword [followed by a
# version number] must be the first line in the file, and the
# EndFontMetrics keyword must be the last non-empty line in the
# file. We just check the first line.
if not line.startswith(b'StartFontMetrics'):
raise RuntimeError('Not an AFM file')
def _parse_header(fh):
"""
Reads the font metrics header (up to the char metrics) and returns
a dictionary mapping *key* to *val*. *val* will be converted to the
appropriate python type as necessary; e.g.:
* 'False'->False
* '0'->0
* '-168 -218 1000 898'-> [-168, -218, 1000, 898]
Dictionary keys are
StartFontMetrics, FontName, FullName, FamilyName, Weight,
ItalicAngle, IsFixedPitch, FontBBox, UnderlinePosition,
UnderlineThickness, Version, Notice, EncodingScheme, CapHeight,
XHeight, Ascender, Descender, StartCharMetrics
"""
headerConverters = {
b'StartFontMetrics': _to_float,
b'FontName': _to_str,
b'FullName': _to_str,
b'FamilyName': _to_str,
b'Weight': _to_str,
b'ItalicAngle': _to_float,
b'IsFixedPitch': _to_bool,
b'FontBBox': _to_list_of_ints,
b'UnderlinePosition': _to_int,
b'UnderlineThickness': _to_int,
b'Version': _to_str,
b'Notice': _to_str,
b'EncodingScheme': _to_str,
b'CapHeight': _to_float, # Is the second version a mistake, or
b'Capheight': _to_float, # do some AFM files contain 'Capheight'? -JKS
b'XHeight': _to_float,
b'Ascender': _to_float,
b'Descender': _to_float,
b'StdHW': _to_float,
b'StdVW': _to_float,
b'StartCharMetrics': _to_int,
b'CharacterSet': _to_str,
b'Characters': _to_int,
}
d = {}
for line in fh:
line = line.rstrip()
if line.startswith(b'Comment'):
continue
lst = line.split(b' ', 1)
key = lst[0]
if len(lst) == 2:
val = lst[1]
else:
val = b''
try:
d[key] = headerConverters[key](val)
except ValueError:
print('Value error parsing header in AFM:',
key, val, file=sys.stderr)
continue
except KeyError:
print('Found an unknown keyword in AFM header (was %r)' % key,
file=sys.stderr)
continue
if key == b'StartCharMetrics':
return d
raise RuntimeError('Bad parse')
def _parse_char_metrics(fh):
"""
Return a character metric dictionary. Keys are the ASCII num of
the character, values are a (*wx*, *name*, *bbox*) tuple, where
*wx* is the character width, *name* is the postscript language
name, and *bbox* is a (*llx*, *lly*, *urx*, *ury*) tuple.
This function is incomplete per the standard, but thus far parses
all the sample afm files tried.
"""
ascii_d = {}
name_d = {}
for line in fh:
# We are defensively letting values be utf8. The spec requires
# ascii, but there are non-compliant fonts in circulation
line = _to_str(line.rstrip()) # Convert from byte-literal
if line.startswith('EndCharMetrics'):
return ascii_d, name_d
# Split the metric line into a dictionary, keyed by metric identifiers
vals = dict(s.strip().split(' ', 1) for s in line.split(';') if s)
# There may be other metrics present, but only these are needed
if not {'C', 'WX', 'N', 'B'}.issubset(vals):
raise RuntimeError('Bad char metrics line: %s' % line)
num = _to_int(vals['C'])
wx = _to_float(vals['WX'])
name = vals['N']
bbox = _to_list_of_floats(vals['B'])
bbox = list(map(int, bbox))
# Workaround: If the character name is 'Euro', give it the
# corresponding character code, according to WinAnsiEncoding (see PDF
# Reference).
if name == 'Euro':
num = 128
if num != -1:
ascii_d[num] = (wx, name, bbox)
name_d[name] = (wx, bbox)
raise RuntimeError('Bad parse')
def _parse_kern_pairs(fh):
"""
Return a kern pairs dictionary; keys are (*char1*, *char2*) tuples and
values are the kern pair value. For example, a kern pairs line like
``KPX A y -50``
will be represented as::
d[ ('A', 'y') ] = -50
"""
line = next(fh)
if not line.startswith(b'StartKernPairs'):
raise RuntimeError('Bad start of kern pairs data: %s' % line)
d = {}
for line in fh:
line = line.rstrip()
if not line:
continue
if line.startswith(b'EndKernPairs'):
next(fh) # EndKernData
return d
vals = line.split()
if len(vals) != 4 or vals[0] != b'KPX':
raise RuntimeError('Bad kern pairs line: %s' % line)
c1, c2, val = _to_str(vals[1]), _to_str(vals[2]), _to_float(vals[3])
d[(c1, c2)] = val
raise RuntimeError('Bad kern pairs parse')
def _parse_composites(fh):
"""
Return a composites dictionary. Keys are the names of the
composites. Values are a num parts list of composite information,
with each element being a (*name*, *dx*, *dy*) tuple. Thus a
composites line reading:
CC Aacute 2 ; PCC A 0 0 ; PCC acute 160 170 ;
will be represented as::
d['Aacute'] = [ ('A', 0, 0), ('acute', 160, 170) ]
"""
d = {}
for line in fh:
line = line.rstrip()
if not line:
continue
if line.startswith(b'EndComposites'):
return d
vals = line.split(b';')
cc = vals[0].split()
name, numParts = cc[1], _to_int(cc[2])
pccParts = []
for s in vals[1:-1]:
pcc = s.split()
name, dx, dy = pcc[1], _to_float(pcc[2]), _to_float(pcc[3])
pccParts.append((name, dx, dy))
d[name] = pccParts
raise RuntimeError('Bad composites parse')
def _parse_optional(fh):
"""
    Parse the optional fields for kern pair data and composites.
    The return value is a (*kernDict*, *compositeDict*) tuple, holding
    the return values of :func:`_parse_kern_pairs` and
    :func:`_parse_composites` if the data exists, or empty dicts
    otherwise.
"""
optional = {
b'StartKernData': _parse_kern_pairs,
b'StartComposites': _parse_composites,
}
d = {b'StartKernData': {}, b'StartComposites': {}}
for line in fh:
line = line.rstrip()
if not line:
continue
key = line.split()[0]
if key in optional:
d[key] = optional[key](fh)
l = (d[b'StartKernData'], d[b'StartComposites'])
return l
def parse_afm(fh):
"""
    Parse the Adobe Font Metrics file in file handle *fh*. Return value
is a (*dhead*, *dcmetrics_ascii*, *dmetrics_name*, *dkernpairs*,
*dcomposite*) tuple where
*dhead* is a :func:`_parse_header` dict,
*dcmetrics_ascii* and *dcmetrics_name* are the two resulting dicts
from :func:`_parse_char_metrics`,
*dkernpairs* is a :func:`_parse_kern_pairs` dict (possibly {}) and
*dcomposite* is a :func:`_parse_composites` dict (possibly {})
"""
_sanity_check(fh)
dhead = _parse_header(fh)
dcmetrics_ascii, dcmetrics_name = _parse_char_metrics(fh)
doptional = _parse_optional(fh)
return dhead, dcmetrics_ascii, dcmetrics_name, doptional[0], doptional[1]
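# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged example of the low-level entry point above, mirroring the
# module docstring but stopping at the raw parse results.  It assumes the
# bundled Times-Roman metrics file shipped with matplotlib.
def _example_parse_bundled_afm():
    """Return the header dict parsed from ptmr8a.afm (illustration only)."""
    import os.path
    from matplotlib import rcParams
    fname = os.path.join(rcParams['datapath'], 'fonts', 'afm', 'ptmr8a.afm')
    with open(fname, 'rb') as fh:
        dhead, dcmetrics_ascii, dcmetrics_name, dkern, dcomposite = \
            parse_afm(fh)
    return dhead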
class AFM(object):
def __init__(self, fh):
"""
Parse the AFM file in file object *fh*
"""
(dhead, dcmetrics_ascii, dcmetrics_name, dkernpairs, dcomposite) = \
parse_afm(fh)
self._header = dhead
self._kern = dkernpairs
self._metrics = dcmetrics_ascii
self._metrics_by_name = dcmetrics_name
self._composite = dcomposite
def get_bbox_char(self, c, isord=False):
if not isord:
c = ord(c)
wx, name, bbox = self._metrics[c]
return bbox
def string_width_height(self, s):
"""
Return the string width (including kerning) and string height
as a (*w*, *h*) tuple.
"""
if not len(s):
return 0, 0
totalw = 0
namelast = None
miny = 1e9
maxy = 0
for c in s:
if c == '\n':
continue
wx, name, bbox = self._metrics[ord(c)]
l, b, w, h = bbox
# find the width with kerning
try:
kp = self._kern[(namelast, name)]
except KeyError:
kp = 0
totalw += wx + kp
# find the max y
thismax = b + h
if thismax > maxy:
maxy = thismax
# find the min y
thismin = b
if thismin < miny:
miny = thismin
namelast = name
return totalw, maxy - miny
def get_str_bbox_and_descent(self, s):
"""
        Return the string bounding box and the maximal descent.
        """
        if not len(s):
            return 0, 0, 0, 0, 0
totalw = 0
namelast = None
miny = 1e9
maxy = 0
left = 0
if not isinstance(s, six.text_type):
s = _to_str(s)
for c in s:
if c == '\n':
continue
name = uni2type1.get(ord(c), 'question')
try:
wx, bbox = self._metrics_by_name[name]
except KeyError:
name = 'question'
wx, bbox = self._metrics_by_name[name]
l, b, w, h = bbox
if l < left:
left = l
# find the width with kerning
try:
kp = self._kern[(namelast, name)]
except KeyError:
kp = 0
totalw += wx + kp
# find the max y
thismax = b + h
if thismax > maxy:
maxy = thismax
# find the min y
thismin = b
if thismin < miny:
miny = thismin
namelast = name
return left, miny, totalw, maxy - miny, -miny
def get_str_bbox(self, s):
"""
Return the string bounding box
"""
return self.get_str_bbox_and_descent(s)[:4]
def get_name_char(self, c, isord=False):
"""
Get the name of the character, i.e., ';' is 'semicolon'
"""
if not isord:
c = ord(c)
wx, name, bbox = self._metrics[c]
return name
def get_width_char(self, c, isord=False):
"""
Get the width of the character from the character metric WX
field
"""
if not isord:
c = ord(c)
wx, name, bbox = self._metrics[c]
return wx
def get_width_from_char_name(self, name):
"""
Get the width of the character from a type1 character name
"""
wx, bbox = self._metrics_by_name[name]
return wx
def get_height_char(self, c, isord=False):
"""
Get the height of character *c* from the bounding box. This
is the ink height (space is 0)
"""
if not isord:
c = ord(c)
wx, name, bbox = self._metrics[c]
return bbox[-1]
def get_kern_dist(self, c1, c2):
"""
Return the kerning pair distance (possibly 0) for chars *c1*
and *c2*
"""
name1, name2 = self.get_name_char(c1), self.get_name_char(c2)
return self.get_kern_dist_from_name(name1, name2)
def get_kern_dist_from_name(self, name1, name2):
"""
Return the kerning pair distance (possibly 0) for chars
*name1* and *name2*
"""
return self._kern.get((name1, name2), 0)
def get_fontname(self):
"Return the font name, e.g., 'Times-Roman'"
return self._header[b'FontName']
def get_fullname(self):
"Return the font full name, e.g., 'Times-Roman'"
name = self._header.get(b'FullName')
if name is None: # use FontName as a substitute
name = self._header[b'FontName']
return name
def get_familyname(self):
"Return the font family name, e.g., 'Times'"
name = self._header.get(b'FamilyName')
if name is not None:
return name
# FamilyName not specified so we'll make a guess
name = self.get_fullname()
extras = (r'(?i)([ -](regular|plain|italic|oblique|bold|semibold|'
r'light|ultralight|extra|condensed))+$')
return re.sub(extras, '', name)
@property
def family_name(self):
return self.get_familyname()
def get_weight(self):
"Return the font weight, e.g., 'Bold' or 'Roman'"
return self._header[b'Weight']
def get_angle(self):
"Return the fontangle as float"
return self._header[b'ItalicAngle']
def get_capheight(self):
"Return the cap height as float"
return self._header[b'CapHeight']
def get_xheight(self):
"Return the xheight as float"
return self._header[b'XHeight']
def get_underline_thickness(self):
"Return the underline thickness as float"
return self._header[b'UnderlineThickness']
def get_horizontal_stem_width(self):
"""
Return the standard horizontal stem width as float, or *None* if
not specified in AFM file.
"""
return self._header.get(b'StdHW', None)
def get_vertical_stem_width(self):
"""
Return the standard vertical stem width as float, or *None* if
not specified in AFM file.
"""
return self._header.get(b'StdVW', None)
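# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged usage example for the AFM class above: measure a string and
# look up a kerning pair.  *afm_path* is an assumed path to any AFM file.
def _example_measure_string(afm_path):
    """Return ((width, height) of 'Van', kern distance of 'V' and 'a')."""
    with open(afm_path, 'rb') as fh:
        font = AFM(fh)
    return font.string_width_height('Van'), font.get_kern_dist('V', 'a')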
| 16,198 | 28.560219 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/docstring.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from matplotlib import cbook
import sys
import types
class Substitution(object):
"""
A decorator to take a function's docstring and perform string
substitution on it.
This decorator should be robust even if func.__doc__ is None
(for example, if -OO was passed to the interpreter)
Usage: construct a docstring.Substitution with a sequence or
dictionary suitable for performing substitution; then
decorate a suitable function with the constructed object. e.g.
sub_author_name = Substitution(author='Jason')
@sub_author_name
def some_function(x):
"%(author)s wrote this function"
# note that some_function.__doc__ is now "Jason wrote this function"
One can also use positional arguments.
sub_first_last_names = Substitution('Edgar Allen', 'Poe')
@sub_first_last_names
def some_function(x):
"%s %s wrote the Raven"
"""
def __init__(self, *args, **kwargs):
assert not (len(args) and len(kwargs)), \
"Only positional or keyword args are allowed"
self.params = args or kwargs
def __call__(self, func):
func.__doc__ = func.__doc__ and func.__doc__ % self.params
return func
def update(self, *args, **kwargs):
"Assume self.params is a dict and update it with supplied args"
self.params.update(*args, **kwargs)
@classmethod
def from_params(cls, params):
"""
        In the case where the params is a mutable container (list or
        dictionary) and it may change before this class is called, one may
explicitly use a reference to the params rather than using *args or
**kwargs which will copy the values and not reference them.
"""
result = cls()
result.params = params
return result
class Appender(object):
"""
A function decorator that will append an addendum to the docstring
of the target function.
This decorator should be robust even if func.__doc__ is None
(for example, if -OO was passed to the interpreter).
Usage: construct a docstring.Appender with a string to be joined to
the original docstring. An optional 'join' parameter may be supplied
which will be used to join the docstring and addendum. e.g.
add_copyright = Appender("Copyright (c) 2009", join='\n')
@add_copyright
def my_dog(has='fleas'):
"This docstring will have a copyright below"
pass
"""
def __init__(self, addendum, join=''):
self.addendum = addendum
self.join = join
def __call__(self, func):
docitems = [func.__doc__, self.addendum]
func.__doc__ = func.__doc__ and self.join.join(docitems)
return func
def dedent(func):
"Dedent a docstring (if present)"
func.__doc__ = func.__doc__ and cbook.dedent(func.__doc__)
return func
def copy(source):
"Copy a docstring from another source function (if present)"
def do_copy(target):
if source.__doc__:
target.__doc__ = source.__doc__
return target
return do_copy
# create a decorator that will house the various documentation that
# is reused throughout matplotlib
interpd = Substitution()
def dedent_interpd(func):
"""A special case of the interpd that first performs a dedent on
the incoming docstring"""
if isinstance(func, types.MethodType) and not six.PY3:
func = func.im_func
return interpd(dedent(func))
def copy_dedent(source):
"""A decorator that will copy the docstring from the source and
then dedent it"""
# note the following is ugly because "Python is not a functional
# language" - GVR. Perhaps one day, functools.compose will exist.
# or perhaps not.
# http://mail.python.org/pipermail/patches/2007-February/021687.html
return lambda target: dedent(copy(source)(target))
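# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged example tying the pieces above together: a Substitution fills a
# placeholder in the docstring and an Appender adds a trailing note.  The
# names below are illustrative assumptions, not part of this module's API.
_example_sub = Substitution(units='days since 0001-01-01')
@Appender('\n(Illustrative example only.)')
@_example_sub
def _example_documented_function(x):
    """Convert *x*, expressed in %(units)s, to seconds."""
    return x * 86400.0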
| 3,995 | 29.976744 | 75 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/transforms.py
|
"""
matplotlib includes a framework for arbitrary geometric
transformations that is used to determine the final position of all
elements drawn on the canvas.
Transforms are composed into trees of :class:`TransformNode` objects
whose actual value depends on their children. When the contents of
children change, their parents are automatically invalidated. The
next time an invalidated transform is accessed, it is recomputed to
reflect those changes. This invalidation/caching approach prevents
unnecessary recomputations of transforms, and contributes to better
interactive performance.
For example, here is a graph of the transform tree used to plot data
to the graph:
.. image:: ../_static/transforms.png
The framework can be used for both affine and non-affine
transformations. However, for speed, we want to use the backend
renderers to perform affine transformations whenever possible.
Therefore, it is possible to perform just the affine or non-affine
part of a transformation on a set of data. The affine is always
assumed to occur after the non-affine. For any transform::
full transform == non-affine part + affine part
The backends are not expected to handle non-affine transformations
themselves.
"""
# Note: There are a number of places in the code where we use `np.min` or
# `np.minimum` instead of the builtin `min`, and likewise for `max`. This is
# done so that `nan`s are propagated, instead of being silently dropped.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
from matplotlib._path import (affine_transform, count_bboxes_overlapping_bbox,
update_path_extents)
from numpy.linalg import inv
import re
import weakref
import warnings
from . import cbook
from .path import Path
DEBUG = False
def _indent_str(obj): # textwrap.indent(str(obj), 4) on Py3.
return re.sub("(^|\n)", r"\1 ", str(obj))
class TransformNode(object):
"""
:class:`TransformNode` is the base class for anything that
participates in the transform tree and needs to invalidate its
parents or be invalidated. This includes classes that are not
really transforms, such as bounding boxes, since some transforms
depend on bounding boxes to compute their values.
"""
_gid = 0
# Invalidation may affect only the affine part. If the
# invalidation was "affine-only", the _invalid member is set to
# INVALID_AFFINE_ONLY
INVALID_NON_AFFINE = 1
INVALID_AFFINE = 2
INVALID = INVALID_NON_AFFINE | INVALID_AFFINE
# Some metadata about the transform, used to determine whether an
# invalidation is affine-only
is_affine = False
is_bbox = False
pass_through = False
"""
If pass_through is True, all ancestors will always be
invalidated, even if 'self' is already invalid.
"""
def __init__(self, shorthand_name=None):
"""
Creates a new :class:`TransformNode`.
Parameters
----------
shorthand_name : str
A string representing the "name" of the transform. The name carries
no significance other than to improve the readability of
``str(transform)`` when DEBUG=True.
"""
self._parents = {}
# TransformNodes start out as invalid until their values are
# computed for the first time.
self._invalid = 1
self._shorthand_name = shorthand_name or ''
if DEBUG:
def __str__(self):
# either just return the name of this TransformNode, or it's repr
return self._shorthand_name or repr(self)
def __getstate__(self):
d = self.__dict__.copy()
# turn the dictionary with weak values into a normal dictionary
d['_parents'] = dict((k, v()) for (k, v) in
six.iteritems(self._parents))
return d
def __setstate__(self, data_dict):
self.__dict__ = data_dict
# turn the normal dictionary back into a dictionary with weak
# values
self._parents = dict((k, weakref.ref(v)) for (k, v) in
six.iteritems(self._parents) if v is not None)
def __copy__(self, *args):
raise NotImplementedError(
"TransformNode instances can not be copied. "
"Consider using frozen() instead.")
__deepcopy__ = __copy__
def invalidate(self):
"""
Invalidate this :class:`TransformNode` and triggers an
invalidation of its ancestors. Should be called any
time the transform changes.
"""
value = self.INVALID
if self.is_affine:
value = self.INVALID_AFFINE
return self._invalidate_internal(value, invalidating_node=self)
def _invalidate_internal(self, value, invalidating_node):
"""
Called by :meth:`invalidate` and subsequently ascends the transform
stack calling each TransformNode's _invalidate_internal method.
"""
# determine if this call will be an extension to the invalidation
# status. If not, then a shortcut means that we needn't invoke an
# invalidation up the transform stack as it will already have been
# invalidated.
# N.B This makes the invalidation sticky, once a transform has been
# invalidated as NON_AFFINE, then it will always be invalidated as
# NON_AFFINE even when triggered with a AFFINE_ONLY invalidation.
# In most cases this is not a problem (i.e. for interactive panning and
# zooming) and the only side effect will be on performance.
status_changed = self._invalid < value
if self.pass_through or status_changed:
self._invalid = value
for parent in list(six.itervalues(self._parents)):
# Dereference the weak reference
parent = parent()
if parent is not None:
parent._invalidate_internal(
value=value, invalidating_node=self)
def set_children(self, *children):
"""
Set the children of the transform, to let the invalidation
system know which transforms can invalidate this transform.
Should be called from the constructor of any transforms that
depend on other transforms.
"""
# Parents are stored as weak references, so that if the
# parents are destroyed, references from the children won't
# keep them alive.
for child in children:
child._parents[id(self)] = weakref.ref(self)
if DEBUG:
_set_children = set_children
def set_children(self, *children):
self._set_children(*children)
self._children = children
set_children.__doc__ = _set_children.__doc__
def frozen(self):
"""
Returns a frozen copy of this transform node. The frozen copy
will not update when its children change. Useful for storing
a previously known state of a transform where
``copy.deepcopy()`` might normally be used.
"""
return self
if DEBUG:
def write_graphviz(self, fobj, highlight=[]):
"""
For debugging purposes.
Writes the transform tree rooted at 'self' to a graphviz "dot"
format file. This file can be run through the "dot" utility
to produce a graph of the transform tree.
Affine transforms are marked in blue. Bounding boxes are
marked in yellow.
*fobj*: A Python file-like object
Once the "dot" file has been created, it can be turned into a
png easily with::
$> dot -Tpng -o $OUTPUT_FILE $DOT_FILE
"""
seen = set()
def recurse(root):
if root in seen:
return
seen.add(root)
props = {}
label = root.__class__.__name__
if root._invalid:
label = '[%s]' % label
if root in highlight:
props['style'] = 'bold'
props['shape'] = 'box'
props['label'] = '"%s"' % label
props = ' '.join(['%s=%s' % (key, val)
for key, val
in six.iteritems(props)])
fobj.write('%s [%s];\n' %
(hash(root), props))
if hasattr(root, '_children'):
for child in root._children:
name = '?'
for key, val in six.iteritems(root.__dict__):
if val is child:
name = key
break
fobj.write('"%s" -> "%s" [label="%s", fontsize=10];\n'
% (hash(root),
hash(child),
name))
recurse(child)
fobj.write("digraph G {\n")
recurse(self)
fobj.write("}\n")
class BboxBase(TransformNode):
"""
This is the base class of all bounding boxes, and provides
read-only access to its data. A mutable bounding box is provided
by the :class:`Bbox` class.
The canonical representation is as two points, with no
restrictions on their ordering. Convenience properties are
provided to get the left, bottom, right and top edges and width
and height, but these are not stored explicitly.
"""
is_bbox = True
is_affine = True
if DEBUG:
def _check(points):
if isinstance(points, np.ma.MaskedArray):
warnings.warn("Bbox bounds are a masked array.")
points = np.asarray(points)
if (points[1, 0] - points[0, 0] == 0 or
points[1, 1] - points[0, 1] == 0):
warnings.warn("Singular Bbox.")
_check = staticmethod(_check)
def frozen(self):
return Bbox(self.get_points().copy())
    frozen.__doc__ = TransformNode.frozen.__doc__
def __array__(self, *args, **kwargs):
return self.get_points()
def is_unit(self):
"""
Returns True if the :class:`Bbox` is the unit bounding box
from (0, 0) to (1, 1).
"""
return list(self.get_points().flatten()) == [0., 0., 1., 1.]
@property
def x0(self):
"""
:attr:`x0` is the first of the pair of *x* coordinates that
define the bounding box. :attr:`x0` is not guaranteed to be less than
:attr:`x1`. If you require that, use :attr:`xmin`.
"""
return self.get_points()[0, 0]
@property
def y0(self):
"""
:attr:`y0` is the first of the pair of *y* coordinates that
define the bounding box. :attr:`y0` is not guaranteed to be less than
:attr:`y1`. If you require that, use :attr:`ymin`.
"""
return self.get_points()[0, 1]
@property
def x1(self):
"""
:attr:`x1` is the second of the pair of *x* coordinates that
define the bounding box. :attr:`x1` is not guaranteed to be greater
than :attr:`x0`. If you require that, use :attr:`xmax`.
"""
return self.get_points()[1, 0]
@property
def y1(self):
"""
:attr:`y1` is the second of the pair of *y* coordinates that
define the bounding box. :attr:`y1` is not guaranteed to be greater
than :attr:`y0`. If you require that, use :attr:`ymax`.
"""
return self.get_points()[1, 1]
@property
def p0(self):
"""
:attr:`p0` is the first pair of (*x*, *y*) coordinates that
define the bounding box. It is not guaranteed to be the bottom-left
corner. For that, use :attr:`min`.
"""
return self.get_points()[0]
@property
def p1(self):
"""
:attr:`p1` is the second pair of (*x*, *y*) coordinates that
define the bounding box. It is not guaranteed to be the top-right
corner. For that, use :attr:`max`.
"""
return self.get_points()[1]
@property
def xmin(self):
"""
:attr:`xmin` is the left edge of the bounding box.
"""
return np.min(self.get_points()[:, 0])
@property
def ymin(self):
"""
:attr:`ymin` is the bottom edge of the bounding box.
"""
return np.min(self.get_points()[:, 1])
@property
def xmax(self):
"""
:attr:`xmax` is the right edge of the bounding box.
"""
return np.max(self.get_points()[:, 0])
@property
def ymax(self):
"""
:attr:`ymax` is the top edge of the bounding box.
"""
return np.max(self.get_points()[:, 1])
@property
def min(self):
"""
:attr:`min` is the bottom-left corner of the bounding box.
"""
return np.min(self.get_points(), axis=0)
@property
def max(self):
"""
:attr:`max` is the top-right corner of the bounding box.
"""
return np.max(self.get_points(), axis=0)
@property
def intervalx(self):
"""
:attr:`intervalx` is the pair of *x* coordinates that define
the bounding box. It is not guaranteed to be sorted from left to right.
"""
return self.get_points()[:, 0]
@property
def intervaly(self):
"""
:attr:`intervaly` is the pair of *y* coordinates that define
the bounding box. It is not guaranteed to be sorted from bottom to
top.
"""
return self.get_points()[:, 1]
@property
def width(self):
"""
The width of the bounding box. It may be negative if
:attr:`x1` < :attr:`x0`.
"""
points = self.get_points()
return points[1, 0] - points[0, 0]
@property
def height(self):
"""
The height of the bounding box. It may be negative if
:attr:`y1` < :attr:`y0`.
"""
points = self.get_points()
return points[1, 1] - points[0, 1]
@property
def size(self):
"""
The width and height of the bounding box. May be negative,
in the same way as :attr:`width` and :attr:`height`.
"""
points = self.get_points()
return points[1] - points[0]
@property
def bounds(self):
"""
Returns (:attr:`x0`, :attr:`y0`, :attr:`width`,
:attr:`height`).
"""
x0, y0, x1, y1 = self.get_points().flatten()
return (x0, y0, x1 - x0, y1 - y0)
@property
def extents(self):
"""
Returns (:attr:`x0`, :attr:`y0`, :attr:`x1`,
:attr:`y1`).
"""
return self.get_points().flatten().copy()
def get_points(self):
raise NotImplementedError
def containsx(self, x):
"""
Returns whether *x* is in the closed (:attr:`x0`, :attr:`x1`) interval.
"""
x0, x1 = self.intervalx
return x0 <= x <= x1 or x0 >= x >= x1
def containsy(self, y):
"""
Returns whether *y* is in the closed (:attr:`y0`, :attr:`y1`) interval.
"""
y0, y1 = self.intervaly
return y0 <= y <= y1 or y0 >= y >= y1
def contains(self, x, y):
"""
Returns whether ``(x, y)`` is in the bounding box or on its edge.
"""
return self.containsx(x) and self.containsy(y)
def overlaps(self, other):
"""
Returns whether this bounding box overlaps with the other bounding box.
Parameters
----------
other : BboxBase
"""
ax1, ay1, ax2, ay2 = self.extents
bx1, by1, bx2, by2 = other.extents
if ax2 < ax1:
ax2, ax1 = ax1, ax2
if ay2 < ay1:
ay2, ay1 = ay1, ay2
if bx2 < bx1:
bx2, bx1 = bx1, bx2
if by2 < by1:
by2, by1 = by1, by2
return ax1 <= bx2 and bx1 <= ax2 and ay1 <= by2 and by1 <= ay2
def fully_containsx(self, x):
"""
Returns whether *x* is in the open (:attr:`x0`, :attr:`x1`) interval.
"""
x0, x1 = self.intervalx
return x0 < x < x1 or x0 > x > x1
def fully_containsy(self, y):
"""
Returns whether *y* is in the open (:attr:`y0`, :attr:`y1`) interval.
"""
y0, y1 = self.intervaly
return y0 < y < y1 or y0 > y > y1
def fully_contains(self, x, y):
"""
Returns whether ``x, y`` is in the bounding box, but not on its edge.
"""
return self.fully_containsx(x) and self.fully_containsy(y)
def fully_overlaps(self, other):
"""
Returns whether this bounding box overlaps with the other bounding box,
not including the edges.
Parameters
----------
other : BboxBase
"""
ax1, ay1, ax2, ay2 = self.extents
bx1, by1, bx2, by2 = other.extents
if ax2 < ax1:
ax2, ax1 = ax1, ax2
if ay2 < ay1:
ay2, ay1 = ay1, ay2
if bx2 < bx1:
bx2, bx1 = bx1, bx2
if by2 < by1:
by2, by1 = by1, by2
return ax1 < bx2 and bx1 < ax2 and ay1 < by2 and by1 < ay2
def transformed(self, transform):
"""
Return a new :class:`Bbox` object, statically transformed by
the given transform.
"""
pts = self.get_points()
ll, ul, lr = transform.transform(np.array([pts[0],
[pts[0, 0], pts[1, 1]], [pts[1, 0], pts[0, 1]]]))
return Bbox([ll, [lr[0], ul[1]]])
def inverse_transformed(self, transform):
"""
Return a new :class:`Bbox` object, statically transformed by
the inverse of the given transform.
"""
return self.transformed(transform.inverted())
coefs = {'C': (0.5, 0.5),
'SW': (0, 0),
'S': (0.5, 0),
'SE': (1.0, 0),
'E': (1.0, 0.5),
'NE': (1.0, 1.0),
'N': (0.5, 1.0),
'NW': (0, 1.0),
'W': (0, 0.5)}
def anchored(self, c, container=None):
"""
Return a copy of the :class:`Bbox`, shifted to position *c*
within a container.
Parameters
----------
c :
May be either:
* A sequence (*cx*, *cy*) where *cx* and *cy* range from 0
to 1, where 0 is left or bottom and 1 is right or top
* a string:
- 'C' for centered
- 'S' for bottom-center
              - 'SE' for bottom-right
              - 'E' for right
- etc.
container : Bbox, optional
The box within which the :class:`Bbox` is positioned; it defaults
to the initial :class:`Bbox`.
"""
if container is None:
container = self
l, b, w, h = container.bounds
if isinstance(c, six.string_types):
cx, cy = self.coefs[c]
else:
cx, cy = c
L, B, W, H = self.bounds
return Bbox(self._points +
[(l + cx * (w - W)) - L,
(b + cy * (h - H)) - B])
def shrunk(self, mx, my):
"""
Return a copy of the :class:`Bbox`, shrunk by the factor *mx*
in the *x* direction and the factor *my* in the *y* direction.
The lower left corner of the box remains unchanged. Normally
*mx* and *my* will be less than 1, but this is not enforced.
"""
w, h = self.size
return Bbox([self._points[0],
self._points[0] + [mx * w, my * h]])
def shrunk_to_aspect(self, box_aspect, container=None, fig_aspect=1.0):
"""
Return a copy of the :class:`Bbox`, shrunk so that it is as
large as it can be while having the desired aspect ratio,
*box_aspect*. If the box coordinates are relative---that
is, fractions of a larger box such as a figure---then the
physical aspect ratio of that figure is specified with
*fig_aspect*, so that *box_aspect* can also be given as a
ratio of the absolute dimensions, not the relative dimensions.
"""
if box_aspect <= 0 or fig_aspect <= 0:
raise ValueError("'box_aspect' and 'fig_aspect' must be positive")
if container is None:
container = self
w, h = container.size
H = w * box_aspect / fig_aspect
if H <= h:
W = w
else:
W = h * fig_aspect / box_aspect
H = h
return Bbox([self._points[0],
self._points[0] + (W, H)])
def splitx(self, *args):
"""
e.g., ``bbox.splitx(f1, f2, ...)``
Returns a list of new :class:`Bbox` objects formed by
splitting the original one with vertical lines at fractional
positions *f1*, *f2*, ...
"""
xf = [0] + list(args) + [1]
x0, y0, x1, y1 = self.extents
w = x1 - x0
return [Bbox([[x0 + xf0 * w, y0], [x0 + xf1 * w, y1]])
for xf0, xf1 in zip(xf[:-1], xf[1:])]
def splity(self, *args):
"""
        e.g., ``bbox.splity(f1, f2, ...)``
Returns a list of new :class:`Bbox` objects formed by
splitting the original one with horizontal lines at fractional
positions *f1*, *f2*, ...
"""
yf = [0] + list(args) + [1]
x0, y0, x1, y1 = self.extents
h = y1 - y0
return [Bbox([[x0, y0 + yf0 * h], [x1, y0 + yf1 * h]])
for yf0, yf1 in zip(yf[:-1], yf[1:])]
def count_contains(self, vertices):
"""
Count the number of vertices contained in the :class:`Bbox`.
Any vertices with a non-finite x or y value are ignored.
Parameters
----------
vertices : Nx2 Numpy array.
"""
if len(vertices) == 0:
return 0
vertices = np.asarray(vertices)
with np.errstate(invalid='ignore'):
return (((self.min < vertices) &
(vertices < self.max)).all(axis=1).sum())
def count_overlaps(self, bboxes):
"""
Count the number of bounding boxes that overlap this one.
Parameters
----------
bboxes : sequence of :class:`BboxBase` objects
"""
return count_bboxes_overlapping_bbox(
self, np.atleast_3d([np.array(x) for x in bboxes]))
def expanded(self, sw, sh):
"""
Return a new :class:`Bbox` which is this :class:`Bbox`
expanded around its center by the given factors *sw* and
*sh*.
"""
width = self.width
height = self.height
deltaw = (sw * width - width) / 2.0
deltah = (sh * height - height) / 2.0
a = np.array([[-deltaw, -deltah], [deltaw, deltah]])
return Bbox(self._points + a)
def padded(self, p):
"""
Return a new :class:`Bbox` that is padded on all four sides by
the given value.
"""
points = self.get_points()
return Bbox(points + [[-p, -p], [p, p]])
def translated(self, tx, ty):
"""
Return a copy of the :class:`Bbox`, statically translated by
*tx* and *ty*.
"""
return Bbox(self._points + (tx, ty))
def corners(self):
"""
Return an array of points which are the four corners of this
rectangle. For example, if this :class:`Bbox` is defined by
the points (*a*, *b*) and (*c*, *d*), :meth:`corners` returns
(*a*, *b*), (*a*, *d*), (*c*, *b*) and (*c*, *d*).
"""
l, b, r, t = self.get_points().flatten()
return np.array([[l, b], [l, t], [r, b], [r, t]])
def rotated(self, radians):
"""
Return a new bounding box that bounds a rotated version of
this bounding box by the given radians. The new bounding box
is still aligned with the axes, of course.
"""
corners = self.corners()
corners_rotated = Affine2D().rotate(radians).transform(corners)
bbox = Bbox.unit()
bbox.update_from_data_xy(corners_rotated, ignore=True)
return bbox
@staticmethod
def union(bboxes):
"""
Return a :class:`Bbox` that contains all of the given bboxes.
"""
if not len(bboxes):
raise ValueError("'bboxes' cannot be empty")
x0 = np.min([bbox.xmin for bbox in bboxes])
x1 = np.max([bbox.xmax for bbox in bboxes])
y0 = np.min([bbox.ymin for bbox in bboxes])
y1 = np.max([bbox.ymax for bbox in bboxes])
return Bbox([[x0, y0], [x1, y1]])
@staticmethod
def intersection(bbox1, bbox2):
"""
Return the intersection of the two bboxes or None
if they do not intersect.
"""
x0 = np.maximum(bbox1.xmin, bbox2.xmin)
x1 = np.minimum(bbox1.xmax, bbox2.xmax)
y0 = np.maximum(bbox1.ymin, bbox2.ymin)
y1 = np.minimum(bbox1.ymax, bbox2.ymax)
return Bbox([[x0, y0], [x1, y1]]) if x0 <= x1 and y0 <= y1 else None
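# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged example of the two static helpers above: the union of two boxes
# spans both, while their intersection (if any) is the overlap region.
def _example_union_and_intersection():
    """Return (union, intersection) of two overlapping boxes."""
    a = Bbox.from_extents(0, 0, 2, 2)
    b = Bbox.from_extents(1, 1, 3, 3)
    return Bbox.union([a, b]), Bbox.intersection(a, b)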
class Bbox(BboxBase):
"""
A mutable bounding box.
"""
def __init__(self, points, **kwargs):
"""
Parameters
----------
points : ndarray
A 2x2 numpy array of the form ``[[x0, y0], [x1, y1]]``.
Notes
-----
If you need to create a :class:`Bbox` object from another form
of data, consider the static methods :meth:`unit`,
:meth:`from_bounds` and :meth:`from_extents`.
"""
BboxBase.__init__(self, **kwargs)
points = np.asarray(points, float)
if points.shape != (2, 2):
raise ValueError('Bbox points must be of the form '
'"[[x0, y0], [x1, y1]]".')
self._points = points
self._minpos = np.array([np.inf, np.inf])
self._ignore = True
# it is helpful in some contexts to know if the bbox is a
# default or has been mutated; we store the orig points to
# support the mutated methods
self._points_orig = self._points.copy()
if DEBUG:
___init__ = __init__
def __init__(self, points, **kwargs):
self._check(points)
self.___init__(points, **kwargs)
def invalidate(self):
self._check(self._points)
TransformNode.invalidate(self)
@staticmethod
def unit():
"""
(staticmethod) Create a new unit :class:`Bbox` from (0, 0) to
(1, 1).
"""
return Bbox(np.array([[0.0, 0.0], [1.0, 1.0]], float))
@staticmethod
def null():
"""
(staticmethod) Create a new null :class:`Bbox` from (inf, inf) to
(-inf, -inf).
"""
return Bbox(np.array([[np.inf, np.inf], [-np.inf, -np.inf]], float))
@staticmethod
def from_bounds(x0, y0, width, height):
"""
(staticmethod) Create a new :class:`Bbox` from *x0*, *y0*,
*width* and *height*.
*width* and *height* may be negative.
"""
return Bbox.from_extents(x0, y0, x0 + width, y0 + height)
@staticmethod
def from_extents(*args):
"""
(staticmethod) Create a new Bbox from *left*, *bottom*,
*right* and *top*.
The *y*-axis increases upwards.
"""
points = np.array(args, dtype=float).reshape(2, 2)
return Bbox(points)
def __format__(self, fmt):
return (
'Bbox(x0={0.x0:{1}}, y0={0.y0:{1}}, x1={0.x1:{1}}, y1={0.y1:{1}})'.
format(self, fmt))
def __str__(self):
return format(self, '')
def __repr__(self):
return 'Bbox([[{0.x0}, {0.y0}], [{0.x1}, {0.y1}]])'.format(self)
def ignore(self, value):
"""
Set whether the existing bounds of the box should be ignored
by subsequent calls to :meth:`update_from_data_xy`.
value : bool
- When ``True``, subsequent calls to :meth:`update_from_data_xy`
will ignore the existing bounds of the :class:`Bbox`.
- When ``False``, subsequent calls to :meth:`update_from_data_xy`
will include the existing bounds of the :class:`Bbox`.
"""
self._ignore = value
def update_from_path(self, path, ignore=None, updatex=True, updatey=True):
"""
Update the bounds of the :class:`Bbox` based on the passed in
data. After updating, the bounds will have positive *width*
and *height*; *x0* and *y0* will be the minimal values.
Parameters
----------
path : :class:`~matplotlib.path.Path`
ignore : bool, optional
- when ``True``, ignore the existing bounds of the :class:`Bbox`.
- when ``False``, include the existing bounds of the :class:`Bbox`.
- when ``None``, use the last value passed to :meth:`ignore`.
updatex, updatey : bool, optional
When ``True``, update the x/y values.
"""
if ignore is None:
ignore = self._ignore
if path.vertices.size == 0:
return
points, minpos, changed = update_path_extents(
path, None, self._points, self._minpos, ignore)
if changed:
self.invalidate()
if updatex:
self._points[:, 0] = points[:, 0]
self._minpos[0] = minpos[0]
if updatey:
self._points[:, 1] = points[:, 1]
self._minpos[1] = minpos[1]
def update_from_data_xy(self, xy, ignore=None, updatex=True, updatey=True):
"""
Update the bounds of the :class:`Bbox` based on the passed in
data. After updating, the bounds will have positive *width*
and *height*; *x0* and *y0* will be the minimal values.
Parameters
----------
xy : ndarray
A numpy array of 2D points.
ignore : bool, optional
- When ``True``, ignore the existing bounds of the :class:`Bbox`.
- When ``False``, include the existing bounds of the :class:`Bbox`.
- When ``None``, use the last value passed to :meth:`ignore`.
updatex, updatey : bool, optional
When ``True``, update the x/y values.
"""
if len(xy) == 0:
return
path = Path(xy)
self.update_from_path(path, ignore=ignore,
updatex=updatex, updatey=updatey)
@BboxBase.x0.setter
def x0(self, val):
self._points[0, 0] = val
self.invalidate()
@BboxBase.y0.setter
def y0(self, val):
self._points[0, 1] = val
self.invalidate()
@BboxBase.x1.setter
def x1(self, val):
self._points[1, 0] = val
self.invalidate()
@BboxBase.y1.setter
def y1(self, val):
self._points[1, 1] = val
self.invalidate()
@BboxBase.p0.setter
def p0(self, val):
self._points[0] = val
self.invalidate()
@BboxBase.p1.setter
def p1(self, val):
self._points[1] = val
self.invalidate()
@BboxBase.intervalx.setter
def intervalx(self, interval):
self._points[:, 0] = interval
self.invalidate()
@BboxBase.intervaly.setter
def intervaly(self, interval):
self._points[:, 1] = interval
self.invalidate()
@BboxBase.bounds.setter
def bounds(self, bounds):
l, b, w, h = bounds
points = np.array([[l, b], [l + w, b + h]], float)
if np.any(self._points != points):
self._points = points
self.invalidate()
@property
def minpos(self):
return self._minpos
@property
def minposx(self):
return self._minpos[0]
@property
def minposy(self):
return self._minpos[1]
def get_points(self):
"""
Get the points of the bounding box directly as a numpy array
of the form: ``[[x0, y0], [x1, y1]]``.
"""
self._invalid = 0
return self._points
def set_points(self, points):
"""
Set the points of the bounding box directly from a numpy array
of the form: ``[[x0, y0], [x1, y1]]``. No error checking is
performed, as this method is mainly for internal use.
"""
if np.any(self._points != points):
self._points = points
self.invalidate()
def set(self, other):
"""
Set this bounding box from the "frozen" bounds of another
:class:`Bbox`.
"""
if np.any(self._points != other.get_points()):
self._points = other.get_points()
self.invalidate()
def mutated(self):
'Return whether the bbox has changed since init.'
return self.mutatedx() or self.mutatedy()
def mutatedx(self):
'Return whether the x-limits have changed since init.'
return (self._points[0, 0] != self._points_orig[0, 0] or
self._points[1, 0] != self._points_orig[1, 0])
def mutatedy(self):
'Return whether the y-limits have changed since init.'
return (self._points[0, 1] != self._points_orig[0, 1] or
self._points[1, 1] != self._points_orig[1, 1])
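# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged example of growing a mutable Bbox from data: starting from a
# null box, update_from_data_xy() expands it to cover the given points.
def _example_bbox_from_points():
    """Return a Bbox tightly covering three sample points."""
    box = Bbox.null()
    box.update_from_data_xy(np.array([[0.0, 1.0], [2.0, -1.0], [1.5, 4.0]]))
    return box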
class TransformedBbox(BboxBase):
"""
A :class:`Bbox` that is automatically transformed by a given
transform. When either the child bounding box or transform
changes, the bounds of this bbox will update accordingly.
"""
def __init__(self, bbox, transform, **kwargs):
"""
Parameters
----------
bbox : :class:`Bbox`
transform : :class:`Transform`
"""
if not bbox.is_bbox:
raise ValueError("'bbox' is not a bbox")
if not isinstance(transform, Transform):
raise ValueError("'transform' must be an instance of "
"'matplotlib.transform.Transform'")
if transform.input_dims != 2 or transform.output_dims != 2:
raise ValueError(
"The input and output dimensions of 'transform' must be 2")
BboxBase.__init__(self, **kwargs)
self._bbox = bbox
self._transform = transform
self.set_children(bbox, transform)
self._points = None
def __str__(self):
return ("{}(\n"
"{},\n"
"{})"
.format(type(self).__name__,
_indent_str(self._bbox),
_indent_str(self._transform)))
__repr__ = __str__
def get_points(self):
if self._invalid:
p = self._bbox.get_points()
# Transform all four points, then make a new bounding box
# from the result, taking care to make the orientation the
# same.
points = self._transform.transform(
[[p[0, 0], p[0, 1]],
[p[1, 0], p[0, 1]],
[p[0, 0], p[1, 1]],
[p[1, 0], p[1, 1]]])
points = np.ma.filled(points, 0.0)
xs = min(points[:, 0]), max(points[:, 0])
if p[0, 0] > p[1, 0]:
xs = xs[::-1]
ys = min(points[:, 1]), max(points[:, 1])
if p[0, 1] > p[1, 1]:
ys = ys[::-1]
self._points = np.array([
[xs[0], ys[0]],
[xs[1], ys[1]]
])
self._invalid = 0
return self._points
get_points.__doc__ = Bbox.get_points.__doc__
if DEBUG:
_get_points = get_points
def get_points(self):
points = self._get_points()
self._check(points)
return points
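# --- Illustrative usage sketch (added by the editor, not upstream code) ---
# A hedged example of TransformedBbox: the wrapper tracks its child bbox
# and transform, so its reported points follow any change to either.
def _example_transformed_bbox():
    """Return the extents of the unit box scaled by (2, 3)."""
    child = Bbox.unit()
    scaled = TransformedBbox(child, Affine2D().scale(2.0, 3.0))
    return scaled.extents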
class LockableBbox(BboxBase):
"""
A :class:`Bbox` where some elements may be locked at certain values.
When the child bounding box changes, the bounds of this bbox will update
accordingly with the exception of the locked elements.
"""
def __init__(self, bbox, x0=None, y0=None, x1=None, y1=None, **kwargs):
"""
Parameters
----------
bbox : Bbox
The child bounding box to wrap.
x0 : float or None
The locked value for x0, or None to leave unlocked.
y0 : float or None
The locked value for y0, or None to leave unlocked.
x1 : float or None
The locked value for x1, or None to leave unlocked.
y1 : float or None
The locked value for y1, or None to leave unlocked.
"""
if not bbox.is_bbox:
raise ValueError("'bbox' is not a bbox")
BboxBase.__init__(self, **kwargs)
self._bbox = bbox
self.set_children(bbox)
self._points = None
fp = [x0, y0, x1, y1]
mask = [val is None for val in fp]
self._locked_points = np.ma.array(fp, float, mask=mask).reshape((2, 2))
def __str__(self):
return ("{}(\n"
"{},\n"
"{})"
.format(type(self).__name__,
_indent_str(self._bbox),
_indent_str(self._locked_points)))
__repr__ = __str__
def get_points(self):
if self._invalid:
points = self._bbox.get_points()
self._points = np.where(self._locked_points.mask,
points,
self._locked_points)
self._invalid = 0
return self._points
get_points.__doc__ = Bbox.get_points.__doc__
if DEBUG:
_get_points = get_points
def get_points(self):
points = self._get_points()
self._check(points)
return points
@property
def locked_x0(self):
"""
float or None: The value used for the locked x0.
"""
if self._locked_points.mask[0, 0]:
return None
else:
return self._locked_points[0, 0]
@locked_x0.setter
def locked_x0(self, x0):
self._locked_points.mask[0, 0] = x0 is None
self._locked_points.data[0, 0] = x0
self.invalidate()
@property
def locked_y0(self):
"""
float or None: The value used for the locked y0.
"""
if self._locked_points.mask[0, 1]:
return None
else:
return self._locked_points[0, 1]
@locked_y0.setter
def locked_y0(self, y0):
self._locked_points.mask[0, 1] = y0 is None
self._locked_points.data[0, 1] = y0
self.invalidate()
@property
def locked_x1(self):
"""
float or None: The value used for the locked x1.
"""
if self._locked_points.mask[1, 0]:
return None
else:
return self._locked_points[1, 0]
@locked_x1.setter
def locked_x1(self, x1):
self._locked_points.mask[1, 0] = x1 is None
self._locked_points.data[1, 0] = x1
self.invalidate()
@property
def locked_y1(self):
"""
float or None: The value used for the locked y1.
"""
if self._locked_points.mask[1, 1]:
return None
else:
return self._locked_points[1, 1]
@locked_y1.setter
def locked_y1(self, y1):
self._locked_points.mask[1, 1] = y1 is None
self._locked_points.data[1, 1] = y1
self.invalidate()
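# Editor's illustrative sketch (not part of the matplotlib source): how a
# LockableBbox tracks its child bbox while pinning the locked edges.  The
# function name and all values below are arbitrary examples.
def _editor_demo_lockable_bbox():
    child = Bbox.from_extents(0., 0., 1., 1.)
    locked = LockableBbox(child, x0=-5.)
    # Move the child; the wrapper follows every edge except the locked x0.
    child.set_points(np.array([[0., 0.], [2., 3.]]))
    return locked.get_points()   # -> [[-5., 0.], [2., 3.]]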
class Transform(TransformNode):
"""
The base class of all :class:`TransformNode` instances that
actually perform a transformation.
All non-affine transformations should be subclasses of this class.
New affine transformations should be subclasses of
:class:`Affine2D`.
Subclasses of this class should override the following members (at
minimum):
- :attr:`input_dims`
- :attr:`output_dims`
- :meth:`transform`
- :attr:`is_separable`
- :attr:`has_inverse`
- :meth:`inverted` (if :attr:`has_inverse` is True)
If the transform needs to do something non-standard with
:class:`matplotlib.path.Path` objects, such as adding curves
where there were once line segments, it should override:
- :meth:`transform_path`
"""
input_dims = None
"""
The number of input dimensions of this transform.
Must be overridden (with integers) in the subclass.
"""
output_dims = None
"""
The number of output dimensions of this transform.
Must be overridden (with integers) in the subclass.
"""
has_inverse = False
"""True if this transform has a corresponding inverse transform."""
is_separable = False
"""True if this transform is separable in the x- and y- dimensions."""
def __add__(self, other):
"""
Composes two transforms together such that *self* is followed
by *other*.
"""
if isinstance(other, Transform):
return composite_transform_factory(self, other)
raise TypeError(
"Can not add Transform to object of type '%s'" % type(other))
def __radd__(self, other):
"""
Composes two transforms together such that *self* is followed
by *other*.
"""
if isinstance(other, Transform):
return composite_transform_factory(other, self)
raise TypeError(
"Can not add Transform to object of type '%s'" % type(other))
# Equality is based on object identity for `Transform`s (so we don't
# override `__eq__`), but some subclasses, such as TransformWrapper &
# AffineBase, override this behavior.
if six.PY2:
def __ne__(self, other):
return not (self == other)
def _iter_break_from_left_to_right(self):
"""
Returns an iterator breaking down this transform stack from left to
right recursively. If self == ((A, N), A) then the result will be an
iterator which yields I : ((A, N), A), followed by A : (N, A),
followed by (A, N) : (A), but not ((A, N), A) : I.
This is equivalent to flattening the stack then yielding
``flat_stack[:i], flat_stack[i:]`` where i=0..(n-1).
"""
yield IdentityTransform(), self
@property
def depth(self):
"""
Returns the number of transforms which have been chained
together to form this Transform instance.
.. note::
For the special case of a Composite transform, the maximum depth
of the two is returned.
"""
return 1
def contains_branch(self, other):
"""
Return whether the given transform is a sub-tree of this transform.
This routine uses transform equality to identify sub-trees, therefore
in many situations it is object id which will be used.
For the case where the given transform represents the whole
of this transform, returns True.
"""
if self.depth < other.depth:
return False
# check that a subtree is equal to other (starting from self)
for _, sub_tree in self._iter_break_from_left_to_right():
if sub_tree == other:
return True
return False
def contains_branch_seperately(self, other_transform):
"""
Returns whether the given branch is a sub-tree of this transform on
each separate dimension.
A common use for this method is to identify if a transform is a blended
transform containing an axes' data transform. e.g.::
x_isdata, y_isdata = trans.contains_branch_seperately(ax.transData)
"""
if self.output_dims != 2:
raise ValueError('contains_branch_seperately only supports '
'transforms with 2 output dimensions')
# for a non-blended transform each separate dimension is the same, so
# just return the appropriate shape.
return [self.contains_branch(other_transform)] * 2
def __sub__(self, other):
"""
Returns a transform stack which goes all the way down self's transform
stack, and then ascends back up other's stack. If it can, this is
optimised::
# normally
A - B == a + b.inverted()
# sometimes, when A contains the tree B there is no need to
# descend all the way down to the base of A (via B), instead we
# can just stop at B.
(A + B) - (B)^-1 == A
            # similarly, when B contains tree A, we can avoid descending A at
# all, basically:
A - (A + B) == ((B + A) - A).inverted() or B^-1
For clarity, the result of ``(A + B) - B + B == (A + B)``.
"""
# we only know how to do this operation if other is a Transform.
if not isinstance(other, Transform):
return NotImplemented
for remainder, sub_tree in self._iter_break_from_left_to_right():
if sub_tree == other:
return remainder
for remainder, sub_tree in other._iter_break_from_left_to_right():
if sub_tree == self:
if not remainder.has_inverse:
                    raise ValueError("The shortcut cannot be computed since "
                        "other's transform includes a non-invertible component.")
return remainder.inverted()
# if we have got this far, then there was no shortcut possible
if other.has_inverse:
return self + other.inverted()
else:
raise ValueError('It is not possible to compute transA - transB '
'since transB cannot be inverted and there is no '
'shortcut possible.')
def __array__(self, *args, **kwargs):
"""
Array interface to get at this Transform's affine matrix.
"""
return self.get_affine().get_matrix()
def transform(self, values):
"""
Performs the transformation on the given array of values.
Accepts a numpy array of shape (N x :attr:`input_dims`) and
returns a numpy array of shape (N x :attr:`output_dims`).
Alternatively, accepts a numpy array of length :attr:`input_dims`
and returns a numpy array of length :attr:`output_dims`.
"""
# Ensure that values is a 2d array (but remember whether
# we started with a 1d or 2d array).
values = np.asanyarray(values)
ndim = values.ndim
values = values.reshape((-1, self.input_dims))
# Transform the values
res = self.transform_affine(self.transform_non_affine(values))
# Convert the result back to the shape of the input values.
if ndim == 0:
assert not np.ma.is_masked(res) # just to be on the safe side
return res[0, 0]
if ndim == 1:
return res.reshape(-1)
elif ndim == 2:
return res
raise ValueError(
"Input values must have shape (N x {dims}) "
"or ({dims}).".format(dims=self.input_dims))
def transform_affine(self, values):
"""
Performs only the affine part of this transformation on the
given array of values.
``transform(values)`` is always equivalent to
``transform_affine(transform_non_affine(values))``.
In non-affine transformations, this is generally a no-op. In
affine transformations, this is equivalent to
``transform(values)``.
Accepts a numpy array of shape (N x :attr:`input_dims`) and
returns a numpy array of shape (N x :attr:`output_dims`).
Alternatively, accepts a numpy array of length :attr:`input_dims`
and returns a numpy array of length :attr:`output_dims`.
"""
return self.get_affine().transform(values)
def transform_non_affine(self, values):
"""
Performs only the non-affine part of the transformation.
``transform(values)`` is always equivalent to
``transform_affine(transform_non_affine(values))``.
In non-affine transformations, this is generally equivalent to
``transform(values)``. In affine transformations, this is
always a no-op.
Accepts a numpy array of shape (N x :attr:`input_dims`) and
returns a numpy array of shape (N x :attr:`output_dims`).
Alternatively, accepts a numpy array of length :attr:`input_dims`
and returns a numpy array of length :attr:`output_dims`.
"""
return values
def transform_bbox(self, bbox):
"""
Transform the given bounding box.
Note, for smarter transforms including caching (a common
requirement for matplotlib figures), see :class:`TransformedBbox`.
"""
return Bbox(self.transform(bbox.get_points()))
def get_affine(self):
"""
Get the affine part of this transform.
"""
return IdentityTransform()
def get_matrix(self):
"""
Get the Affine transformation array for the affine part
of this transform.
"""
return self.get_affine().get_matrix()
def transform_point(self, point):
"""
A convenience function that returns the transformed copy of a
single point.
The point is given as a sequence of length :attr:`input_dims`.
The transformed point is returned as a sequence of length
:attr:`output_dims`.
"""
if len(point) != self.input_dims:
raise ValueError("The length of 'point' must be 'self.input_dims'")
return self.transform(np.asarray([point]))[0]
def transform_path(self, path):
"""
Returns a transformed path.
*path*: a :class:`~matplotlib.path.Path` instance.
In some cases, this transform may insert curves into the path
that began as line segments.
"""
return self.transform_path_affine(self.transform_path_non_affine(path))
def transform_path_affine(self, path):
"""
Returns a path, transformed only by the affine part of
this transform.
*path*: a :class:`~matplotlib.path.Path` instance.
``transform_path(path)`` is equivalent to
``transform_path_affine(transform_path_non_affine(values))``.
"""
return self.get_affine().transform_path_affine(path)
def transform_path_non_affine(self, path):
"""
Returns a path, transformed only by the non-affine
part of this transform.
*path*: a :class:`~matplotlib.path.Path` instance.
``transform_path(path)`` is equivalent to
``transform_path_affine(transform_path_non_affine(values))``.
"""
x = self.transform_non_affine(path.vertices)
return Path._fast_from_codes_and_verts(x, path.codes,
{'interpolation_steps': path._interpolation_steps,
'should_simplify': path.should_simplify})
def transform_angles(self, angles, pts, radians=False, pushoff=1e-5):
"""
Performs transformation on a set of angles anchored at
specific locations.
The *angles* must be a column vector (i.e., numpy array).
The *pts* must be a two-column numpy array of x,y positions
(angle transforms currently only work in 2D). This array must
have the same number of rows as *angles*.
*radians* indicates whether or not input angles are given in
radians (True) or degrees (False; the default).
*pushoff* is the distance to move away from *pts* for
determining transformed angles (see discussion of method
below).
The transformed angles are returned in an array with the same
size as *angles*.
The generic version of this method uses a very generic
algorithm that transforms *pts*, as well as locations very
close to *pts*, to find the angle in the transformed system.
"""
# Must be 2D
if self.input_dims != 2 or self.output_dims != 2:
raise NotImplementedError('Only defined in 2D')
if pts.shape[1] != 2:
raise ValueError("'pts' must be array with 2 columns for x,y")
if angles.ndim != 1 or angles.shape[0] != pts.shape[0]:
raise ValueError("'angles' must be a column vector and have same "
"number of rows as 'pts'")
# Convert to radians if desired
if not radians:
angles = angles / 180.0 * np.pi
# Move a short distance away
pts2 = pts + pushoff * np.c_[np.cos(angles), np.sin(angles)]
# Transform both sets of points
tpts = self.transform(pts)
tpts2 = self.transform(pts2)
# Calculate transformed angles
d = tpts2 - tpts
a = np.arctan2(d[:, 1], d[:, 0])
# Convert back to degrees if desired
if not radians:
a = np.rad2deg(a)
return a
def inverted(self):
"""
Return the corresponding inverse transformation.
The return value of this method should be treated as
temporary. An update to *self* does not cause a corresponding
update to its inverted copy.
``x === self.inverted().transform(self.transform(x))``
"""
raise NotImplementedError()
def __repr__(self):
return str(self)
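# Editor's illustrative sketch (not part of the matplotlib source): a minimal
# non-affine Transform subclass following the contract described in the class
# docstring above.  Only the non-affine part is overridden; the inherited
# ``transform`` composes it with the (identity) affine part.  The class name
# is arbitrary.
class _EditorDemoSquareTransform(Transform):
    input_dims = 2
    output_dims = 2
    is_separable = True
    has_inverse = False
    def transform_non_affine(self, values):
        # square each coordinate while preserving its sign, e.g.
        # _EditorDemoSquareTransform().transform([[2., -3.]]) -> [[4., -9.]]
        values = np.asarray(values, float)
        return np.sign(values) * values ** 2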
class TransformWrapper(Transform):
"""
A helper class that holds a single child transform and acts
equivalently to it.
This is useful if a node of the transform tree must be replaced at
run time with a transform of a different type. This class allows
that replacement to correctly trigger invalidation.
Note that :class:`TransformWrapper` instances must have the same
input and output dimensions during their entire lifetime, so the
child transform may only be replaced with another child transform
of the same dimensions.
"""
pass_through = True
def __init__(self, child):
"""
        *child*: A :class:`Transform` instance. This child may later
be replaced with :meth:`set`.
"""
if not isinstance(child, Transform):
            raise ValueError("'child' must be an instance of "
                             "'matplotlib.transforms.Transform'")
self._init(child)
self.set_children(child)
def _init(self, child):
Transform.__init__(self)
self.input_dims = child.input_dims
self.output_dims = child.output_dims
self._set(child)
self._invalid = 0
def __eq__(self, other):
return self._child.__eq__(other)
# NOTE: Transform.__[gs]etstate__ should be sufficient when using only
# Python 3.4+.
def __getstate__(self):
# only store the child information and parents
return {
'child': self._child,
'input_dims': self.input_dims,
'output_dims': self.output_dims,
# turn the weak-values dictionary into a normal dictionary
'parents': dict((k, v()) for (k, v) in
six.iteritems(self._parents))
}
def __setstate__(self, state):
# re-initialise the TransformWrapper with the state's child
self._init(state['child'])
# The child may not be unpickled yet, so restore its information.
self.input_dims = state['input_dims']
self.output_dims = state['output_dims']
# turn the normal dictionary back into a dictionary with weak
# values
self._parents = dict((k, weakref.ref(v)) for (k, v) in
six.iteritems(state['parents']) if v is not None)
def __str__(self):
return ("{}(\n"
"{})"
.format(type(self).__name__,
_indent_str(self._child)))
def frozen(self):
return self._child.frozen()
frozen.__doc__ = Transform.frozen.__doc__
def _set(self, child):
self._child = child
self.transform = child.transform
self.transform_affine = child.transform_affine
self.transform_non_affine = child.transform_non_affine
self.transform_path = child.transform_path
self.transform_path_affine = child.transform_path_affine
self.transform_path_non_affine = child.transform_path_non_affine
self.get_affine = child.get_affine
self.inverted = child.inverted
self.get_matrix = child.get_matrix
# note we do not wrap other properties here since the transform's
        # child can be changed with TransformWrapper.set and so checking
# is_affine and other such properties may be dangerous.
def set(self, child):
"""
Replace the current child of this transform with another one.
The new child must have the same number of input and output
dimensions as the current child.
"""
if (child.input_dims != self.input_dims or
child.output_dims != self.output_dims):
raise ValueError(
"The new child must have the same number of input and output "
"dimensions as the current child")
self.set_children(child)
self._set(child)
self._invalid = 0
self.invalidate()
self._invalid = 0
def _get_is_affine(self):
return self._child.is_affine
is_affine = property(_get_is_affine)
def _get_is_separable(self):
return self._child.is_separable
is_separable = property(_get_is_separable)
def _get_has_inverse(self):
return self._child.has_inverse
has_inverse = property(_get_has_inverse)
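# Editor's illustrative sketch (not part of the matplotlib source): swapping
# the child of a TransformWrapper invalidates every transform built on top of
# it.  The function name and values are arbitrary; Affine2D is defined later
# in this module, which is fine because the lookup happens at call time.
def _editor_demo_transform_wrapper():
    wrapper = TransformWrapper(Affine2D().scale(2.))
    composed = wrapper + Affine2D().translate(1., 0.)
    before = composed.transform_point((1., 1.))   # -> [3., 2.]
    wrapper.set(Affine2D().scale(10.))            # replace the child in place
    after = composed.transform_point((1., 1.))    # -> [11., 10.]
    return before, after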
class AffineBase(Transform):
"""
The base class of all affine transformations of any number of
dimensions.
"""
is_affine = True
def __init__(self, *args, **kwargs):
Transform.__init__(self, *args, **kwargs)
self._inverted = None
def __array__(self, *args, **kwargs):
# optimises the access of the transform matrix vs the superclass
return self.get_matrix()
@staticmethod
def _concat(a, b):
"""
Concatenates two transformation matrices (represented as numpy
arrays) together.
"""
return np.dot(b, a)
def __eq__(self, other):
if getattr(other, "is_affine", False):
return np.all(self.get_matrix() == other.get_matrix())
return NotImplemented
def transform(self, values):
return self.transform_affine(values)
transform.__doc__ = Transform.transform.__doc__
def transform_affine(self, values):
raise NotImplementedError('Affine subclasses should override this '
'method.')
transform_affine.__doc__ = Transform.transform_affine.__doc__
def transform_non_affine(self, points):
return points
transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__
def transform_path(self, path):
return self.transform_path_affine(path)
transform_path.__doc__ = Transform.transform_path.__doc__
def transform_path_affine(self, path):
return Path(self.transform_affine(path.vertices),
path.codes, path._interpolation_steps)
transform_path_affine.__doc__ = Transform.transform_path_affine.__doc__
def transform_path_non_affine(self, path):
return path
transform_path_non_affine.__doc__ = Transform.transform_path_non_affine.__doc__
def get_affine(self):
return self
get_affine.__doc__ = Transform.get_affine.__doc__
class Affine2DBase(AffineBase):
"""
The base class of all 2D affine transformations.
2D affine transformations are performed using a 3x3 numpy array::
a c e
b d f
0 0 1
This class provides the read-only interface. For a mutable 2D
affine transformation, use :class:`Affine2D`.
Subclasses of this class will generally only need to override a
constructor and :meth:`get_matrix` that generates a custom 3x3 matrix.
"""
has_inverse = True
input_dims = 2
output_dims = 2
def frozen(self):
return Affine2D(self.get_matrix().copy())
frozen.__doc__ = AffineBase.frozen.__doc__
def _get_is_separable(self):
mtx = self.get_matrix()
return mtx[0, 1] == 0.0 and mtx[1, 0] == 0.0
is_separable = property(_get_is_separable)
def to_values(self):
"""
Return the values of the matrix as a sequence (a,b,c,d,e,f)
"""
mtx = self.get_matrix()
return tuple(mtx[:2].swapaxes(0, 1).flatten())
@staticmethod
def matrix_from_values(a, b, c, d, e, f):
"""
(staticmethod) Create a new transformation matrix as a 3x3
numpy array of the form::
a c e
b d f
0 0 1
"""
return np.array([[a, c, e], [b, d, f], [0.0, 0.0, 1.0]], float)
def transform_affine(self, points):
mtx = self.get_matrix()
if isinstance(points, np.ma.MaskedArray):
tpoints = affine_transform(points.data, mtx)
return np.ma.MaskedArray(tpoints, mask=np.ma.getmask(points))
return affine_transform(points, mtx)
def transform_point(self, point):
mtx = self.get_matrix()
return affine_transform([point], mtx)[0]
transform_point.__doc__ = AffineBase.transform_point.__doc__
if DEBUG:
_transform_affine = transform_affine
def transform_affine(self, points):
            # The major speed trap here is just converting the
# points to an array in the first place. If we can use
# more arrays upstream, that should help here.
if not isinstance(points, (np.ma.MaskedArray, np.ndarray)):
warnings.warn(
('A non-numpy array of type %s was passed in for ' +
'transformation. Please correct this.')
% type(points))
return self._transform_affine(points)
transform_affine.__doc__ = AffineBase.transform_affine.__doc__
def inverted(self):
if self._inverted is None or self._invalid:
mtx = self.get_matrix()
shorthand_name = None
if self._shorthand_name:
shorthand_name = '(%s)-1' % self._shorthand_name
self._inverted = Affine2D(inv(mtx), shorthand_name=shorthand_name)
self._invalid = 0
return self._inverted
inverted.__doc__ = AffineBase.inverted.__doc__
class Affine2D(Affine2DBase):
"""
A mutable 2D affine transformation.
"""
def __init__(self, matrix=None, **kwargs):
"""
Initialize an Affine transform from a 3x3 numpy float array::
a c e
b d f
0 0 1
If *matrix* is None, initialize with the identity transform.
"""
Affine2DBase.__init__(self, **kwargs)
if matrix is None:
# A bit faster than np.identity(3).
matrix = IdentityTransform._mtx.copy()
self._mtx = matrix
self._invalid = 0
def __str__(self):
return ("{}(\n"
"{})"
.format(type(self).__name__,
_indent_str(self._mtx)))
@staticmethod
def from_values(a, b, c, d, e, f):
"""
(staticmethod) Create a new Affine2D instance from the given
values::
a c e
b d f
0 0 1
.
"""
return Affine2D(
np.array([a, c, e, b, d, f, 0.0, 0.0, 1.0], float).reshape((3, 3)))
def get_matrix(self):
"""
Get the underlying transformation matrix as a 3x3 numpy array::
a c e
b d f
0 0 1
.
"""
self._invalid = 0
return self._mtx
def set_matrix(self, mtx):
"""
Set the underlying transformation matrix from a 3x3 numpy array::
a c e
b d f
0 0 1
.
"""
self._mtx = mtx
self.invalidate()
def set(self, other):
"""
Set this transformation from the frozen copy of another
:class:`Affine2DBase` object.
"""
if not isinstance(other, Affine2DBase):
            raise ValueError("'other' must be an instance of "
                             "'matplotlib.transforms.Affine2DBase'")
self._mtx = other.get_matrix()
self.invalidate()
@staticmethod
def identity():
"""
(staticmethod) Return a new :class:`Affine2D` object that is
the identity transform.
Unless this transform will be mutated later on, consider using
the faster :class:`IdentityTransform` class instead.
"""
return Affine2D()
def clear(self):
"""
Reset the underlying matrix to the identity transform.
"""
# A bit faster than np.identity(3).
self._mtx = IdentityTransform._mtx.copy()
self.invalidate()
return self
def rotate(self, theta):
"""
Add a rotation (in radians) to this transform in place.
Returns *self*, so this method can easily be chained with more
calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
and :meth:`scale`.
"""
a = np.cos(theta)
b = np.sin(theta)
rotate_mtx = np.array([[a, -b, 0.0], [b, a, 0.0], [0.0, 0.0, 1.0]],
float)
self._mtx = np.dot(rotate_mtx, self._mtx)
self.invalidate()
return self
def rotate_deg(self, degrees):
"""
Add a rotation (in degrees) to this transform in place.
Returns *self*, so this method can easily be chained with more
calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
and :meth:`scale`.
"""
return self.rotate(np.deg2rad(degrees))
def rotate_around(self, x, y, theta):
"""
Add a rotation (in radians) around the point (x, y) in place.
Returns *self*, so this method can easily be chained with more
calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
and :meth:`scale`.
"""
return self.translate(-x, -y).rotate(theta).translate(x, y)
def rotate_deg_around(self, x, y, degrees):
"""
Add a rotation (in degrees) around the point (x, y) in place.
Returns *self*, so this method can easily be chained with more
calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
and :meth:`scale`.
"""
# Cast to float to avoid wraparound issues with uint8's
x, y = float(x), float(y)
return self.translate(-x, -y).rotate_deg(degrees).translate(x, y)
def translate(self, tx, ty):
"""
Adds a translation in place.
Returns *self*, so this method can easily be chained with more
calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
and :meth:`scale`.
"""
translate_mtx = np.array(
[[1.0, 0.0, tx], [0.0, 1.0, ty], [0.0, 0.0, 1.0]], float)
self._mtx = np.dot(translate_mtx, self._mtx)
self.invalidate()
return self
def scale(self, sx, sy=None):
"""
Adds a scale in place.
If *sy* is None, the same scale is applied in both the *x*- and
*y*-directions.
Returns *self*, so this method can easily be chained with more
calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
and :meth:`scale`.
"""
if sy is None:
sy = sx
scale_mtx = np.array(
[[sx, 0.0, 0.0], [0.0, sy, 0.0], [0.0, 0.0, 1.0]], float)
self._mtx = np.dot(scale_mtx, self._mtx)
self.invalidate()
return self
def skew(self, xShear, yShear):
"""
Adds a skew in place.
*xShear* and *yShear* are the shear angles along the *x*- and
*y*-axes, respectively, in radians.
Returns *self*, so this method can easily be chained with more
calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
and :meth:`scale`.
"""
rotX = np.tan(xShear)
rotY = np.tan(yShear)
skew_mtx = np.array(
[[1.0, rotX, 0.0], [rotY, 1.0, 0.0], [0.0, 0.0, 1.0]], float)
self._mtx = np.dot(skew_mtx, self._mtx)
self.invalidate()
return self
def skew_deg(self, xShear, yShear):
"""
Adds a skew in place.
*xShear* and *yShear* are the shear angles along the *x*- and
*y*-axes, respectively, in degrees.
Returns *self*, so this method can easily be chained with more
calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
and :meth:`scale`.
"""
return self.skew(np.deg2rad(xShear), np.deg2rad(yShear))
def _get_is_separable(self):
mtx = self.get_matrix()
return mtx[0, 1] == 0.0 and mtx[1, 0] == 0.0
is_separable = property(_get_is_separable)
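# Editor's illustrative sketch (not part of the matplotlib source): the
# in-place helpers above return *self*, so an Affine2D is typically built by
# chaining them.  The function name and values are arbitrary.
def _editor_demo_affine2d_chaining():
    t = Affine2D().rotate_deg(90.).translate(1., 0.).scale(2.)
    forward = t.transform_point((1., 0.))              # approx. [2., 2.]
    roundtrip = t.inverted().transform_point(forward)  # approx. [1., 0.]
    return forward, roundtrip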
class IdentityTransform(Affine2DBase):
"""
A special class that does one thing, the identity transform, in a
fast way.
"""
_mtx = np.identity(3)
def frozen(self):
return self
frozen.__doc__ = Affine2DBase.frozen.__doc__
def __str__(self):
return ("{}()"
.format(type(self).__name__))
def get_matrix(self):
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
def transform(self, points):
return np.asanyarray(points)
transform.__doc__ = Affine2DBase.transform.__doc__
transform_affine = transform
transform_affine.__doc__ = Affine2DBase.transform_affine.__doc__
transform_non_affine = transform
transform_non_affine.__doc__ = Affine2DBase.transform_non_affine.__doc__
def transform_path(self, path):
return path
transform_path.__doc__ = Affine2DBase.transform_path.__doc__
transform_path_affine = transform_path
transform_path_affine.__doc__ = Affine2DBase.transform_path_affine.__doc__
transform_path_non_affine = transform_path
transform_path_non_affine.__doc__ = Affine2DBase.transform_path_non_affine.__doc__
def get_affine(self):
return self
get_affine.__doc__ = Affine2DBase.get_affine.__doc__
inverted = get_affine
inverted.__doc__ = Affine2DBase.inverted.__doc__
class BlendedGenericTransform(Transform):
"""
A "blended" transform uses one transform for the *x*-direction, and
another transform for the *y*-direction.
This "generic" version can handle any given child transform in the
*x*- and *y*-directions.
"""
input_dims = 2
output_dims = 2
is_separable = True
pass_through = True
def __init__(self, x_transform, y_transform, **kwargs):
"""
Create a new "blended" transform using *x_transform* to
transform the *x*-axis and *y_transform* to transform the
*y*-axis.
You will generally not call this constructor directly but use
the :func:`blended_transform_factory` function instead, which
can determine automatically which kind of blended transform to
create.
"""
# Here we ask: "Does it blend?"
Transform.__init__(self, **kwargs)
self._x = x_transform
self._y = y_transform
self.set_children(x_transform, y_transform)
self._affine = None
def __eq__(self, other):
# Note, this is an exact copy of BlendedAffine2D.__eq__
if isinstance(other, (BlendedAffine2D, BlendedGenericTransform)):
return (self._x == other._x) and (self._y == other._y)
elif self._x == self._y:
return self._x == other
else:
return NotImplemented
def contains_branch_seperately(self, transform):
# Note, this is an exact copy of BlendedAffine2D.contains_branch_seperately
return self._x.contains_branch(transform), self._y.contains_branch(transform)
@property
def depth(self):
return max(self._x.depth, self._y.depth)
def contains_branch(self, other):
# a blended transform cannot possibly contain a branch from two different transforms.
return False
def _get_is_affine(self):
return self._x.is_affine and self._y.is_affine
is_affine = property(_get_is_affine)
def _get_has_inverse(self):
return self._x.has_inverse and self._y.has_inverse
has_inverse = property(_get_has_inverse)
def frozen(self):
return blended_transform_factory(self._x.frozen(), self._y.frozen())
frozen.__doc__ = Transform.frozen.__doc__
def __str__(self):
return ("{}(\n"
"{},\n"
"{})"
.format(type(self).__name__,
_indent_str(self._x),
_indent_str(self._y)))
def transform_non_affine(self, points):
if self._x.is_affine and self._y.is_affine:
return points
x = self._x
y = self._y
if x == y and x.input_dims == 2:
return x.transform_non_affine(points)
if x.input_dims == 2:
x_points = x.transform_non_affine(points)[:, 0:1]
else:
x_points = x.transform_non_affine(points[:, 0])
x_points = x_points.reshape((len(x_points), 1))
if y.input_dims == 2:
y_points = y.transform_non_affine(points)[:, 1:]
else:
y_points = y.transform_non_affine(points[:, 1])
y_points = y_points.reshape((len(y_points), 1))
if (isinstance(x_points, np.ma.MaskedArray) or
isinstance(y_points, np.ma.MaskedArray)):
return np.ma.concatenate((x_points, y_points), 1)
else:
return np.concatenate((x_points, y_points), 1)
transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__
def inverted(self):
return BlendedGenericTransform(self._x.inverted(), self._y.inverted())
inverted.__doc__ = Transform.inverted.__doc__
def get_affine(self):
if self._invalid or self._affine is None:
if self._x == self._y:
self._affine = self._x.get_affine()
else:
x_mtx = self._x.get_affine().get_matrix()
y_mtx = self._y.get_affine().get_matrix()
# This works because we already know the transforms are
# separable, though normally one would want to set b and
# c to zero.
mtx = np.vstack((x_mtx[0], y_mtx[1], [0.0, 0.0, 1.0]))
self._affine = Affine2D(mtx)
self._invalid = 0
return self._affine
get_affine.__doc__ = Transform.get_affine.__doc__
class BlendedAffine2D(Affine2DBase):
"""
A "blended" transform uses one transform for the *x*-direction, and
another transform for the *y*-direction.
This version is an optimization for the case where both child
transforms are of type :class:`Affine2DBase`.
"""
is_separable = True
def __init__(self, x_transform, y_transform, **kwargs):
"""
Create a new "blended" transform using *x_transform* to
transform the *x*-axis and *y_transform* to transform the
*y*-axis.
Both *x_transform* and *y_transform* must be 2D affine
transforms.
You will generally not call this constructor directly but use
the :func:`blended_transform_factory` function instead, which
can determine automatically which kind of blended transform to
create.
"""
is_affine = x_transform.is_affine and y_transform.is_affine
is_separable = x_transform.is_separable and y_transform.is_separable
is_correct = is_affine and is_separable
if not is_correct:
raise ValueError("Both *x_transform* and *y_transform* must be 2D "
"affine transforms")
Transform.__init__(self, **kwargs)
self._x = x_transform
self._y = y_transform
self.set_children(x_transform, y_transform)
Affine2DBase.__init__(self)
self._mtx = None
def __eq__(self, other):
# Note, this is an exact copy of BlendedGenericTransform.__eq__
if isinstance(other, (BlendedAffine2D, BlendedGenericTransform)):
return (self._x == other._x) and (self._y == other._y)
elif self._x == self._y:
return self._x == other
else:
return NotImplemented
def contains_branch_seperately(self, transform):
# Note, this is an exact copy of BlendedTransform.contains_branch_seperately
return self._x.contains_branch(transform), self._y.contains_branch(transform)
def __str__(self):
return ("{}(\n"
"{},\n"
"{})"
.format(type(self).__name__,
_indent_str(self._x),
_indent_str(self._y)))
def get_matrix(self):
if self._invalid:
if self._x == self._y:
self._mtx = self._x.get_matrix()
else:
x_mtx = self._x.get_matrix()
y_mtx = self._y.get_matrix()
# This works because we already know the transforms are
# separable, though normally one would want to set b and
# c to zero.
self._mtx = np.vstack((x_mtx[0], y_mtx[1], [0.0, 0.0, 1.0]))
self._inverted = None
self._invalid = 0
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
def blended_transform_factory(x_transform, y_transform):
"""
Create a new "blended" transform using *x_transform* to transform
the *x*-axis and *y_transform* to transform the *y*-axis.
A faster version of the blended transform is returned for the case
where both child transforms are affine.
"""
if (isinstance(x_transform, Affine2DBase)
and isinstance(y_transform, Affine2DBase)):
return BlendedAffine2D(x_transform, y_transform)
return BlendedGenericTransform(x_transform, y_transform)
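# Editor's illustrative sketch (not part of the matplotlib source): a blended
# transform takes x from one child and y from the other; with two affine
# children the factory returns the faster BlendedAffine2D.  Values are
# arbitrary.
def _editor_demo_blended_transform():
    blend = blended_transform_factory(Affine2D().scale(2., 2.),
                                      Affine2D().translate(0., 5.))
    return blend.transform_point((1., 1.))   # -> [2., 6.]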
class CompositeGenericTransform(Transform):
"""
A composite transform formed by applying transform *a* then
transform *b*.
This "generic" version can handle any two arbitrary
transformations.
"""
pass_through = True
def __init__(self, a, b, **kwargs):
"""
Create a new composite transform that is the result of
applying transform *a* then transform *b*.
You will generally not call this constructor directly but use
the :func:`composite_transform_factory` function instead,
which can automatically choose the best kind of composite
transform instance to create.
"""
if a.output_dims != b.input_dims:
raise ValueError("The output dimension of 'a' must be equal to "
"the input dimensions of 'b'")
self.input_dims = a.input_dims
self.output_dims = b.output_dims
Transform.__init__(self, **kwargs)
self._a = a
self._b = b
self.set_children(a, b)
is_affine = property(lambda self: self._a.is_affine and self._b.is_affine)
def frozen(self):
self._invalid = 0
frozen = composite_transform_factory(self._a.frozen(), self._b.frozen())
if not isinstance(frozen, CompositeGenericTransform):
return frozen.frozen()
return frozen
frozen.__doc__ = Transform.frozen.__doc__
def _invalidate_internal(self, value, invalidating_node):
# In some cases for a composite transform, an invalidating call to AFFINE_ONLY needs
# to be extended to invalidate the NON_AFFINE part too. These cases are when the right
# hand transform is non-affine and either:
        # (a) the left hand transform is non-affine
# (b) it is the left hand node which has triggered the invalidation
if value == Transform.INVALID_AFFINE \
and not self._b.is_affine \
and (not self._a.is_affine or invalidating_node is self._a):
value = Transform.INVALID
Transform._invalidate_internal(self, value=value,
invalidating_node=invalidating_node)
def __eq__(self, other):
if isinstance(other, (CompositeGenericTransform, CompositeAffine2D)):
return self is other or (self._a == other._a and self._b == other._b)
else:
return False
def _iter_break_from_left_to_right(self):
for lh_compliment, rh_compliment in self._a._iter_break_from_left_to_right():
yield lh_compliment, rh_compliment + self._b
for lh_compliment, rh_compliment in self._b._iter_break_from_left_to_right():
yield self._a + lh_compliment, rh_compliment
@property
def depth(self):
return self._a.depth + self._b.depth
def _get_is_affine(self):
return self._a.is_affine and self._b.is_affine
is_affine = property(_get_is_affine)
def _get_is_separable(self):
return self._a.is_separable and self._b.is_separable
is_separable = property(_get_is_separable)
def __str__(self):
return ("{}(\n"
"{},\n"
"{})"
.format(type(self).__name__,
_indent_str(self._a),
_indent_str(self._b)))
def transform_affine(self, points):
return self.get_affine().transform(points)
transform_affine.__doc__ = Transform.transform_affine.__doc__
def transform_non_affine(self, points):
if self._a.is_affine and self._b.is_affine:
return points
elif not self._a.is_affine and self._b.is_affine:
return self._a.transform_non_affine(points)
else:
return self._b.transform_non_affine(
self._a.transform(points))
transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__
def transform_path_non_affine(self, path):
if self._a.is_affine and self._b.is_affine:
return path
elif not self._a.is_affine and self._b.is_affine:
return self._a.transform_path_non_affine(path)
else:
return self._b.transform_path_non_affine(
self._a.transform_path(path))
transform_path_non_affine.__doc__ = Transform.transform_path_non_affine.__doc__
def get_affine(self):
if not self._b.is_affine:
return self._b.get_affine()
else:
return Affine2D(np.dot(self._b.get_affine().get_matrix(),
self._a.get_affine().get_matrix()))
get_affine.__doc__ = Transform.get_affine.__doc__
def inverted(self):
return CompositeGenericTransform(self._b.inverted(), self._a.inverted())
inverted.__doc__ = Transform.inverted.__doc__
def _get_has_inverse(self):
return self._a.has_inverse and self._b.has_inverse
has_inverse = property(_get_has_inverse)
class CompositeAffine2D(Affine2DBase):
"""
A composite transform formed by applying transform *a* then transform *b*.
This version is an optimization that handles the case where both *a*
and *b* are 2D affines.
"""
def __init__(self, a, b, **kwargs):
"""
Create a new composite transform that is the result of
applying transform *a* then transform *b*.
Both *a* and *b* must be instances of :class:`Affine2DBase`.
You will generally not call this constructor directly but use
the :func:`composite_transform_factory` function instead,
which can automatically choose the best kind of composite
transform instance to create.
"""
if not a.is_affine or not b.is_affine:
raise ValueError("'a' and 'b' must be affine transforms")
if a.output_dims != b.input_dims:
raise ValueError("The output dimension of 'a' must be equal to "
"the input dimensions of 'b'")
self.input_dims = a.input_dims
self.output_dims = b.output_dims
Affine2DBase.__init__(self, **kwargs)
self._a = a
self._b = b
self.set_children(a, b)
self._mtx = None
@property
def depth(self):
return self._a.depth + self._b.depth
def _iter_break_from_left_to_right(self):
for lh_compliment, rh_compliment in self._a._iter_break_from_left_to_right():
yield lh_compliment, rh_compliment + self._b
for lh_compliment, rh_compliment in self._b._iter_break_from_left_to_right():
yield self._a + lh_compliment, rh_compliment
def __str__(self):
return ("{}(\n"
"{},\n"
"{})"
.format(type(self).__name__,
_indent_str(self._a),
_indent_str(self._b)))
def get_matrix(self):
if self._invalid:
self._mtx = np.dot(
self._b.get_matrix(),
self._a.get_matrix())
self._inverted = None
self._invalid = 0
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
def composite_transform_factory(a, b):
"""
Create a new composite transform that is the result of applying
transform a then transform b.
Shortcut versions of the blended transform are provided for the
case where both child transforms are affine, or one or the other
is the identity transform.
Composite transforms may also be created using the '+' operator,
e.g.::
c = a + b
"""
# check to see if any of a or b are IdentityTransforms. We use
# isinstance here to guarantee that the transforms will *always*
# be IdentityTransforms. Since TransformWrappers are mutable,
# use of equality here would be wrong.
if isinstance(a, IdentityTransform):
return b
elif isinstance(b, IdentityTransform):
return a
elif isinstance(a, Affine2D) and isinstance(b, Affine2D):
return CompositeAffine2D(a, b)
return CompositeGenericTransform(a, b)
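# Editor's illustrative sketch (not part of the matplotlib source): transforms
# compose left-to-right with '+', and '-' walks back up the stack as described
# in Transform.__sub__.  Names and values are arbitrary.
def _editor_demo_composite_transform():
    a = Affine2D().scale(2.)
    b = Affine2D().translate(1., 0.)
    c = a + b                              # apply a first, then b
    point = c.transform_point((1., 1.))    # -> [3., 2.]
    shortcut = (a + b) - b                 # short-circuits back to a itself
    return point, shortcut is a            # (array([3., 2.]), True)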
class BboxTransform(Affine2DBase):
"""
:class:`BboxTransform` linearly transforms points from one
:class:`Bbox` to another :class:`Bbox`.
"""
is_separable = True
def __init__(self, boxin, boxout, **kwargs):
"""
Create a new :class:`BboxTransform` that linearly transforms
points from *boxin* to *boxout*.
"""
if not boxin.is_bbox or not boxout.is_bbox:
raise ValueError("'boxin' and 'boxout' must be bbox")
Affine2DBase.__init__(self, **kwargs)
self._boxin = boxin
self._boxout = boxout
self.set_children(boxin, boxout)
self._mtx = None
self._inverted = None
def __str__(self):
return ("{}(\n"
"{},\n"
"{})"
.format(type(self).__name__,
_indent_str(self._boxin),
_indent_str(self._boxout)))
def get_matrix(self):
if self._invalid:
inl, inb, inw, inh = self._boxin.bounds
outl, outb, outw, outh = self._boxout.bounds
x_scale = outw / inw
y_scale = outh / inh
if DEBUG and (x_scale == 0 or y_scale == 0):
raise ValueError("Transforming from or to a singular bounding box.")
self._mtx = np.array([[x_scale, 0.0 , (-inl*x_scale+outl)],
[0.0 , y_scale, (-inb*y_scale+outb)],
[0.0 , 0.0 , 1.0 ]],
float)
self._inverted = None
self._invalid = 0
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
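# Editor's illustrative sketch (not part of the matplotlib source): mapping a
# point linearly from one bbox to another.  Values are arbitrary.
def _editor_demo_bbox_transform():
    boxin = Bbox.from_extents(0., 0., 1., 1.)
    boxout = Bbox.from_extents(10., 10., 30., 50.)
    trans = BboxTransform(boxin, boxout)
    return trans.transform_point((0.5, 0.25))   # -> [20., 20.]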
class BboxTransformTo(Affine2DBase):
"""
:class:`BboxTransformTo` is a transformation that linearly
transforms points from the unit bounding box to a given
:class:`Bbox`.
"""
is_separable = True
def __init__(self, boxout, **kwargs):
"""
Create a new :class:`BboxTransformTo` that linearly transforms
points from the unit bounding box to *boxout*.
"""
if not boxout.is_bbox:
raise ValueError("'boxout' must be bbox")
Affine2DBase.__init__(self, **kwargs)
self._boxout = boxout
self.set_children(boxout)
self._mtx = None
self._inverted = None
def __str__(self):
return ("{}(\n"
"{})"
.format(type(self).__name__,
_indent_str(self._boxout)))
def get_matrix(self):
if self._invalid:
outl, outb, outw, outh = self._boxout.bounds
if DEBUG and (outw == 0 or outh == 0):
raise ValueError("Transforming to a singular bounding box.")
self._mtx = np.array([[outw, 0.0, outl],
[ 0.0, outh, outb],
[ 0.0, 0.0, 1.0]],
float)
self._inverted = None
self._invalid = 0
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
class BboxTransformToMaxOnly(BboxTransformTo):
"""
    :class:`BboxTransformToMaxOnly` is a transformation that linearly
    transforms points from the unit bounding box to a given
    :class:`Bbox` with a fixed lower left of (0, 0).
"""
def get_matrix(self):
if self._invalid:
xmax, ymax = self._boxout.max
if DEBUG and (xmax == 0 or ymax == 0):
raise ValueError("Transforming to a singular bounding box.")
self._mtx = np.array([[xmax, 0.0, 0.0],
[ 0.0, ymax, 0.0],
[ 0.0, 0.0, 1.0]],
float)
self._inverted = None
self._invalid = 0
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
class BboxTransformFrom(Affine2DBase):
"""
:class:`BboxTransformFrom` linearly transforms points from a given
:class:`Bbox` to the unit bounding box.
"""
is_separable = True
def __init__(self, boxin, **kwargs):
if not boxin.is_bbox:
raise ValueError("'boxin' must be bbox")
Affine2DBase.__init__(self, **kwargs)
self._boxin = boxin
self.set_children(boxin)
self._mtx = None
self._inverted = None
def __str__(self):
return ("{}(\n"
"{})"
.format(type(self).__name__,
_indent_str(self._boxin)))
def get_matrix(self):
if self._invalid:
inl, inb, inw, inh = self._boxin.bounds
if DEBUG and (inw == 0 or inh == 0):
raise ValueError("Transforming from a singular bounding box.")
x_scale = 1.0 / inw
y_scale = 1.0 / inh
self._mtx = np.array([[x_scale, 0.0 , (-inl*x_scale)],
[0.0 , y_scale, (-inb*y_scale)],
[0.0 , 0.0 , 1.0 ]],
float)
self._inverted = None
self._invalid = 0
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
class ScaledTranslation(Affine2DBase):
"""
A transformation that translates by *xt* and *yt*, after *xt* and *yt*
    have been transformed by the given transform *scale_trans*.
"""
def __init__(self, xt, yt, scale_trans, **kwargs):
Affine2DBase.__init__(self, **kwargs)
self._t = (xt, yt)
self._scale_trans = scale_trans
self.set_children(scale_trans)
self._mtx = None
self._inverted = None
def __str__(self):
return ("{}(\n"
"{})"
.format(type(self).__name__,
_indent_str(self._t)))
def get_matrix(self):
if self._invalid:
xt, yt = self._scale_trans.transform_point(self._t)
self._mtx = np.array([[1.0, 0.0, xt],
[0.0, 1.0, yt],
[0.0, 0.0, 1.0]],
float)
self._invalid = 0
self._inverted = None
return self._mtx
get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__
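# Editor's illustrative sketch (not part of the matplotlib source): the offset
# (xt, yt) is passed through *scale_trans* first.  Here a plain Affine2D scale
# stands in for the usual ``figure.dpi_scale_trans``; values are arbitrary.
def _editor_demo_scaled_translation():
    st = ScaledTranslation(1., 2., Affine2D().scale(72.))
    return st.transform_point((0., 0.))   # -> [72., 144.]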
class TransformedPath(TransformNode):
"""
A :class:`TransformedPath` caches a non-affine transformed copy of
the :class:`~matplotlib.path.Path`. This cached copy is
automatically updated when the non-affine part of the transform
changes.
.. note::
Paths are considered immutable by this class. Any update to the
path's vertices/codes will not trigger a transform recomputation.
"""
def __init__(self, path, transform):
"""
Create a new :class:`TransformedPath` from the given
:class:`~matplotlib.path.Path` and :class:`Transform`.
"""
if not isinstance(transform, Transform):
            raise ValueError("'transform' must be an instance of "
                             "'matplotlib.transforms.Transform'")
TransformNode.__init__(self)
self._path = path
self._transform = transform
self.set_children(transform)
self._transformed_path = None
self._transformed_points = None
def _revalidate(self):
# only recompute if the invalidation includes the non_affine part of the transform
if ((self._invalid & self.INVALID_NON_AFFINE == self.INVALID_NON_AFFINE)
or self._transformed_path is None):
self._transformed_path = \
self._transform.transform_path_non_affine(self._path)
self._transformed_points = \
Path._fast_from_codes_and_verts(
self._transform.transform_non_affine(self._path.vertices),
None,
{'interpolation_steps': self._path._interpolation_steps,
'should_simplify': self._path.should_simplify})
self._invalid = 0
def get_transformed_points_and_affine(self):
"""
Return a copy of the child path, with the non-affine part of
the transform already applied, along with the affine part of
the path necessary to complete the transformation. Unlike
:meth:`get_transformed_path_and_affine`, no interpolation will
be performed.
"""
self._revalidate()
return self._transformed_points, self.get_affine()
def get_transformed_path_and_affine(self):
"""
Return a copy of the child path, with the non-affine part of
the transform already applied, along with the affine part of
the path necessary to complete the transformation.
"""
self._revalidate()
return self._transformed_path, self.get_affine()
def get_fully_transformed_path(self):
"""
Return a fully-transformed copy of the child path.
"""
self._revalidate()
return self._transform.transform_path_affine(self._transformed_path)
def get_affine(self):
return self._transform.get_affine()
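# Editor's illustrative sketch (not part of the matplotlib source): a
# TransformedPath caches the (potentially expensive) non-affine part and only
# reapplies the cheap affine part on later draws.  Values are arbitrary.
def _editor_demo_transformed_path():
    path = Path(np.array([[0., 0.], [1., 1.]]))
    tpath = TransformedPath(path, Affine2D().scale(10.))
    return tpath.get_fully_transformed_path().vertices   # -> [[0., 0.], [10., 10.]]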
class TransformedPatchPath(TransformedPath):
"""
A :class:`TransformedPatchPath` caches a non-affine transformed copy of
    the :class:`~matplotlib.patches.Patch`. This cached copy is automatically
updated when the non-affine part of the transform or the patch changes.
"""
def __init__(self, patch):
"""
Create a new :class:`TransformedPatchPath` from the given
        :class:`~matplotlib.patches.Patch`.
"""
TransformNode.__init__(self)
transform = patch.get_transform()
self._patch = patch
self._transform = transform
self.set_children(transform)
self._path = patch.get_path()
self._transformed_path = None
self._transformed_points = None
def _revalidate(self):
patch_path = self._patch.get_path()
# Only recompute if the invalidation includes the non_affine part of
# the transform, or the Patch's Path has changed.
if (self._transformed_path is None or self._path != patch_path or
(self._invalid & self.INVALID_NON_AFFINE ==
self.INVALID_NON_AFFINE)):
self._path = patch_path
self._transformed_path = \
self._transform.transform_path_non_affine(patch_path)
self._transformed_points = \
Path._fast_from_codes_and_verts(
self._transform.transform_non_affine(patch_path.vertices),
None,
{'interpolation_steps': patch_path._interpolation_steps,
'should_simplify': patch_path.should_simplify})
self._invalid = 0
def nonsingular(vmin, vmax, expander=0.001, tiny=1e-15, increasing=True):
"""
Modify the endpoints of a range as needed to avoid singularities.
Parameters
----------
vmin, vmax : float
The initial endpoints.
expander : float, optional, default: 0.001
Fractional amount by which *vmin* and *vmax* are expanded if
the original interval is too small, based on *tiny*.
tiny : float, optional, default: 1e-15
Threshold for the ratio of the interval to the maximum absolute
value of its endpoints. If the interval is smaller than
this, it will be expanded. This value should be around
1e-15 or larger; otherwise the interval will be approaching
the double precision resolution limit.
increasing : bool, optional, default: True
If True, swap *vmin*, *vmax* if *vmin* > *vmax*.
Returns
-------
vmin, vmax : float
Endpoints, expanded and/or swapped if necessary.
If either input is inf or NaN, or if both inputs are 0 or very
close to zero, it returns -*expander*, *expander*.
"""
if (not np.isfinite(vmin)) or (not np.isfinite(vmax)):
return -expander, expander
swapped = False
if vmax < vmin:
vmin, vmax = vmax, vmin
swapped = True
maxabsvalue = max(abs(vmin), abs(vmax))
if maxabsvalue < (1e6 / tiny) * np.finfo(float).tiny:
vmin = -expander
vmax = expander
elif vmax - vmin <= maxabsvalue * tiny:
if vmax == 0 and vmin == 0:
vmin = -expander
vmax = expander
else:
vmin -= expander*abs(vmin)
vmax += expander*abs(vmax)
if swapped and not increasing:
vmin, vmax = vmax, vmin
return vmin, vmax
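# Editor's illustrative sketch (not part of the matplotlib source): how
# degenerate ranges are expanded or swapped.  Values are arbitrary.
def _editor_demo_nonsingular():
    return (nonsingular(0.0, 0.0),   # -> (-0.001, 0.001)
            nonsingular(3.0, 3.0),   # -> (2.997, 3.003)
            nonsingular(5.0, 1.0))   # swapped -> (1.0, 5.0)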
def interval_contains(interval, val):
"""
Check, inclusively, whether an interval includes a given value.
Parameters
----------
interval : sequence of scalar
A 2-length sequence, endpoints that define the interval.
val : scalar
Value to check is within interval.
Returns
-------
bool
Returns true if given val is within the interval.
"""
a, b = interval
return a <= val <= b or a >= val >= b
def interval_contains_open(interval, val):
"""
Check, excluding endpoints, whether an interval includes a given value.
Parameters
----------
interval : sequence of scalar
A 2-length sequence, endpoints that define the interval.
val : scalar
Value to check is within interval.
Returns
-------
bool
Returns true if given val is within the interval.
"""
a, b = interval
return a < val < b or a > val > b
def offset_copy(trans, fig=None, x=0.0, y=0.0, units='inches'):
"""
Return a new transform with an added offset.
Parameters
----------
trans : :class:`Transform` instance
Any transform, to which offset will be applied.
fig : :class:`~matplotlib.figure.Figure`, optional, default: None
Current figure. It can be None if *units* are 'dots'.
x, y : float, optional, default: 0.0
Specifies the offset to apply.
units : {'inches', 'points', 'dots'}, optional
Units of the offset.
Returns
-------
trans : :class:`Transform` instance
Transform with applied offset.
"""
if units == 'dots':
return trans + Affine2D().translate(x, y)
if fig is None:
raise ValueError('For units of inches or points a fig kwarg is needed')
if units == 'points':
x /= 72.0
y /= 72.0
elif not units == 'inches':
raise ValueError('units must be dots, points, or inches')
return trans + ScaledTranslation(x, y, fig.dpi_scale_trans)
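# Editor's illustrative sketch (not part of the matplotlib source): with
# units='dots' no figure is needed, the offset is simply added in display
# space.  Values are arbitrary.
def _editor_demo_offset_copy():
    shifted = offset_copy(Affine2D().scale(2.), x=5., y=0., units='dots')
    return shifted.transform_point((1., 1.))   # -> [7., 2.]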
| 101,447 | 32.448071 | 94 | py |
repo: cba-pipeline-public
file: cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/_layoutbox.py
# -*- coding: utf-8 -*-
"""
Conventions:
"constrain_x" means to constrain the variable with either
another kiwisolver variable, or a float. i.e. `constrain_width(0.2)`
will set a constraint that the width has to be 0.2 and this constraint is
permanent - i.e. it will not be removed if it becomes obsolete.
"edit_x" means to set x to a value (just a float), and that this value can
change. So `edit_width(0.2)` will set width to be 0.2, but `edit_width(0.3)`
will allow it to change to 0.3 later. Note that these values are still just
"suggestions" in `kiwisolver` parlance, and could be overridden by
other constraints.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import itertools
import kiwisolver as kiwi
import logging
import numpy as np
import warnings
import matplotlib
_log = logging.getLogger(__name__)
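# Editor's illustrative sketch (not part of the matplotlib source): the
# "constrain" versus "edit" convention described in the module docstring,
# using raw kiwisolver objects exactly as LayoutBox does below.  The function
# name and values are arbitrary.
def _editor_demo_constrain_vs_edit():
    width = kiwi.Variable('demo_width')
    solver = kiwi.Solver()
    # A *constraint* is permanent once added.
    solver.addConstraint((width >= 0.1) | 'required')
    # An *edit* variable can be re-suggested later.
    solver.addEditVariable(width, 'strong')
    solver.suggestValue(width, 0.2)
    solver.updateVariables()
    first = width.value()              # approx. 0.2
    solver.suggestValue(width, 0.3)
    solver.updateVariables()
    return first, width.value()        # approx. (0.2, 0.3)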
# renderers can be complicated
def get_renderer(fig):
if fig._cachedRenderer:
renderer = fig._cachedRenderer
else:
canvas = fig.canvas
if canvas and hasattr(canvas, "get_renderer"):
renderer = canvas.get_renderer()
else:
# not sure if this can happen
# seems to with PDF...
_log.info("constrained_layout : falling back to Agg renderer")
from matplotlib.backends.backend_agg import FigureCanvasAgg
canvas = FigureCanvasAgg(fig)
renderer = canvas.get_renderer()
return renderer
class LayoutBox(object):
"""
Basic rectangle representation using kiwi solver variables
"""
def __init__(self, parent=None, name='', tightwidth=False,
tightheight=False, artist=None,
lower_left=(0, 0), upper_right=(1, 1), pos=False,
subplot=False, h_pad=None, w_pad=None):
Variable = kiwi.Variable
self.parent = parent
self.name = name
sn = self.name + '_'
if parent is None:
self.solver = kiwi.Solver()
self.constrained_layout_called = 0
else:
self.solver = parent.solver
self.constrained_layout_called = None
# parent wants to know about this child!
parent.add_child(self)
# keep track of artist associated w/ this layout. Can be none
self.artist = artist
# keep track if this box is supposed to be a pos that is constrained
# by the parent.
self.pos = pos
# keep track of whether we need to match this subplot up with others.
self.subplot = subplot
        # we need the str below for Py 2, which otherwise complains that the string is unicode
self.top = Variable(str(sn + 'top'))
self.bottom = Variable(str(sn + 'bottom'))
self.left = Variable(str(sn + 'left'))
self.right = Variable(str(sn + 'right'))
self.width = Variable(str(sn + 'width'))
self.height = Variable(str(sn + 'height'))
self.h_center = Variable(str(sn + 'h_center'))
self.v_center = Variable(str(sn + 'v_center'))
self.min_width = Variable(str(sn + 'min_width'))
self.min_height = Variable(str(sn + 'min_height'))
self.pref_width = Variable(str(sn + 'pref_width'))
self.pref_height = Variable(str(sn + 'pref_height'))
        # margins are only used for axes-position layout boxes. maybe should
# be a separate subclass:
self.left_margin = Variable(str(sn + 'left_margin'))
self.right_margin = Variable(str(sn + 'right_margin'))
self.bottom_margin = Variable(str(sn + 'bottom_margin'))
self.top_margin = Variable(str(sn + 'top_margin'))
# mins
self.left_margin_min = Variable(str(sn + 'left_margin_min'))
self.right_margin_min = Variable(str(sn + 'right_margin_min'))
self.bottom_margin_min = Variable(str(sn + 'bottom_margin_min'))
self.top_margin_min = Variable(str(sn + 'top_margin_min'))
right, top = upper_right
left, bottom = lower_left
self.tightheight = tightheight
self.tightwidth = tightwidth
self.add_constraints()
self.children = []
self.subplotspec = None
if self.pos:
self.constrain_margins()
self.h_pad = h_pad
self.w_pad = w_pad
def constrain_margins(self):
"""
Only do this for pos. This sets a variable distance
margin between the position of the axes and the outer edge of
the axes.
        Margins are variable because they change with the figure size.
        Margin minimums are set to make room for axes decorations. However,
        the margins can be larger if we are matching the position size to
        other axes.
"""
sol = self.solver
# left
if not sol.hasEditVariable(self.left_margin_min):
sol.addEditVariable(self.left_margin_min, 'strong')
sol.suggestValue(self.left_margin_min, 0.0001)
c = (self.left_margin == self.left - self.parent.left)
self.solver.addConstraint(c | 'required')
c = (self.left_margin >= self.left_margin_min)
self.solver.addConstraint(c | 'strong')
# right
if not sol.hasEditVariable(self.right_margin_min):
sol.addEditVariable(self.right_margin_min, 'strong')
sol.suggestValue(self.right_margin_min, 0.0001)
c = (self.right_margin == self.parent.right - self.right)
self.solver.addConstraint(c | 'required')
c = (self.right_margin >= self.right_margin_min)
self.solver.addConstraint(c | 'required')
# bottom
if not sol.hasEditVariable(self.bottom_margin_min):
sol.addEditVariable(self.bottom_margin_min, 'strong')
sol.suggestValue(self.bottom_margin_min, 0.0001)
c = (self.bottom_margin == self.bottom - self.parent.bottom)
self.solver.addConstraint(c | 'required')
c = (self.bottom_margin >= self.bottom_margin_min)
self.solver.addConstraint(c | 'required')
# top
if not sol.hasEditVariable(self.top_margin_min):
sol.addEditVariable(self.top_margin_min, 'strong')
sol.suggestValue(self.top_margin_min, 0.0001)
c = (self.top_margin == self.parent.top - self.top)
self.solver.addConstraint(c | 'required')
c = (self.top_margin >= self.top_margin_min)
self.solver.addConstraint(c | 'required')
def add_child(self, child):
self.children += [child]
def remove_child(self, child):
try:
self.children.remove(child)
except ValueError:
_log.info("Tried to remove child that doesn't belong to parent")
def add_constraints(self):
sol = self.solver
# never let width and height go negative.
for i in [self.min_width, self.min_height]:
sol.addEditVariable(i, 1e9)
sol.suggestValue(i, 0.0)
        # define relationships between things like width, right and left
self.hard_constraints()
# self.soft_constraints()
if self.parent:
self.parent_constrain()
# sol.updateVariables()
def parent_constrain(self):
parent = self.parent
hc = [self.left >= parent.left,
self.bottom >= parent.bottom,
self.top <= parent.top,
self.right <= parent.right]
for c in hc:
self.solver.addConstraint(c | 'required')
def hard_constraints(self):
hc = [self.width == self.right - self.left,
self.height == self.top - self.bottom,
self.h_center == (self.left + self.right) * 0.5,
self.v_center == (self.top + self.bottom) * 0.5,
self.width >= self.min_width,
self.height >= self.min_height]
for c in hc:
self.solver.addConstraint(c | 'required')
def soft_constraints(self):
sol = self.solver
if self.tightwidth:
suggest = 0.
else:
suggest = 20.
c = (self.pref_width == suggest)
for i in c:
sol.addConstraint(i | 'required')
if self.tightheight:
suggest = 0.
else:
suggest = 20.
c = (self.pref_height == suggest)
for i in c:
sol.addConstraint(i | 'required')
c = [(self.width >= suggest),
(self.height >= suggest)]
for i in c:
sol.addConstraint(i | 150000)
def set_parent(self, parent):
''' replace the parent of this with the new parent
'''
self.parent = parent
self.parent_constrain()
def constrain_geometry(self, left, bottom, right, top, strength='strong'):
hc = [self.left == left,
self.right == right,
self.bottom == bottom,
self.top == top]
for c in hc:
self.solver.addConstraint((c | strength))
# self.solver.updateVariables()
def constrain_same(self, other, strength='strong'):
"""
Make the layoutbox have same position as other layoutbox
"""
hc = [self.left == other.left,
self.right == other.right,
self.bottom == other.bottom,
self.top == other.top]
for c in hc:
self.solver.addConstraint((c | strength))
def constrain_left_margin(self, margin, strength='strong'):
c = (self.left == self.parent.left + margin)
self.solver.addConstraint(c | strength)
def edit_left_margin_min(self, margin):
self.solver.suggestValue(self.left_margin_min, margin)
def constrain_right_margin(self, margin, strength='strong'):
c = (self.right == self.parent.right - margin)
self.solver.addConstraint(c | strength)
def edit_right_margin_min(self, margin):
self.solver.suggestValue(self.right_margin_min, margin)
def constrain_bottom_margin(self, margin, strength='strong'):
c = (self.bottom == self.parent.bottom + margin)
self.solver.addConstraint(c | strength)
def edit_bottom_margin_min(self, margin):
self.solver.suggestValue(self.bottom_margin_min, margin)
def constrain_top_margin(self, margin, strength='strong'):
c = (self.top == self.parent.top - margin)
self.solver.addConstraint(c | strength)
def edit_top_margin_min(self, margin):
self.solver.suggestValue(self.top_margin_min, margin)
def get_rect(self):
return (self.left.value(), self.bottom.value(),
self.width.value(), self.height.value())
def update_variables(self):
'''
Update *all* the variables that are part of the solver this LayoutBox
is created with
'''
self.solver.updateVariables()
def edit_height(self, height, strength='strong'):
'''
Set the height of the layout box.
This is done as an editable variable so that the value can change
due to resizing.
'''
sol = self.solver
for i in [self.height]:
if not sol.hasEditVariable(i):
sol.addEditVariable(i, strength)
sol.suggestValue(self.height, height)
def constrain_height(self, height, strength='strong'):
'''
Constrain the height of the layout box. height is
either a float or a layoutbox.height.
'''
c = (self.height == height)
self.solver.addConstraint(c | strength)
def constrain_height_min(self, height, strength='strong'):
c = (self.height >= height)
self.solver.addConstraint(c | strength)
def edit_width(self, width, strength='strong'):
sol = self.solver
for i in [self.width]:
if not sol.hasEditVariable(i):
sol.addEditVariable(i, strength)
sol.suggestValue(self.width, width)
def constrain_width(self, width, strength='strong'):
'''
Constrain the width of the layout box. `width` is
either a float or a layoutbox.width.
'''
c = (self.width == width)
self.solver.addConstraint(c | strength)
def constrain_width_min(self, width, strength='strong'):
c = (self.width >= width)
self.solver.addConstraint(c | strength)
def constrain_left(self, left, strength='strong'):
c = (self.left == left)
self.solver.addConstraint(c | strength)
def constrain_bottom(self, bottom, strength='strong'):
c = (self.bottom == bottom)
self.solver.addConstraint(c | strength)
def constrain_right(self, right, strength='strong'):
c = (self.right == right)
self.solver.addConstraint(c | strength)
def constrain_top(self, top, strength='strong'):
c = (self.top == top)
self.solver.addConstraint(c | strength)
def _is_subplotspec_layoutbox(self):
'''
Helper to check if this layoutbox is the layoutbox of a
subplotspec
'''
name = (self.name).split('.')[-1][:-3]
if name == 'ss':
return True
return False
def _is_gridspec_layoutbox(self):
'''
Helper to check if this layoutbox is the layoutbox of a
gridspec
'''
name = (self.name).split('.')[-1][:-3]
if name == 'gridspec':
return True
return False
def find_child_subplots(self):
'''
Find children of this layout box that are subplots. We want to line
their positions up, and this is an easy way to find them all.
'''
if self.subplot:
subplots = [self]
else:
subplots = []
for child in self.children:
subplots += child.find_child_subplots()
return subplots
def layout_from_subplotspec(self, subspec,
name='', artist=None, pos=False):
''' Make a layout box from a subplotspec. The layout box is
constrained to be a fraction of the width/height of the parent,
and be a fraction of the parent width/height from the left/bottom
of the parent. Therefore the parent can move around and the
layout for the subplot spec should move with it.
The parent is *usually* the gridspec that made the subplotspec.
'''
lb = LayoutBox(parent=self, name=name, artist=artist, pos=pos)
gs = subspec.get_gridspec()
nrows, ncols = gs.get_geometry()
parent = self.parent
# OK, now we want to set the position of this subplotspec
# based on its subplotspec parameters. The new gridspec will inherit
# from gridspec; this should probably become a new method in gridspec.
left = 0.0
right = 1.0
bottom = 0.0
top = 1.0
totWidth = right-left
totHeight = top-bottom
hspace = 0.
wspace = 0.
# calculate accumulated heights of rows
cellH = totHeight / (nrows + hspace * (nrows - 1))
sepH = hspace*cellH
if gs._row_height_ratios is not None:
netHeight = cellH * nrows
tr = float(sum(gs._row_height_ratios))
cellHeights = [netHeight*r/tr for r in gs._row_height_ratios]
else:
cellHeights = [cellH] * nrows
sepHeights = [0] + ([sepH] * (nrows - 1))
cellHs = np.add.accumulate(np.ravel(
list(zip(sepHeights, cellHeights))))
# calculate accumulated widths of columns
cellW = totWidth/(ncols + wspace * (ncols - 1))
sepW = wspace*cellW
if gs._col_width_ratios is not None:
netWidth = cellW * ncols
tr = float(sum(gs._col_width_ratios))
cellWidths = [netWidth * r / tr for r in gs._col_width_ratios]
else:
cellWidths = [cellW] * ncols
sepWidths = [0] + ([sepW] * (ncols - 1))
cellWs = np.add.accumulate(np.ravel(list(zip(sepWidths, cellWidths))))
figTops = [top - cellHs[2 * rowNum] for rowNum in range(nrows)]
figBottoms = [top - cellHs[2 * rowNum + 1] for rowNum in range(nrows)]
figLefts = [left + cellWs[2 * colNum] for colNum in range(ncols)]
figRights = [left + cellWs[2 * colNum + 1] for colNum in range(ncols)]
rowNum, colNum = divmod(subspec.num1, ncols)
figBottom = figBottoms[rowNum]
figTop = figTops[rowNum]
figLeft = figLefts[colNum]
figRight = figRights[colNum]
if subspec.num2 is not None:
rowNum2, colNum2 = divmod(subspec.num2, ncols)
figBottom2 = figBottoms[rowNum2]
figTop2 = figTops[rowNum2]
figLeft2 = figLefts[colNum2]
figRight2 = figRights[colNum2]
figBottom = min(figBottom, figBottom2)
figLeft = min(figLeft, figLeft2)
figTop = max(figTop, figTop2)
figRight = max(figRight, figRight2)
# These are numbers relative to 0,0,1,1. Need to constrain
# relative to parent.
width = figRight - figLeft
height = figTop - figBottom
parent = self.parent
cs = [self.left == parent.left + parent.width * figLeft,
self.bottom == parent.bottom + parent.height * figBottom,
self.width == parent.width * width,
self.height == parent.height * height]
for c in cs:
self.solver.addConstraint((c | 'required'))
return lb
def __repr__(self):
args = (self.name, self.left.value(), self.bottom.value(),
self.right.value(), self.top.value())
return ('LayoutBox: %25s, (left: %1.3f) (bot: %1.3f) '
'(right: %1.3f) (top: %1.3f) ') % args
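# A minimal illustrative sketch (not from the original module) of the raw
# kiwisolver pattern the LayoutBox methods above rely on: edges are solver
# Variables, a margin minimum is an *edit* variable whose value is
# "suggested", and relations are added as constraints with a strength.
# All variable names below are hypothetical stand-ins for box edges.
def _kiwi_margin_sketch():
    from kiwisolver import Solver, Variable
    parent_left = Variable('parent_left')
    left = Variable('left')
    right = Variable('right')
    width = Variable('width')
    left_margin = Variable('left_margin')
    left_margin_min = Variable('left_margin_min')
    sol = Solver()
    # hard relationships, analogous to hard_constraints()
    sol.addConstraint((width == right - left) | 'required')
    sol.addConstraint((left_margin == left - parent_left) | 'required')
    sol.addConstraint((left_margin >= left_margin_min) | 'strong')
    # editable minimum margin, analogous to constrain_margins()
    sol.addEditVariable(left_margin_min, 'strong')
    sol.suggestValue(left_margin_min, 0.05)
    sol.updateVariables()
    return left_margin.value()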
# Utility functions that act on layoutboxes...
def hstack(boxes, padding=0, strength='strong'):
'''
Stack LayoutBox instances from left to right.
`padding` is in figure-relative units.
'''
for i in range(1, len(boxes)):
c = (boxes[i-1].right + padding <= boxes[i].left)
boxes[i].solver.addConstraint(c | strength)
def hpack(boxes, padding=0, strength='strong'):
'''
Stack LayoutBox instances from left to right.
'''
for i in range(1, len(boxes)):
c = (boxes[i-1].right + padding == boxes[i].left)
boxes[i].solver.addConstraint(c | strength)
def vstack(boxes, padding=0, strength='strong'):
'''
Stack LayoutBox instances from top to bottom
'''
for i in range(1, len(boxes)):
c = (boxes[i-1].bottom - padding >= boxes[i].top)
boxes[i].solver.addConstraint(c | strength)
def vpack(boxes, padding=0, strength='strong'):
'''
Stack LayoutBox instances from top to bottom and make them as close
together as possible.
'''
for i in range(1, len(boxes)):
c = (boxes[i-1].bottom - padding == boxes[i].top)
boxes[i].solver.addConstraint(c | strength)
def match_heights(boxes, height_ratios=None, strength='medium'):
'''
Constrain the boxes to have heights proportional to *height_ratios*
(equal heights if *height_ratios* is not given).
'''
if height_ratios is None:
height_ratios = np.ones(len(boxes))
for i in range(1, len(boxes)):
c = (boxes[i-1].height ==
boxes[i].height*height_ratios[i-1]/height_ratios[i])
boxes[i].solver.addConstraint(c | strength)
def match_widths(boxes, width_ratios=None, strength='medium'):
'''
Constrain the boxes to have widths proportional to *width_ratios*
(equal widths if *width_ratios* is not given).
'''
if width_ratios is None:
width_ratios = np.ones(len(boxes))
for i in range(1, len(boxes)):
c = (boxes[i-1].width ==
boxes[i].width*width_ratios[i-1]/width_ratios[i])
boxes[i].solver.addConstraint(c | strength)
def vstackeq(boxes, padding=0, height_ratios=None):
vstack(boxes, padding=padding)
match_heights(boxes, height_ratios=height_ratios)
def hstackeq(boxes, padding=0, width_ratios=None):
hstack(boxes, padding=padding)
match_widths(boxes, width_ratios=width_ratios)
def align(boxes, attr, strength='strong'):
for box in boxes[1:]:
cons = (getattr(boxes[0], attr) == getattr(box, attr))
boxes[0].solver.addConstraint(cons | strength)
def match_top_margins(boxes, levels=1):
box0 = boxes[0]
top0 = box0
for n in range(levels):
top0 = top0.parent
for box in boxes[1:]:
topb = box
for n in range(levels):
topb = topb.parent
c = (box0.top-top0.top == box.top-topb.top)
box0.solver.addConstraint(c | 'strong')
def match_bottom_margins(boxes, levels=1):
box0 = boxes[0]
top0 = box0
for n in range(levels):
top0 = top0.parent
for box in boxes[1:]:
topb = box
for n in range(levels):
topb = topb.parent
c = (box0.bottom-top0.bottom == box.bottom-topb.bottom)
box0.solver.addConstraint(c | 'strong')
def match_left_margins(boxes, levels=1):
box0 = boxes[0]
top0 = box0
for n in range(levels):
top0 = top0.parent
for box in boxes[1:]:
topb = box
for n in range(levels):
topb = topb.parent
c = (box0.left-top0.left == box.left-topb.left)
box0.solver.addConstraint(c | 'strong')
def match_right_margins(boxes, levels=1):
box0 = boxes[0]
top0 = box0
for n in range(levels):
top0 = top0.parent
for box in boxes[1:]:
topb = box
for n in range(levels):
topb = topb.parent
c = (box0.right-top0.right == box.right-topb.right)
box0.solver.addConstraint(c | 'strong')
def match_width_margins(boxes, levels=1):
match_left_margins(boxes, levels=levels)
match_right_margins(boxes, levels=levels)
def match_height_margins(boxes, levels=1):
match_top_margins(boxes, levels=levels)
match_bottom_margins(boxes, levels=levels)
def match_margins(boxes, levels=1):
match_width_margins(boxes, levels=levels)
match_height_margins(boxes, levels=levels)
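# A minimal illustrative sketch (not from the original module) of the
# constraints that hstack() and match_widths() above add, written with raw
# kiwisolver variables standing in for the edges of two hypothetical boxes.
def _stacking_sketch():
    from kiwisolver import Solver, Variable
    a_left, a_right = Variable('a_left'), Variable('a_right')
    b_left, b_right = Variable('b_left'), Variable('b_right')
    sol = Solver()
    padding = 0.01
    # hstack: box b starts at least `padding` to the right of box a
    sol.addConstraint((a_right + padding <= b_left) | 'strong')
    # match_widths with equal ratios: both boxes get the same width
    sol.addConstraint(((a_right - a_left) == (b_right - b_left)) | 'medium')
    # pin box a and the right edge of box b so the system is determined
    sol.addConstraint((a_left == 0.0) | 'required')
    sol.addConstraint((a_right == 0.4) | 'required')
    sol.addConstraint((b_right == 1.0) | 'required')
    sol.updateVariables()
    return b_left.value()   # 0.6: widths match (0.4 each) and the gap holds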
_layoutboxobjnum = itertools.count()
def seq_id():
'''
Generate a short sequential id for layoutbox objects...
'''
global _layoutboxobjnum
return ('%03d' % (next(_layoutboxobjnum)))
def print_children(lb):
'''
Print the children of the layoutbox
'''
print(lb)
for child in lb.children:
print_children(child)
def nonetree(lb):
'''
Make all elements in this tree none... This signals not to do any more
layout.
'''
if lb is not None:
if lb.parent is None:
# Clear the solver. Hopefully this garbage collects.
lb.solver.reset()
nonechildren(lb)
else:
nonetree(lb.parent)
def nonechildren(lb):
for child in lb.children:
nonechildren(child)
lb.artist._layoutbox = None
lb = None
def print_tree(lb):
'''
Print the tree of layoutboxes
'''
if lb.parent is None:
print('LayoutBox Tree\n')
print('==============\n')
print_children(lb)
print('\n')
else:
print_tree(lb.parent)
def plot_children(fig, box, level=0, printit=True):
'''
Simple plotting to show where boxes are
'''
import matplotlib
import matplotlib.pyplot as plt
if isinstance(fig, matplotlib.figure.Figure):
ax = fig.add_axes([0., 0., 1., 1.])
ax.set_facecolor([1., 1., 1., 0.7])
ax.set_alpha(0.3)
fig.draw(fig.canvas.get_renderer())
else:
ax = fig
import matplotlib.patches as patches
colors = plt.rcParams["axes.prop_cycle"].by_key()["color"]
if printit:
print("Level:", level)
for child in box.children:
rect = child.get_rect()
if printit:
print(child)
ax.add_patch(
patches.Rectangle(
(child.left.value(), child.bottom.value()), # (x,y)
child.width.value(), # width
child.height.value(), # height
fc='none',
alpha=0.8,
ec=colors[level]
)
)
if level > 0:
name = child.name.split('.')[-1]
if level % 2 == 0:
ax.text(child.left.value(), child.bottom.value(), name,
size=12-level, color=colors[level])
else:
ax.text(child.right.value(), child.top.value(), name,
ha='right', va='top', size=12-level,
color=colors[level])
plot_children(ax, child, level=level+1, printit=printit)
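# Usage sketch (not from the original module): with constrained_layout
# enabled, matplotlib attaches a layoutbox tree to the figure.  The
# `fig._layoutbox` attribute used below is private API and is an assumption
# here; it may not exist in every matplotlib version.
def _plot_children_sketch():
    import matplotlib.pyplot as plt
    fig, axs = plt.subplots(2, 2, constrained_layout=True)
    fig.canvas.draw()                       # runs the constrained-layout pass
    lb = getattr(fig, '_layoutbox', None)
    if lb is not None:
        print_tree(lb)                      # text dump of the layoutbox tree
        plot_children(fig, lb, printit=False)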
| 24,574 | 32.030914 | 78 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/trifinder.py |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from matplotlib.tri import Triangulation
import matplotlib._tri as _tri
import numpy as np
class TriFinder(object):
"""
Abstract base class for classes used to find the triangles of a
Triangulation in which (x,y) points lie.
Rather than instantiate an object of a class derived from TriFinder, it is
usually better to use the function
:func:`matplotlib.tri.Triangulation.get_trifinder`.
Derived classes implement __call__(x,y) where x,y are array_like point
coordinates of the same shape.
"""
def __init__(self, triangulation):
if not isinstance(triangulation, Triangulation):
raise ValueError('Expected a Triangulation object')
self._triangulation = triangulation
class TrapezoidMapTriFinder(TriFinder):
"""
:class:`~matplotlib.tri.TriFinder` class implemented using the trapezoid
map algorithm from the book "Computational Geometry, Algorithms and
Applications", second edition, by M. de Berg, M. van Kreveld, M. Overmars
and O. Schwarzkopf.
The triangulation must be valid, i.e. it must not have duplicate points,
triangles formed from colinear points, or overlapping triangles. The
algorithm has some tolerance to triangles formed from colinear points, but
this should not be relied upon.
"""
def __init__(self, triangulation):
TriFinder.__init__(self, triangulation)
self._cpp_trifinder = _tri.TrapezoidMapTriFinder(
triangulation.get_cpp_triangulation())
self._initialize()
def __call__(self, x, y):
"""
Return an array containing the indices of the triangles in which the
specified x,y points lie, or -1 for points that do not lie within a
triangle.
*x*, *y* are array_like x and y coordinates of the same shape and any
number of dimensions.
Returns an integer array with the same shape as *x* and *y*.
"""
x = np.asarray(x, dtype=np.float64)
y = np.asarray(y, dtype=np.float64)
if x.shape != y.shape:
raise ValueError("x and y must be array-like with the same shape")
# C++ does the heavy lifting, and expects 1D arrays.
indices = (self._cpp_trifinder.find_many(x.ravel(), y.ravel())
.reshape(x.shape))
return indices
def _get_tree_stats(self):
"""
Return a python list containing the statistics about the node tree:
0: number of nodes (tree size)
1: number of unique nodes
2: number of trapezoids (tree leaf nodes)
3: number of unique trapezoids
4: maximum parent count (max number of times a node is repeated in
tree)
5: maximum depth of tree (one more than the maximum number of
comparisons needed to search through the tree)
6: mean of all trapezoid depths (one more than the average number
of comparisons needed to search through the tree)
"""
return self._cpp_trifinder.get_tree_stats()
def _initialize(self):
"""
Initialize the underlying C++ object. Can be called multiple times if,
for example, the triangulation is modified.
"""
self._cpp_trifinder.initialize()
def _print_tree(self):
"""
Print a text representation of the node tree, which is useful for
debugging purposes.
"""
self._cpp_trifinder.print_tree()
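# Usage sketch (not part of the original module): build a tiny triangulation
# and ask which triangle contains each query point; -1 flags points outside.
def _trifinder_sketch():
    import numpy as np
    from matplotlib.tri import Triangulation
    x = np.array([0.0, 1.0, 0.0, 1.0])
    y = np.array([0.0, 0.0, 1.0, 1.0])
    triangles = [[0, 1, 2], [1, 3, 2]]
    triang = Triangulation(x, y, triangles)
    trifinder = triang.get_trifinder()      # a TrapezoidMapTriFinder
    # expected result for these points: [0, 1, -1]
    return trifinder([0.2, 0.8, 2.0], [0.2, 0.8, 2.0])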
| 3,626 | 36.391753 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/tricontour.py |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from matplotlib.contour import ContourSet
from matplotlib.tri.triangulation import Triangulation
import matplotlib._tri as _tri
import numpy as np
class TriContourSet(ContourSet):
"""
Create and store a set of contour lines or filled regions for
a triangular grid.
User-callable method: clabel
Useful attributes:
ax:
the axes object in which the contours are drawn
collections:
a silent_list of LineCollections or PolyCollections
levels:
contour levels
layers:
same as levels for line contours; half-way between
levels for filled contours. See _process_colors method.
"""
def __init__(self, ax, *args, **kwargs):
"""
Draw triangular grid contour lines or filled regions,
depending on whether keyword arg 'filled' is False
(default) or True.
The first argument of the initializer must be an axes
object. The remaining arguments and keyword arguments
are described in the docstring of `tricontour`.
"""
ContourSet.__init__(self, ax, *args, **kwargs)
def _process_args(self, *args, **kwargs):
"""
Process args and kwargs.
"""
if isinstance(args[0], TriContourSet):
C = args[0].cppContourGenerator
if self.levels is None:
self.levels = args[0].levels
else:
tri, z = self._contour_args(args, kwargs)
C = _tri.TriContourGenerator(tri.get_cpp_triangulation(), z)
self._mins = [tri.x.min(), tri.y.min()]
self._maxs = [tri.x.max(), tri.y.max()]
self.cppContourGenerator = C
return kwargs
def _get_allsegs_and_allkinds(self):
"""
Create and return allsegs and allkinds by calling underlying C code.
"""
allsegs = []
if self.filled:
lowers, uppers = self._get_lowers_and_uppers()
allkinds = []
for lower, upper in zip(lowers, uppers):
segs, kinds = self.cppContourGenerator.create_filled_contour(
lower, upper)
allsegs.append([segs])
allkinds.append([kinds])
else:
allkinds = None
for level in self.levels:
segs = self.cppContourGenerator.create_contour(level)
allsegs.append(segs)
return allsegs, allkinds
def _contour_args(self, args, kwargs):
if self.filled:
fn = 'contourf'
else:
fn = 'contour'
tri, args, kwargs = Triangulation.get_from_args_and_kwargs(*args,
**kwargs)
z = np.asarray(args[0])
if z.shape != tri.x.shape:
raise ValueError('z array must have same length as triangulation x'
' and y arrays')
self.zmax = z.max()
self.zmin = z.min()
if self.logscale and self.zmin <= 0:
raise ValueError('Cannot %s log of negative values.' % fn)
self._contour_level_args(z, args[1:])
return (tri, z)
def tricontour(ax, *args, **kwargs):
"""
Draw contours on an unstructured triangular grid.
:func:`~matplotlib.pyplot.tricontour` and
:func:`~matplotlib.pyplot.tricontourf` draw contour lines and
filled contours, respectively. Except as noted, function
signatures and return values are the same for both versions.
The triangulation can be specified in one of two ways; either::
tricontour(triangulation, ...)
where triangulation is a :class:`matplotlib.tri.Triangulation`
object, or
::
tricontour(x, y, ...)
tricontour(x, y, triangles, ...)
tricontour(x, y, triangles=triangles, ...)
tricontour(x, y, mask=mask, ...)
tricontour(x, y, triangles, mask=mask, ...)
in which case a Triangulation object will be created. See
:class:`~matplotlib.tri.Triangulation` for an explanation of
these possibilities.
The remaining arguments may be::
tricontour(..., Z)
where *Z* is the array of values to contour, one per point
in the triangulation. The level values are chosen
automatically.
::
tricontour(..., Z, N)
contour up to *N+1* automatically chosen contour levels
(*N* intervals).
::
tricontour(..., Z, V)
draw contour lines at the values specified in sequence *V*,
which must be in increasing order.
::
tricontourf(..., Z, V)
fill the (len(*V*)-1) regions between the values in *V*,
which must be in increasing order.
::
tricontour(Z, **kwargs)
Use keyword args to control colors, linewidth, origin, cmap ... see
below for more details.
``C = tricontour(...)`` returns a
:class:`~matplotlib.contour.TriContourSet` object.
Optional keyword arguments:
*colors*: [ *None* | string | (mpl_colors) ]
If *None*, the colormap specified by cmap will be used.
If a string, like 'r' or 'red', all levels will be plotted in this
color.
If a tuple of matplotlib color args (string, float, rgb, etc),
different levels will be plotted in different colors in the order
specified.
*alpha*: float
The alpha blending value
*cmap*: [ *None* | Colormap ]
A cm :class:`~matplotlib.colors.Colormap` instance or
*None*. If *cmap* is *None* and *colors* is *None*, a
default Colormap is used.
*norm*: [ *None* | Normalize ]
A :class:`matplotlib.colors.Normalize` instance for
scaling data values to colors. If *norm* is *None* and
*colors* is *None*, the default linear scaling is used.
*levels* [level0, level1, ..., leveln]
A list of floating point numbers indicating the level
curves to draw, in increasing order; e.g., to draw just
the zero contour pass ``levels=[0]``
*origin*: [ *None* | 'upper' | 'lower' | 'image' ]
If *None*, the first value of *Z* will correspond to the
lower left corner, location (0,0). If 'image', the rc
value for ``image.origin`` will be used.
This keyword is not active if *X* and *Y* are specified in
the call to contour.
*extent*: [ *None* | (x0,x1,y0,y1) ]
If *origin* is not *None*, then *extent* is interpreted as
in :func:`matplotlib.pyplot.imshow`: it gives the outer
pixel boundaries. In this case, the position of Z[0,0]
is the center of the pixel, not a corner. If *origin* is
*None*, then (*x0*, *y0*) is the position of Z[0,0], and
(*x1*, *y1*) is the position of Z[-1,-1].
This keyword is not active if *X* and *Y* are specified in
the call to contour.
*locator*: [ *None* | ticker.Locator subclass ]
If *locator* is None, the default
:class:`~matplotlib.ticker.MaxNLocator` is used. The
locator is used to determine the contour levels if they
are not given explicitly via the *V* argument.
*extend*: [ 'neither' | 'both' | 'min' | 'max' ]
Unless this is 'neither', contour levels are automatically
added to one or both ends of the range so that all data
are included. These added ranges are then mapped to the
special colormap values which default to the ends of the
colormap range, but can be set via
:meth:`matplotlib.colors.Colormap.set_under` and
:meth:`matplotlib.colors.Colormap.set_over` methods.
*xunits*, *yunits*: [ *None* | registered units ]
Override axis units by specifying an instance of a
:class:`matplotlib.units.ConversionInterface`.
tricontour-only keyword arguments:
*linewidths*: [ *None* | number | tuple of numbers ]
If *linewidths* is *None*, the default width in
``lines.linewidth`` in ``matplotlibrc`` is used.
If a number, all levels will be plotted with this linewidth.
If a tuple, different levels will be plotted with different
linewidths in the order specified
*linestyles*: [ *None* | 'solid' | 'dashed' | 'dashdot' | 'dotted' ]
If *linestyles* is *None*, 'solid' is used.
*linestyles* can also be an iterable of the above strings
specifying a set of linestyles to be used. If this
iterable is shorter than the number of contour levels
it will be repeated as necessary.
If contour is using a monochrome colormap and the contour
level is less than 0, then the linestyle specified
in ``contour.negative_linestyle`` in ``matplotlibrc``
will be used.
tricontourf-only keyword arguments:
*antialiased*: bool
enable antialiasing
Note: tricontourf fills intervals that are closed at the top; that
is, for boundaries *z1* and *z2*, the filled region is::
z1 < z <= z2
There is one exception: if the lowest boundary coincides with
the minimum value of the *z* array, then that minimum value
will be included in the lowest interval.
"""
if not ax._hold:
ax.cla()
kwargs['filled'] = False
return TriContourSet(ax, *args, **kwargs)
def tricontourf(ax, *args, **kwargs):
if not ax._hold:
ax.cla()
kwargs['filled'] = True
return TriContourSet(ax, *args, **kwargs)
tricontourf.__doc__ = tricontour.__doc__
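# Usage sketch (not part of the original module): line and filled contours of
# scattered data via the Axes-level wrappers of the functions defined above.
def _tricontour_sketch():
    import numpy as np
    import matplotlib.pyplot as plt
    rng = np.random.RandomState(0)
    x = rng.uniform(-2, 2, 200)
    y = rng.uniform(-2, 2, 200)
    z = np.exp(-(x ** 2 + y ** 2))
    fig, ax = plt.subplots()
    filled = ax.tricontourf(x, y, z, 10, cmap='viridis')     # 10 filled levels
    ax.tricontour(x, y, z, 10, colors='k', linewidths=0.5)   # overlaid lines
    fig.colorbar(filled, ax=ax)
    return fig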
| 9,640 | 32.947183 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/triplot.py |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
from matplotlib.tri.triangulation import Triangulation
def triplot(ax, *args, **kwargs):
"""
Draw an unstructured triangular grid as lines and/or markers.
The triangulation to plot can be specified in one of two ways;
either::
triplot(triangulation, ...)
where triangulation is a :class:`matplotlib.tri.Triangulation`
object, or
::
triplot(x, y, ...)
triplot(x, y, triangles, ...)
triplot(x, y, triangles=triangles, ...)
triplot(x, y, mask=mask, ...)
triplot(x, y, triangles, mask=mask, ...)
in which case a Triangulation object will be created. See
:class:`~matplotlib.tri.Triangulation` for an explanation of these
possibilities.
The remaining args and kwargs are the same as for
:meth:`~matplotlib.axes.Axes.plot`.
Return a list of 2 :class:`~matplotlib.lines.Line2D` containing
respectively:
- the lines plotted for triangles edges
- the markers plotted for triangles nodes
"""
import matplotlib.axes
tri, args, kwargs = Triangulation.get_from_args_and_kwargs(*args, **kwargs)
x, y, edges = (tri.x, tri.y, tri.edges)
# Decode plot format string, e.g., 'ro-'
fmt = ""
if len(args) > 0:
fmt = args[0]
linestyle, marker, color = matplotlib.axes._base._process_plot_format(fmt)
# Insert plot format string into a copy of kwargs (kwargs values prevail).
kw = kwargs.copy()
for key, val in zip(('linestyle', 'marker', 'color'),
(linestyle, marker, color)):
if val is not None:
kw[key] = kwargs.get(key, val)
# Draw lines without markers.
# Note 1: If we drew markers here, most markers would be drawn more than
# once as they belong to several edges.
# Note 2: We insert nan values in the flattened edges arrays rather than
# plotting directly (triang.x[edges].T, triang.y[edges].T)
# as it considerably speeds-up code execution.
linestyle = kw['linestyle']
kw_lines = kw.copy()
kw_lines['marker'] = 'None' # No marker to draw.
kw_lines['zorder'] = kw.get('zorder', 1) # Path default zorder is used.
if (linestyle is not None) and (linestyle not in ['None', '', ' ']):
tri_lines_x = np.insert(x[edges], 2, np.nan, axis=1)
tri_lines_y = np.insert(y[edges], 2, np.nan, axis=1)
tri_lines = ax.plot(tri_lines_x.ravel(), tri_lines_y.ravel(),
**kw_lines)
else:
tri_lines = ax.plot([], [], **kw_lines)
# Draw markers separately.
marker = kw['marker']
kw_markers = kw.copy()
kw_markers['linestyle'] = 'None' # No line to draw.
if (marker is not None) and (marker not in ['None', '', ' ']):
tri_markers = ax.plot(x, y, **kw_markers)
else:
tri_markers = ax.plot([], [], **kw_markers)
return tri_lines + tri_markers
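# Usage sketch (not part of the original module): draw the edges and nodes of
# a Delaunay triangulation with a plot-style format string.
def _triplot_sketch():
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.tri import Triangulation
    rng = np.random.RandomState(1)
    triang = Triangulation(rng.rand(30), rng.rand(30))  # Delaunay by default
    fig, ax = plt.subplots()
    lines, markers = ax.triplot(triang, 'bo-', lw=0.8, markersize=3)
    return lines, markers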
| 3,043 | 33.202247 | 79 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/trirefine.py |
"""
Mesh refinement for triangular grids.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
from matplotlib.tri.triangulation import Triangulation
import matplotlib.tri.triinterpolate
class TriRefiner(object):
"""
Abstract base class for classes implementing mesh refinement.
A TriRefiner encapsulates a Triangulation object and provides tools for
mesh refinement and interpolation.
Derived classes must implement:
- ``refine_triangulation(return_tri_index=False, **kwargs)`` , where
the optional keyword arguments *kwargs* are defined in each
TriRefiner concrete implementation, and which returns :
- a refined triangulation
- optionally (depending on *return_tri_index*), for each
point of the refined triangulation: the index of
the initial triangulation triangle to which it belongs.
- ``refine_field(z, triinterpolator=None, **kwargs)`` , where:
- *z* array of field values (to refine) defined at the base
triangulation nodes
- *triinterpolator* is a
:class:`~matplotlib.tri.TriInterpolator` (optional)
- the other optional keyword arguments *kwargs* are defined in
each TriRefiner concrete implementation
and which returns (as a tuple) a refined triangular mesh and the
interpolated values of the field at the refined triangulation nodes.
"""
def __init__(self, triangulation):
if not isinstance(triangulation, Triangulation):
raise ValueError("Expected a Triangulation object")
self._triangulation = triangulation
class UniformTriRefiner(TriRefiner):
"""
Uniform mesh refinement by recursive subdivisions.
Parameters
----------
triangulation : :class:`~matplotlib.tri.Triangulation`
The encapsulated triangulation (to be refined)
"""
# See Also
# --------
# :class:`~matplotlib.tri.CubicTriInterpolator` and
# :class:`~matplotlib.tri.TriAnalyzer`.
# """
def __init__(self, triangulation):
TriRefiner.__init__(self, triangulation)
def refine_triangulation(self, return_tri_index=False, subdiv=3):
"""
Computes a uniformly refined triangulation *refi_triangulation* of
the encapsulated :attr:`triangulation`.
This function refines the encapsulated triangulation by splitting each
father triangle into 4 child sub-triangles built on the edges midside
nodes, recursively (level of recursion *subdiv*).
In the end, each triangle is hence divided into ``4**subdiv``
child triangles.
The default value for *subdiv* is 3 resulting in 64 refined
subtriangles for each triangle of the initial triangulation.
Parameters
----------
return_tri_index : boolean, optional
Boolean indicating whether an index table indicating the father
triangle index of each point will be returned. Default value
False.
subdiv : integer, optional
Recursion level for the subdivision. Defaults to 3.
Each triangle will be divided into ``4**subdiv`` child triangles.
Returns
-------
refi_triangulation : :class:`~matplotlib.tri.Triangulation`
The returned refined triangulation
found_index : array-like of integers
Index of the initial triangulation containing triangle, for each
point of *refi_triangulation*.
Returned only if *return_tri_index* is set to True.
"""
refi_triangulation = self._triangulation
ntri = refi_triangulation.triangles.shape[0]
# Computes the triangulation ancestors numbers in the reference
# triangulation.
ancestors = np.arange(ntri, dtype=np.int32)
for _ in range(subdiv):
refi_triangulation, ancestors = self._refine_triangulation_once(
refi_triangulation, ancestors)
refi_npts = refi_triangulation.x.shape[0]
refi_triangles = refi_triangulation.triangles
# Now we compute found_index table if needed
if return_tri_index:
# We have to initialize found_index with -1 because some nodes
# may very well belong to no triangle at all, e.g., in case of
# Delaunay Triangulation with DuplicatePointWarning.
found_index = - np.ones(refi_npts, dtype=np.int32)
tri_mask = self._triangulation.mask
if tri_mask is None:
found_index[refi_triangles] = np.repeat(ancestors,
3).reshape(-1, 3)
else:
# There is a subtlety here: we want to avoid whenever possible
# that refined points container is a masked triangle (which
# would result in artifacts in plots).
# So we impose the numbering from masked ancestors first,
# then overwrite it with unmasked ancestor numbers.
ancestor_mask = tri_mask[ancestors]
found_index[refi_triangles[ancestor_mask, :]
] = np.repeat(ancestors[ancestor_mask],
3).reshape(-1, 3)
found_index[refi_triangles[~ancestor_mask, :]
] = np.repeat(ancestors[~ancestor_mask],
3).reshape(-1, 3)
return refi_triangulation, found_index
else:
return refi_triangulation
def refine_field(self, z, triinterpolator=None, subdiv=3):
"""
Refines a field defined on the encapsulated triangulation.
Returns *refi_tri* (refined triangulation), *refi_z* (interpolated
values of the field at the node of the refined triangulation).
Parameters
----------
z : 1d-array-like of length ``n_points``
Values of the field to refine, defined at the nodes of the
encapsulated triangulation. (``n_points`` is the number of points
in the initial triangulation)
triinterpolator : :class:`~matplotlib.tri.TriInterpolator`, optional
Interpolator used for field interpolation. If not specified,
a :class:`~matplotlib.tri.CubicTriInterpolator` will
be used.
subdiv : integer, optional
Recursion level for the subdivision. Defaults to 3.
Each triangle will be divided into ``4**subdiv`` child triangles.
Returns
-------
refi_tri : :class:`~matplotlib.tri.Triangulation` object
The returned refined triangulation
refi_z : 1d array of length: *refi_tri* node count.
The returned interpolated field (at *refi_tri* nodes)
"""
if triinterpolator is None:
interp = matplotlib.tri.CubicTriInterpolator(
self._triangulation, z)
else:
if not isinstance(triinterpolator,
matplotlib.tri.TriInterpolator):
raise ValueError("Expected a TriInterpolator object")
interp = triinterpolator
refi_tri, found_index = self.refine_triangulation(
subdiv=subdiv, return_tri_index=True)
refi_z = interp._interpolate_multikeys(
refi_tri.x, refi_tri.y, tri_index=found_index)[0]
return refi_tri, refi_z
@staticmethod
def _refine_triangulation_once(triangulation, ancestors=None):
"""
This function refines a matplotlib.tri *triangulation* by splitting
each triangle into 4 child-masked_triangles built on the edges midside
nodes.
The masked triangles, if present, are also split but their children
returned masked.
If *ancestors* is not provided, returns only a new triangulation:
child_triangulation.
If the array-like key table *ancestor* is given, it shall be of shape
(ntri,) where ntri is the number of *triangulation* masked_triangles.
In this case, the function returns
(child_triangulation, child_ancestors)
child_ancestors is defined so that the 4 child masked_triangles share
the same index as their father: child_ancestors.shape = (4 * ntri,).
"""
x = triangulation.x
y = triangulation.y
# According to tri.triangulation doc:
# neighbors[i,j] is the triangle that is the neighbor
# to the edge from point index masked_triangles[i,j] to point
# index masked_triangles[i,(j+1)%3].
neighbors = triangulation.neighbors
triangles = triangulation.triangles
npts = np.shape(x)[0]
ntri = np.shape(triangles)[0]
if ancestors is not None:
ancestors = np.asarray(ancestors)
if np.shape(ancestors) != (ntri,):
raise ValueError(
"Incompatible shapes provide for triangulation"
".masked_triangles and ancestors: {0} and {1}".format(
np.shape(triangles), np.shape(ancestors)))
# Initiating tables refi_x and refi_y of the refined triangulation
# points
# hint: each apex is shared by 2 masked_triangles except the borders.
borders = np.sum(neighbors == -1)
added_pts = (3*ntri + borders) // 2
refi_npts = npts + added_pts
refi_x = np.zeros(refi_npts)
refi_y = np.zeros(refi_npts)
# First part of refi_x, refi_y is just the initial points
refi_x[:npts] = x
refi_y[:npts] = y
# Second part contains the edge midside nodes.
# Each edge belongs to 1 triangle (if border edge) or is shared by 2
# masked_triangles (interior edge).
# We first build 2 arrays of length 3 * ntri holding the edge starting
# nodes (edge_elems, edge_apexes); we then extract only the masters to
# avoid overlaps.
# The so-called 'master' is the triangle with biggest index
# The 'slave' is the triangle with lower index
# (can be -1 if border edge)
# For slave and master we will identify the apex pointing to the edge
# start
edge_elems = np.ravel(np.vstack([np.arange(ntri, dtype=np.int32),
np.arange(ntri, dtype=np.int32),
np.arange(ntri, dtype=np.int32)]))
edge_apexes = np.ravel(np.vstack([np.zeros(ntri, dtype=np.int32),
np.ones(ntri, dtype=np.int32),
np.ones(ntri, dtype=np.int32)*2]))
edge_neighbors = neighbors[edge_elems, edge_apexes]
mask_masters = (edge_elems > edge_neighbors)
# Identifying the "masters" and adding to refi_x, refi_y vec
masters = edge_elems[mask_masters]
apex_masters = edge_apexes[mask_masters]
x_add = (x[triangles[masters, apex_masters]] +
x[triangles[masters, (apex_masters+1) % 3]]) * 0.5
y_add = (y[triangles[masters, apex_masters]] +
y[triangles[masters, (apex_masters+1) % 3]]) * 0.5
refi_x[npts:] = x_add
refi_y[npts:] = y_add
# Building the new masked_triangles ; each old masked_triangles hosts
# 4 new masked_triangles
# there are 6 pts to identify per 'old' triangle, 3 new_pt_corner and
# 3 new_pt_midside
new_pt_corner = triangles
# What is the index in refi_x, refi_y of point at middle of apex iapex
# of elem ielem ?
# If ielem is the apex master: simple count, given the way refi_x was
# built.
# If ielem is the apex slave: we do not know it yet; we will find it
# soon using the neighbors table.
new_pt_midside = np.empty([ntri, 3], dtype=np.int32)
cum_sum = npts
for imid in range(3):
mask_st_loc = (imid == apex_masters)
n_masters_loc = np.sum(mask_st_loc)
elem_masters_loc = masters[mask_st_loc]
new_pt_midside[:, imid][elem_masters_loc] = np.arange(
n_masters_loc, dtype=np.int32) + cum_sum
cum_sum += n_masters_loc
# Now dealing with slave elems.
# for each slave element we identify the master and then the inode
# once slave_masters is identified, slave_masters_apex is such that:
# neighbors[slaves_masters, slave_masters_apex] == slaves
mask_slaves = np.logical_not(mask_masters)
slaves = edge_elems[mask_slaves]
slaves_masters = edge_neighbors[mask_slaves]
diff_table = np.abs(neighbors[slaves_masters, :] -
np.outer(slaves, np.ones(3, dtype=np.int32)))
slave_masters_apex = np.argmin(diff_table, axis=1)
slaves_apex = edge_apexes[mask_slaves]
new_pt_midside[slaves, slaves_apex] = new_pt_midside[
slaves_masters, slave_masters_apex]
# Builds the 4 child masked_triangles
child_triangles = np.empty([ntri*4, 3], dtype=np.int32)
child_triangles[0::4, :] = np.vstack([
new_pt_corner[:, 0], new_pt_midside[:, 0],
new_pt_midside[:, 2]]).T
child_triangles[1::4, :] = np.vstack([
new_pt_corner[:, 1], new_pt_midside[:, 1],
new_pt_midside[:, 0]]).T
child_triangles[2::4, :] = np.vstack([
new_pt_corner[:, 2], new_pt_midside[:, 2],
new_pt_midside[:, 1]]).T
child_triangles[3::4, :] = np.vstack([
new_pt_midside[:, 0], new_pt_midside[:, 1],
new_pt_midside[:, 2]]).T
child_triangulation = Triangulation(refi_x, refi_y, child_triangles)
# Builds the child mask
if triangulation.mask is not None:
child_triangulation.set_mask(np.repeat(triangulation.mask, 4))
if ancestors is None:
return child_triangulation
else:
return child_triangulation, np.repeat(ancestors, 4)
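# Usage sketch (not part of the original module): refine a coarse field so
# that contours drawn from it look smooth.
def _refine_sketch():
    import numpy as np
    from matplotlib.tri import Triangulation, UniformTriRefiner
    x = np.array([0.0, 1.0, 0.0, 1.0, 0.5])
    y = np.array([0.0, 0.0, 1.0, 1.0, 0.5])
    z = x * x + y * y
    refiner = UniformTriRefiner(Triangulation(x, y))
    # each triangle is split into 4**2 = 16 children and z is re-interpolated
    refi_triang, refi_z = refiner.refine_field(z, subdiv=2)
    return refi_triang.triangles.shape, refi_z.shape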
| 14,266 | 43.033951 | 78 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/triinterpolate.py |
"""
Interpolation inside triangular grids.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
from matplotlib.tri import Triangulation
from matplotlib.tri.trifinder import TriFinder
from matplotlib.tri.tritools import TriAnalyzer
import numpy as np
import warnings
__all__ = ('TriInterpolator', 'LinearTriInterpolator', 'CubicTriInterpolator')
class TriInterpolator(object):
"""
Abstract base class for classes used to perform interpolation on
triangular grids.
Derived classes implement the following methods:
- ``__call__(x, y)`` ,
where x, y are array_like point coordinates of the same shape, and
that returns a masked array of the same shape containing the
interpolated z-values.
- ``gradient(x, y)`` ,
where x, y are array_like point coordinates of the same
shape, and that returns a list of 2 masked arrays of the same shape
containing the 2 derivatives of the interpolator (derivatives of
interpolated z values with respect to x and y).
"""
def __init__(self, triangulation, z, trifinder=None):
if not isinstance(triangulation, Triangulation):
raise ValueError("Expected a Triangulation object")
self._triangulation = triangulation
self._z = np.asarray(z)
if self._z.shape != self._triangulation.x.shape:
raise ValueError("z array must have same length as triangulation x"
" and y arrays")
if trifinder is not None and not isinstance(trifinder, TriFinder):
raise ValueError("Expected a TriFinder object")
self._trifinder = trifinder or self._triangulation.get_trifinder()
# Default scaling factors : 1.0 (= no scaling)
# Scaling may be used for interpolations for which the order of
# magnitude of x, y has an impact on the interpolant definition.
# Please refer to :meth:`_interpolate_multikeys` for details.
self._unit_x = 1.0
self._unit_y = 1.0
# Default triangle renumbering: None (= no renumbering)
# Renumbering may be used to avoid unnecessary computations
# if complex calculations are done inside the Interpolator.
# Please refer to :meth:`_interpolate_multikeys` for details.
self._tri_renum = None
# __call__ and gradient docstrings are shared by all subclasses
# (except, if needed, relevant additions).
# However these methods are only implemented in subclasses to avoid
# confusion in the documentation.
_docstring__call__ = """
Returns a masked array containing interpolated values at the specified
x,y points.
Parameters
----------
x, y : array-like
x and y coordinates of the same shape and any number of
dimensions.
Returns
-------
z : np.ma.array
Masked array of the same shape as *x* and *y* ; values
corresponding to (*x*, *y*) points outside of the triangulation
are masked out.
"""
_docstringgradient = """
Returns a list of 2 masked arrays containing interpolated derivatives
at the specified x,y points.
Parameters
----------
x, y : array-like
x and y coordinates of the same shape and any number of
dimensions.
Returns
-------
dzdx, dzdy : np.ma.array
2 masked arrays of the same shape as *x* and *y* ; values
corresponding to (x,y) points outside of the triangulation
are masked out.
The first returned array contains the values of
:math:`\\frac{\\partial z}{\\partial x}` and the second those of
:math:`\\frac{\\partial z}{\\partial y}`.
"""
def _interpolate_multikeys(self, x, y, tri_index=None,
return_keys=('z',)):
"""
Versatile (private) method defined for all TriInterpolators.
:meth:`_interpolate_multikeys` is a wrapper around method
:meth:`_interpolate_single_key` (to be defined in the child
subclasses).
:meth:`_interpolate_single_key` actually performs the interpolation,
but only for 1-dimensional inputs and at valid locations (inside
unmasked triangles of the triangulation).
The purpose of :meth:`_interpolate_multikeys` is to implement the
following common tasks needed in all subclasses implementations:
- calculation of containing triangles
- dealing with more than one interpolation request at the same
location (e.g., if the 2 derivatives are requested, it is
unnecessary to compute the containing triangles twice)
- scaling according to self._unit_x, self._unit_y
- dealing with points outside of the grid (with fill value np.nan)
- dealing with multi-dimensional *x*, *y* arrays: flattening for
the :meth:`_interpolate_single_key` call and final reshaping.
(Note that np.vectorize could do most of those things very well for
you, but it does it by function evaluations over successive tuples of
the input arrays. Therefore, this tends to be more time consuming than
using optimized numpy functions - e.g., np.dot - which can be used
easily on the flattened inputs, in the child-subclass methods
:meth:`_interpolate_single_key`.)
It is guaranteed that the calls to :meth:`_interpolate_single_key`
will be done with flattened (1-d) array_like input parameters `x`, `y`
and with flattened, valid `tri_index` arrays (no -1 index allowed).
Parameters
----------
x, y : array_like
x and y coordinates indicating where interpolated values are
requested.
tri_index : integer array_like, optional
Array of the containing triangle indices, same shape as
*x* and *y*. Defaults to None. If None, these indices
will be computed by a TriFinder instance.
(Note: For point outside the grid, tri_index[ipt] shall be -1).
return_keys : tuple of keys from {'z', 'dzdx', 'dzdy'}
Defines the interpolation arrays to return, and in which order.
Returns
-------
ret : list of arrays
Each array-like contains the expected interpolated values in the
order defined by *return_keys* parameter.
"""
# Flattening and rescaling inputs arrays x, y
# (initial shape is stored for output)
x = np.asarray(x, dtype=np.float64)
y = np.asarray(y, dtype=np.float64)
sh_ret = x.shape
if x.shape != y.shape:
raise ValueError("x and y shall have same shapes."
" Given: {0} and {1}".format(x.shape, y.shape))
x = np.ravel(x)
y = np.ravel(y)
x_scaled = x/self._unit_x
y_scaled = y/self._unit_y
size_ret = np.size(x_scaled)
# Computes & ravels the element indexes, extract the valid ones.
if tri_index is None:
tri_index = self._trifinder(x, y)
else:
if (tri_index.shape != sh_ret):
raise ValueError(
"tri_index array is provided and shall"
" have same shape as x and y. Given: "
"{0} and {1}".format(tri_index.shape, sh_ret))
tri_index = np.ravel(tri_index)
mask_in = (tri_index != -1)
if self._tri_renum is None:
valid_tri_index = tri_index[mask_in]
else:
valid_tri_index = self._tri_renum[tri_index[mask_in]]
valid_x = x_scaled[mask_in]
valid_y = y_scaled[mask_in]
ret = []
for return_key in return_keys:
# Find the return index associated with the key.
try:
return_index = {'z': 0, 'dzdx': 1, 'dzdy': 2}[return_key]
except KeyError:
raise ValueError("return_keys items shall take values in"
" {'z', 'dzdx', 'dzdy'}")
# Sets the scale factor for f & df components
scale = [1., 1./self._unit_x, 1./self._unit_y][return_index]
# Computes the interpolation
ret_loc = np.empty(size_ret, dtype=np.float64)
ret_loc[~mask_in] = np.nan
ret_loc[mask_in] = self._interpolate_single_key(
return_key, valid_tri_index, valid_x, valid_y) * scale
ret += [np.ma.masked_invalid(ret_loc.reshape(sh_ret), copy=False)]
return ret
def _interpolate_single_key(self, return_key, tri_index, x, y):
"""
Performs the interpolation at points belonging to the triangulation
(inside an unmasked triangles).
Parameters
----------
return_key : string key from {'z', 'dzdx', 'dzdy'}
Identifies the requested values (z or its derivatives)
tri_index : 1d integer array
Valid triangle index (-1 prohibited)
x, y : 1d arrays, same shape as `tri_index`
Valid locations where interpolation is requested.
Returns
-------
ret : 1-d array
Returned array of the same size as *tri_index*
"""
raise NotImplementedError("TriInterpolator subclasses" +
"should implement _interpolate_single_key!")
class LinearTriInterpolator(TriInterpolator):
"""
A LinearTriInterpolator performs linear interpolation on a triangular grid.
Each triangle is represented by a plane so that an interpolated value at
point (x,y) lies on the plane of the triangle containing (x,y).
Interpolated values are therefore continuous across the triangulation, but
their first derivatives are discontinuous at edges between triangles.
Parameters
----------
triangulation : :class:`~matplotlib.tri.Triangulation` object
The triangulation to interpolate over.
z : array_like of shape (npoints,)
Array of values, defined at grid points, to interpolate between.
trifinder : :class:`~matplotlib.tri.TriFinder` object, optional
If this is not specified, the Triangulation's default TriFinder will
be used by calling
:func:`matplotlib.tri.Triangulation.get_trifinder`.
Methods
-------
`__call__` (x, y) : Returns interpolated values at x,y points
`gradient` (x, y) : Returns interpolated derivatives at x,y points
"""
def __init__(self, triangulation, z, trifinder=None):
TriInterpolator.__init__(self, triangulation, z, trifinder)
# Store plane coefficients for fast interpolation calculations.
self._plane_coefficients = \
self._triangulation.calculate_plane_coefficients(self._z)
def __call__(self, x, y):
return self._interpolate_multikeys(x, y, tri_index=None,
return_keys=('z',))[0]
__call__.__doc__ = TriInterpolator._docstring__call__
def gradient(self, x, y):
return self._interpolate_multikeys(x, y, tri_index=None,
return_keys=('dzdx', 'dzdy'))
gradient.__doc__ = TriInterpolator._docstringgradient
def _interpolate_single_key(self, return_key, tri_index, x, y):
if return_key == 'z':
return (self._plane_coefficients[tri_index, 0]*x +
self._plane_coefficients[tri_index, 1]*y +
self._plane_coefficients[tri_index, 2])
elif return_key == 'dzdx':
return self._plane_coefficients[tri_index, 0]
elif return_key == 'dzdy':
return self._plane_coefficients[tri_index, 1]
else:
raise ValueError("Invalid return_key: " + return_key)
class CubicTriInterpolator(TriInterpolator):
"""
A CubicTriInterpolator performs cubic interpolation on triangular grids.
In one-dimension - on a segment - a cubic interpolating function is
defined by the values of the function and its derivative at both ends.
This is almost the same in 2-d inside a triangle, except that the values
of the function and its 2 derivatives have to be defined at each triangle
node.
The CubicTriInterpolator takes the value of the function at each node -
provided by the user - and internally computes the value of the
derivatives, resulting in a smooth interpolation.
(As a special feature, the user can also impose the value of the
derivatives at each node, but this is not supposed to be the common
usage.)
Parameters
----------
triangulation : :class:`~matplotlib.tri.Triangulation` object
The triangulation to interpolate over.
z : array_like of shape (npoints,)
Array of values, defined at grid points, to interpolate between.
kind : {'min_E', 'geom', 'user'}, optional
Choice of the smoothing algorithm, in order to compute
the interpolant derivatives (defaults to 'min_E'):
- if 'min_E': (default) The derivatives at each node are computed
to minimize a bending energy.
- if 'geom': The derivatives at each node are computed as a
weighted average of relevant triangle normals. To be used for
speed optimization (large grids).
- if 'user': The user provides the argument `dz`, no computation
is hence needed.
trifinder : :class:`~matplotlib.tri.TriFinder` object, optional
If not specified, the Triangulation's default TriFinder will
be used by calling
:func:`matplotlib.tri.Triangulation.get_trifinder`.
dz : tuple of array_likes (dzdx, dzdy), optional
Used only if *kind* ='user'. In this case *dz* must be provided as
(dzdx, dzdy) where dzdx, dzdy are arrays of the same shape as *z* and
are the interpolant first derivatives at the *triangulation* points.
Methods
-------
`__call__` (x, y) : Returns interpolated values at x,y points
`gradient` (x, y) : Returns interpolated derivatives at x,y points
Notes
-----
This note is a bit technical and details the way a
:class:`~matplotlib.tri.CubicTriInterpolator` computes a cubic
interpolation.
The interpolation is based on a Clough-Tocher subdivision scheme of
the *triangulation* mesh (to make it clearer, each triangle of the
grid will be divided in 3 child-triangles, and on each child triangle
the interpolated function is a cubic polynomial of the 2 coordinates).
This technique originates from FEM (Finite Element Method) analysis;
the element used is a reduced Hsieh-Clough-Tocher (HCT)
element. Its shape functions are described in [1]_.
The assembled function is guaranteed to be C1-smooth, i.e. it is
continuous and its first derivatives are also continuous (this
is easy to show inside the triangles but is also true when crossing the
edges).
In the default case (*kind* ='min_E'), the interpolant minimizes a
curvature energy on the functional space generated by the HCT element
shape functions - with imposed values but arbitrary derivatives at each
node. The minimized functional is the integral of the so-called total
curvature (implementation based on an algorithm from [2]_ - PCG sparse
solver):
.. math::
E(z) = \\ \\frac{1}{2} \\int_{\\Omega} \\left(
\\left( \\frac{\\partial^2{z}}{\\partial{x}^2} \\right)^2 +
\\left( \\frac{\\partial^2{z}}{\\partial{y}^2} \\right)^2 +
2\\left( \\frac{\\partial^2{z}}{\\partial{y}\\partial{x}}
\\right)^2 \\right) dx\\,dy
If the case *kind* ='geom' is chosen by the user, a simple geometric
approximation is used (weighted average of the triangle normal
vectors), which could improve speed on very large grids.
References
----------
.. [1] Michel Bernadou, Kamal Hassan, "Basis functions for general
Hsieh-Clough-Tocher triangles, complete or reduced.",
International Journal for Numerical Methods in Engineering,
17(5):784 - 789. 2.01.
.. [2] C.T. Kelley, "Iterative Methods for Optimization".
"""
def __init__(self, triangulation, z, kind='min_E', trifinder=None,
dz=None):
TriInterpolator.__init__(self, triangulation, z, trifinder)
# Loads the underlying c++ _triangulation.
# (During loading, reordering of triangulation._triangles may occur so
# that all final triangles are now anti-clockwise)
self._triangulation.get_cpp_triangulation()
# To build the stiffness matrix and avoid zero-energy spurious modes
# we will only store internally the valid (unmasked) triangles and
# the necessary (used) points coordinates.
# 2 renumbering tables need to be computed and stored:
# - a triangle renum table in order to translate the result from a
# TriFinder instance into the internal stored triangle number.
# - a node renum table to overwrite the self._z values into the new
# (used) node numbering.
tri_analyzer = TriAnalyzer(self._triangulation)
(compressed_triangles, compressed_x, compressed_y, tri_renum,
node_renum) = tri_analyzer._get_compressed_triangulation(True, True)
self._triangles = compressed_triangles
self._tri_renum = tri_renum
# Taking into account the node renumbering in self._z:
node_mask = (node_renum == -1)
self._z[node_renum[~node_mask]] = self._z
self._z = self._z[~node_mask]
# Computing scale factors
self._unit_x = np.ptp(compressed_x)
self._unit_y = np.ptp(compressed_y)
self._pts = np.column_stack([compressed_x / self._unit_x,
compressed_y / self._unit_y])
# Computing triangle points
self._tris_pts = self._pts[self._triangles]
# Computing eccentricities
self._eccs = self._compute_tri_eccentricities(self._tris_pts)
# Computing dof estimations for HCT triangle shape function
self._dof = self._compute_dof(kind, dz=dz)
# Loading HCT element
self._ReferenceElement = _ReducedHCT_Element()
def __call__(self, x, y):
return self._interpolate_multikeys(x, y, tri_index=None,
return_keys=('z',))[0]
__call__.__doc__ = TriInterpolator._docstring__call__
def gradient(self, x, y):
return self._interpolate_multikeys(x, y, tri_index=None,
return_keys=('dzdx', 'dzdy'))
gradient.__doc__ = TriInterpolator._docstringgradient
def _interpolate_single_key(self, return_key, tri_index, x, y):
tris_pts = self._tris_pts[tri_index]
alpha = self._get_alpha_vec(x, y, tris_pts)
ecc = self._eccs[tri_index]
dof = np.expand_dims(self._dof[tri_index], axis=1)
if return_key == 'z':
return self._ReferenceElement.get_function_values(
alpha, ecc, dof)
elif return_key in ['dzdx', 'dzdy']:
J = self._get_jacobian(tris_pts)
dzdx = self._ReferenceElement.get_function_derivatives(
alpha, J, ecc, dof)
if return_key == 'dzdx':
return dzdx[:, 0, 0]
else:
return dzdx[:, 1, 0]
else:
raise ValueError("Invalid return_key: " + return_key)
def _compute_dof(self, kind, dz=None):
"""
Computes and returns nodal dofs according to kind
Parameters
----------
kind: {'min_E', 'geom', 'user'}
Choice of the _DOF_estimator subclass to perform the gradient
estimation.
dz: tuple of array_likes (dzdx, dzdy), optional
Used only if *kind=user ; in this case passed to the
:class:`_DOF_estimator_user`.
Returns
-------
dof : array_like, shape (npts,2)
Estimation of the gradient at triangulation nodes (stored as
degree of freedoms of reduced-HCT triangle elements).
"""
if kind == 'user':
if dz is None:
raise ValueError("For a CubicTriInterpolator with "
"*kind*='user', a valid *dz* "
"argument is expected.")
TE = _DOF_estimator_user(self, dz=dz)
elif kind == 'geom':
TE = _DOF_estimator_geom(self)
elif kind == 'min_E':
TE = _DOF_estimator_min_E(self)
else:
raise ValueError("CubicTriInterpolator *kind* proposed: {0} ; "
"should be one of: "
"'user', 'geom', 'min_E'".format(kind))
return TE.compute_dof_from_df()
@staticmethod
def _get_alpha_vec(x, y, tris_pts):
"""
Fast (vectorized) function to compute barycentric coordinates alpha.
Parameters
----------
x, y : array-like of dim 1 (shape (nx,))
Coordinates of the points whose points barycentric
coordinates are requested
tris_pts : array like of dim 3 (shape: (nx,3,2))
Coordinates of the containing triangles apexes.
Returns
-------
alpha : array of dim 2 (shape (nx,3))
Barycentric coordinates of the points inside the containing
triangles.
"""
ndim = tris_pts.ndim-2
a = tris_pts[:, 1, :] - tris_pts[:, 0, :]
b = tris_pts[:, 2, :] - tris_pts[:, 0, :]
abT = np.concatenate([np.expand_dims(a, ndim+1),
np.expand_dims(b, ndim+1)], ndim+1)
ab = _transpose_vectorized(abT)
x = np.expand_dims(x, ndim)
y = np.expand_dims(y, ndim)
OM = np.concatenate([x, y], ndim) - tris_pts[:, 0, :]
metric = _prod_vectorized(ab, abT)
# Here we try to deal with the colinear cases.
# metric_inv is in this case set to the Moore-Penrose pseudo-inverse
# meaning that we will still return a set of valid barycentric
# coordinates.
metric_inv = _pseudo_inv22sym_vectorized(metric)
Covar = _prod_vectorized(ab, _transpose_vectorized(
np.expand_dims(OM, ndim)))
ksi = _prod_vectorized(metric_inv, Covar)
alpha = _to_matrix_vectorized([
[1-ksi[:, 0, 0]-ksi[:, 1, 0]], [ksi[:, 0, 0]], [ksi[:, 1, 0]]])
return alpha
@staticmethod
def _get_jacobian(tris_pts):
"""
Fast (vectorized) function to compute triangle jacobian matrix.
Parameters
----------
tris_pts : array like of dim 3 (shape: (nx,3,2))
Coordinates of the containing triangles apexes.
Returns
-------
J : array of dim 3 (shape (nx,2,2))
            Jacobian matrices.
J[itri,:,:] is the jacobian matrix at apex 0 of the triangle
itri, so that the following (matrix) relationship holds:
[dz/dksi] = [J] x [dz/dx]
with x: global coordinates
ksi: element parametric coordinates in triangle first apex
local basis.
"""
a = np.array(tris_pts[:, 1, :] - tris_pts[:, 0, :])
b = np.array(tris_pts[:, 2, :] - tris_pts[:, 0, :])
J = _to_matrix_vectorized([[a[:, 0], a[:, 1]],
[b[:, 0], b[:, 1]]])
return J
@staticmethod
def _compute_tri_eccentricities(tris_pts):
"""
Computes triangle eccentricities
Parameters
----------
tris_pts : array like of dim 3 (shape: (nx,3,2))
Coordinates of the triangles apexes.
Returns
-------
ecc : array like of dim 2 (shape: (nx,3))
The so-called eccentricity parameters [1] needed for
HCT triangular element.
"""
a = np.expand_dims(tris_pts[:, 2, :]-tris_pts[:, 1, :], axis=2)
b = np.expand_dims(tris_pts[:, 0, :]-tris_pts[:, 2, :], axis=2)
c = np.expand_dims(tris_pts[:, 1, :]-tris_pts[:, 0, :], axis=2)
# Do not use np.squeeze, this is dangerous if only one triangle
# in the triangulation...
dot_a = _prod_vectorized(_transpose_vectorized(a), a)[:, 0, 0]
dot_b = _prod_vectorized(_transpose_vectorized(b), b)[:, 0, 0]
dot_c = _prod_vectorized(_transpose_vectorized(c), c)[:, 0, 0]
# Note that this line will raise a warning for dot_a, dot_b or dot_c
# zeros, but we choose not to support triangles with duplicate points.
return _to_matrix_vectorized([[(dot_c-dot_b) / dot_a],
[(dot_a-dot_c) / dot_b],
[(dot_b-dot_a) / dot_c]])
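# Illustrative sketch (added for exposition, not part of matplotlib): typical
# use of the public CubicTriInterpolator API implemented above. The helper
# name `_demo_cubic_tri_interpolator`, the grid size and the test function
# are arbitrary choices for this example.
def _demo_cubic_tri_interpolator():
    import numpy as np
    import matplotlib.tri as mtri
    rng = np.random.RandomState(0)
    x = rng.rand(100)
    y = rng.rand(100)
    z = np.sin(3. * x) * np.cos(3. * y)
    triangulation = mtri.Triangulation(x, y)        # Delaunay triangulation
    interp = mtri.CubicTriInterpolator(triangulation, z, kind='geom')
    xi = np.linspace(0.2, 0.8, 5)
    yi = np.linspace(0.2, 0.8, 5)
    zi = interp(xi, yi)                # interpolated values (masked outside)
    dzdx, dzdy = interp.gradient(xi, yi)            # interpolated gradient
    return zi, dzdx, dzdy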
# FEM element used for interpolation and for solving minimisation
# problem (Reduced HCT element)
class _ReducedHCT_Element():
"""
Implementation of reduced HCT triangular element with explicit shape
functions.
Computes z, dz, d2z and the element stiffness matrix for bending energy:
E(f) = integral( (d2z/dx2 + d2z/dy2)**2 dA)
*** Reference for the shape functions: ***
    [1] Basis functions for general Hsieh-Clough-Tocher triangles, complete or
        reduced.
        Michel Bernadou, Kamal Hassan
        International Journal for Numerical Methods in Engineering,
        17(5):784-789, 1981.
*** Element description: ***
9 dofs: z and dz given at 3 apex
C1 (conform)
"""
# 1) Loads matrices to generate shape functions as a function of
    # triangle eccentricities - based on [1] p.11
M = np.array([
[ 0.00, 0.00, 0.00, 4.50, 4.50, 0.00, 0.00, 0.00, 0.00, 0.00],
[-0.25, 0.00, 0.00, 0.50, 1.25, 0.00, 0.00, 0.00, 0.00, 0.00],
[-0.25, 0.00, 0.00, 1.25, 0.50, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.50, 1.00, 0.00, -1.50, 0.00, 3.00, 3.00, 0.00, 0.00, 3.00],
[ 0.00, 0.00, 0.00, -0.25, 0.25, 0.00, 1.00, 0.00, 0.00, 0.50],
[ 0.25, 0.00, 0.00, -0.50, -0.25, 1.00, 0.00, 0.00, 0.00, 1.00],
[ 0.50, 0.00, 1.00, 0.00, -1.50, 0.00, 0.00, 3.00, 3.00, 3.00],
[ 0.25, 0.00, 0.00, -0.25, -0.50, 0.00, 0.00, 0.00, 1.00, 1.00],
[ 0.00, 0.00, 0.00, 0.25, -0.25, 0.00, 0.00, 1.00, 0.00, 0.50]])
M0 = np.array([
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[-1.00, 0.00, 0.00, 1.50, 1.50, 0.00, 0.00, 0.00, 0.00, -3.00],
[-0.50, 0.00, 0.00, 0.75, 0.75, 0.00, 0.00, 0.00, 0.00, -1.50],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 1.00, 0.00, 0.00, -1.50, -1.50, 0.00, 0.00, 0.00, 0.00, 3.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.50, 0.00, 0.00, -0.75, -0.75, 0.00, 0.00, 0.00, 0.00, 1.50]])
M1 = np.array([
[-0.50, 0.00, 0.00, 1.50, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[-0.25, 0.00, 0.00, 0.75, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.50, 0.00, 0.00, -1.50, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.25, 0.00, 0.00, -0.75, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00]])
M2 = np.array([
[ 0.50, 0.00, 0.00, 0.00, -1.50, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.25, 0.00, 0.00, 0.00, -0.75, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[-0.50, 0.00, 0.00, 0.00, 1.50, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[-0.25, 0.00, 0.00, 0.00, 0.75, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00],
[ 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00]])
# 2) Loads matrices to rotate components of gradient & Hessian
# vectors in the reference basis of triangle first apex (a0)
rotate_dV = np.array([[ 1., 0.], [ 0., 1.],
[ 0., 1.], [-1., -1.],
[-1., -1.], [ 1., 0.]])
rotate_d2V = np.array([[1., 0., 0.], [0., 1., 0.], [ 0., 0., 1.],
[0., 1., 0.], [1., 1., 1.], [ 0., -2., -1.],
[1., 1., 1.], [1., 0., 0.], [-2., 0., -1.]])
    # 3) Loads Gauss points & weights on the 3 sub-triangles for P2
    # exact integral - 3 points on each sub-triangle.
    # NOTE: as the 2nd derivative is discontinuous, we really need those 9
# points!
n_gauss = 9
gauss_pts = np.array([[13./18., 4./18., 1./18.],
[ 4./18., 13./18., 1./18.],
[ 7./18., 7./18., 4./18.],
[ 1./18., 13./18., 4./18.],
[ 1./18., 4./18., 13./18.],
[ 4./18., 7./18., 7./18.],
[ 4./18., 1./18., 13./18.],
[13./18., 1./18., 4./18.],
[ 7./18., 4./18., 7./18.]], dtype=np.float64)
gauss_w = np.ones([9], dtype=np.float64) / 9.
# 4) Stiffness matrix for curvature energy
E = np.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 2.]])
# 5) Loads the matrix to compute DOF_rot from tri_J at apex 0
J0_to_J1 = np.array([[-1., 1.], [-1., 0.]])
J0_to_J2 = np.array([[ 0., -1.], [ 1., -1.]])
def get_function_values(self, alpha, ecc, dofs):
"""
Parameters
----------
alpha : is a (N x 3 x 1) array (array of column-matrices) of
barycentric coordinates,
ecc : is a (N x 3 x 1) array (array of column-matrices) of triangle
eccentricities,
dofs : is a (N x 1 x 9) arrays (arrays of row-matrices) of computed
degrees of freedom.
Returns
-------
Returns the N-array of interpolated function values.
"""
subtri = np.argmin(alpha, axis=1)[:, 0]
ksi = _roll_vectorized(alpha, -subtri, axis=0)
E = _roll_vectorized(ecc, -subtri, axis=0)
x = ksi[:, 0, 0]
y = ksi[:, 1, 0]
z = ksi[:, 2, 0]
x_sq = x*x
y_sq = y*y
z_sq = z*z
V = _to_matrix_vectorized([
[x_sq*x], [y_sq*y], [z_sq*z], [x_sq*z], [x_sq*y], [y_sq*x],
[y_sq*z], [z_sq*y], [z_sq*x], [x*y*z]])
prod = _prod_vectorized(self.M, V)
prod += _scalar_vectorized(E[:, 0, 0],
_prod_vectorized(self.M0, V))
prod += _scalar_vectorized(E[:, 1, 0],
_prod_vectorized(self.M1, V))
prod += _scalar_vectorized(E[:, 2, 0],
_prod_vectorized(self.M2, V))
s = _roll_vectorized(prod, 3*subtri, axis=0)
return _prod_vectorized(dofs, s)[:, 0, 0]
def get_function_derivatives(self, alpha, J, ecc, dofs):
"""
Parameters
----------
*alpha* is a (N x 3 x 1) array (array of column-matrices of
barycentric coordinates)
*J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
triangle first apex)
*ecc* is a (N x 3 x 1) array (array of column-matrices of triangle
eccentricities)
*dofs* is a (N x 1 x 9) arrays (arrays of row-matrices) of computed
degrees of freedom.
Returns
-------
Returns the values of interpolated function derivatives [dz/dx, dz/dy]
in global coordinates at locations alpha, as a column-matrices of
shape (N x 2 x 1).
"""
subtri = np.argmin(alpha, axis=1)[:, 0]
ksi = _roll_vectorized(alpha, -subtri, axis=0)
E = _roll_vectorized(ecc, -subtri, axis=0)
x = ksi[:, 0, 0]
y = ksi[:, 1, 0]
z = ksi[:, 2, 0]
x_sq = x*x
y_sq = y*y
z_sq = z*z
dV = _to_matrix_vectorized([
[ -3.*x_sq, -3.*x_sq],
[ 3.*y_sq, 0.],
[ 0., 3.*z_sq],
[ -2.*x*z, -2.*x*z+x_sq],
[-2.*x*y+x_sq, -2.*x*y],
[ 2.*x*y-y_sq, -y_sq],
[ 2.*y*z, y_sq],
[ z_sq, 2.*y*z],
[ -z_sq, 2.*x*z-z_sq],
[ x*z-y*z, x*y-y*z]])
# Puts back dV in first apex basis
dV = _prod_vectorized(dV, _extract_submatrices(
self.rotate_dV, subtri, block_size=2, axis=0))
prod = _prod_vectorized(self.M, dV)
prod += _scalar_vectorized(E[:, 0, 0],
_prod_vectorized(self.M0, dV))
prod += _scalar_vectorized(E[:, 1, 0],
_prod_vectorized(self.M1, dV))
prod += _scalar_vectorized(E[:, 2, 0],
_prod_vectorized(self.M2, dV))
dsdksi = _roll_vectorized(prod, 3*subtri, axis=0)
dfdksi = _prod_vectorized(dofs, dsdksi)
# In global coordinates:
# Here we try to deal with the simplest colinear cases, returning a
# null matrix.
J_inv = _safe_inv22_vectorized(J)
dfdx = _prod_vectorized(J_inv, _transpose_vectorized(dfdksi))
return dfdx
def get_function_hessians(self, alpha, J, ecc, dofs):
"""
Parameters
----------
*alpha* is a (N x 3 x 1) array (array of column-matrices) of
barycentric coordinates
*J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
triangle first apex)
*ecc* is a (N x 3 x 1) array (array of column-matrices) of triangle
eccentricities
*dofs* is a (N x 1 x 9) arrays (arrays of row-matrices) of computed
degrees of freedom.
Returns
-------
Returns the values of interpolated function 2nd-derivatives
[d2z/dx2, d2z/dy2, d2z/dxdy] in global coordinates at locations alpha,
as a column-matrices of shape (N x 3 x 1).
"""
d2sdksi2 = self.get_d2Sidksij2(alpha, ecc)
d2fdksi2 = _prod_vectorized(dofs, d2sdksi2)
H_rot = self.get_Hrot_from_J(J)
d2fdx2 = _prod_vectorized(d2fdksi2, H_rot)
return _transpose_vectorized(d2fdx2)
def get_d2Sidksij2(self, alpha, ecc):
"""
Parameters
----------
*alpha* is a (N x 3 x 1) array (array of column-matrices) of
barycentric coordinates
*ecc* is a (N x 3 x 1) array (array of column-matrices) of triangle
eccentricities
Returns
-------
        Returns the arrays d2sdksi2 (N x 9 x 3) Hessian of shape functions
        expressed in covariant coordinates in first apex basis.
"""
subtri = np.argmin(alpha, axis=1)[:, 0]
ksi = _roll_vectorized(alpha, -subtri, axis=0)
E = _roll_vectorized(ecc, -subtri, axis=0)
x = ksi[:, 0, 0]
y = ksi[:, 1, 0]
z = ksi[:, 2, 0]
d2V = _to_matrix_vectorized([
[ 6.*x, 6.*x, 6.*x],
[ 6.*y, 0., 0.],
[ 0., 6.*z, 0.],
[ 2.*z, 2.*z-4.*x, 2.*z-2.*x],
[2.*y-4.*x, 2.*y, 2.*y-2.*x],
[2.*x-4.*y, 0., -2.*y],
[ 2.*z, 0., 2.*y],
[ 0., 2.*y, 2.*z],
[ 0., 2.*x-4.*z, -2.*z],
[ -2.*z, -2.*y, x-y-z]])
# Puts back d2V in first apex basis
d2V = _prod_vectorized(d2V, _extract_submatrices(
self.rotate_d2V, subtri, block_size=3, axis=0))
prod = _prod_vectorized(self.M, d2V)
prod += _scalar_vectorized(E[:, 0, 0],
_prod_vectorized(self.M0, d2V))
prod += _scalar_vectorized(E[:, 1, 0],
_prod_vectorized(self.M1, d2V))
prod += _scalar_vectorized(E[:, 2, 0],
_prod_vectorized(self.M2, d2V))
d2sdksi2 = _roll_vectorized(prod, 3*subtri, axis=0)
return d2sdksi2
def get_bending_matrices(self, J, ecc):
"""
Parameters
----------
*J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
triangle first apex)
*ecc* is a (N x 3 x 1) array (array of column-matrices) of triangle
eccentricities
Returns
-------
Returns the element K matrices for bending energy expressed in
GLOBAL nodal coordinates.
K_ij = integral [ (d2zi/dx2 + d2zi/dy2) * (d2zj/dx2 + d2zj/dy2) dA]
tri_J is needed to rotate dofs from local basis to global basis
"""
n = np.size(ecc, 0)
# 1) matrix to rotate dofs in global coordinates
J1 = _prod_vectorized(self.J0_to_J1, J)
J2 = _prod_vectorized(self.J0_to_J2, J)
DOF_rot = np.zeros([n, 9, 9], dtype=np.float64)
DOF_rot[:, 0, 0] = 1
DOF_rot[:, 3, 3] = 1
DOF_rot[:, 6, 6] = 1
DOF_rot[:, 1:3, 1:3] = J
DOF_rot[:, 4:6, 4:6] = J1
DOF_rot[:, 7:9, 7:9] = J2
# 2) matrix to rotate Hessian in global coordinates.
H_rot, area = self.get_Hrot_from_J(J, return_area=True)
# 3) Computes stiffness matrix
# Gauss quadrature.
K = np.zeros([n, 9, 9], dtype=np.float64)
weights = self.gauss_w
pts = self.gauss_pts
for igauss in range(self.n_gauss):
alpha = np.tile(pts[igauss, :], n).reshape(n, 3)
alpha = np.expand_dims(alpha, 2)
weight = weights[igauss]
d2Skdksi2 = self.get_d2Sidksij2(alpha, ecc)
d2Skdx2 = _prod_vectorized(d2Skdksi2, H_rot)
K += weight * _prod_vectorized(_prod_vectorized(d2Skdx2, self.E),
_transpose_vectorized(d2Skdx2))
# 4) With nodal (not elem) dofs
K = _prod_vectorized(_prod_vectorized(_transpose_vectorized(DOF_rot),
K), DOF_rot)
# 5) Need the area to compute total element energy
return _scalar_vectorized(area, K)
def get_Hrot_from_J(self, J, return_area=False):
"""
Parameters
----------
*J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
triangle first apex)
Returns
-------
Returns H_rot used to rotate Hessian from local basis of first apex,
to global coordinates.
if *return_area* is True, returns also the triangle area (0.5*det(J))
"""
# Here we try to deal with the simplest colinear cases ; a null
# energy and area is imposed.
J_inv = _safe_inv22_vectorized(J)
Ji00 = J_inv[:, 0, 0]
Ji11 = J_inv[:, 1, 1]
Ji10 = J_inv[:, 1, 0]
Ji01 = J_inv[:, 0, 1]
H_rot = _to_matrix_vectorized([
[Ji00*Ji00, Ji10*Ji10, Ji00*Ji10],
[Ji01*Ji01, Ji11*Ji11, Ji01*Ji11],
[2*Ji00*Ji01, 2*Ji11*Ji10, Ji00*Ji11+Ji10*Ji01]])
if not return_area:
return H_rot
else:
area = 0.5 * (J[:, 0, 0]*J[:, 1, 1] - J[:, 0, 1]*J[:, 1, 0])
return H_rot, area
def get_Kff_and_Ff(self, J, ecc, triangles, Uc):
"""
Builds K and F for the following elliptic formulation:
minimization of curvature energy with value of function at node
imposed and derivatives 'free'.
        Builds the global Kff matrix in coo format.
Builds the full Ff vec Ff = - Kfc x Uc
Parameters
----------
*J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
triangle first apex)
*ecc* is a (N x 3 x 1) array (array of column-matrices) of triangle
eccentricities
*triangles* is a (N x 3) array of nodes indexes.
*Uc* is (N x 3) array of imposed displacements at nodes
Returns
-------
(Kff_rows, Kff_cols, Kff_vals) Kff matrix in coo format - Duplicate
(row, col) entries must be summed.
        Ff: force vector - dim npts * 2
"""
ntri = np.size(ecc, 0)
vec_range = np.arange(ntri, dtype=np.int32)
c_indices = -np.ones(ntri, dtype=np.int32) # for unused dofs, -1
f_dof = [1, 2, 4, 5, 7, 8]
c_dof = [0, 3, 6]
# vals, rows and cols indices in global dof numbering
f_dof_indices = _to_matrix_vectorized([[
c_indices, triangles[:, 0]*2, triangles[:, 0]*2+1,
c_indices, triangles[:, 1]*2, triangles[:, 1]*2+1,
c_indices, triangles[:, 2]*2, triangles[:, 2]*2+1]])
expand_indices = np.ones([ntri, 9, 1], dtype=np.int32)
f_row_indices = _prod_vectorized(_transpose_vectorized(f_dof_indices),
_transpose_vectorized(expand_indices))
f_col_indices = _prod_vectorized(expand_indices, f_dof_indices)
K_elem = self.get_bending_matrices(J, ecc)
# Extracting sub-matrices
# Explanation & notations:
        # * Subscript f denotes 'free' degrees of freedom (i.e. dz/dx, dz/dy)
# * Subscript c denotes 'condensated' (imposed) degrees of freedom
# (i.e. z at all nodes)
# * F = [Ff, Fc] is the force vector
# * U = [Uf, Uc] is the imposed dof vector
# [ Kff Kfc ]
# * K = [ ] is the laplacian stiffness matrix
        #        [ Kcf Kcc ]
# * As F = K x U one gets straightforwardly: Ff = - Kfc x Uc
# Computing Kff stiffness matrix in sparse coo format
Kff_vals = np.ravel(K_elem[np.ix_(vec_range, f_dof, f_dof)])
Kff_rows = np.ravel(f_row_indices[np.ix_(vec_range, f_dof, f_dof)])
Kff_cols = np.ravel(f_col_indices[np.ix_(vec_range, f_dof, f_dof)])
# Computing Ff force vector in sparse coo format
Kfc_elem = K_elem[np.ix_(vec_range, f_dof, c_dof)]
Uc_elem = np.expand_dims(Uc, axis=2)
Ff_elem = - _prod_vectorized(Kfc_elem, Uc_elem)[:, :, 0]
Ff_indices = f_dof_indices[np.ix_(vec_range, [0], f_dof)][:, 0, :]
# Extracting Ff force vector in dense format
# We have to sum duplicate indices - using bincount
Ff = np.bincount(np.ravel(Ff_indices), weights=np.ravel(Ff_elem))
return Kff_rows, Kff_cols, Kff_vals, Ff
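# Illustrative sketch (added for exposition, not part of matplotlib): the coo
# assembly in get_Kff_and_Ff above relies on summing duplicate (row, col)
# entries, either later by the sparse matrix class or directly with
# np.bincount on a flattened index. The helper below shows the idea on a tiny
# 3 x 3 example with hypothetical values.
def _demo_coo_duplicate_sum():
    import numpy as np
    n = 3
    rows = np.array([0, 1, 1, 2, 1])
    cols = np.array([0, 1, 1, 2, 0])
    vals = np.array([1., 2., 3., 4., 5.])
    # Dense reference: the duplicate (1, 1) entries accumulate to 5.
    dense = np.zeros((n, n))
    np.add.at(dense, (rows, cols), vals)
    # Equivalent bincount reduction keyed on the flattened (row, col) index.
    flat = np.bincount(rows * n + cols, weights=vals, minlength=n * n)
    assert np.allclose(dense.ravel(), flat)
    return dense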
# :class:_DOF_estimator, _DOF_estimator_user, _DOF_estimator_geom,
# _DOF_estimator_min_E
# Private classes used to compute the degree of freedom of each triangular
# element for the TriCubicInterpolator.
class _DOF_estimator():
"""
Abstract base class for classes used to perform estimation of a function
first derivatives, and deduce the dofs for a CubicTriInterpolator using a
reduced HCT element formulation.
    Derived classes implement compute_dz(self, **kwargs), returning
    np.vstack([dfx, dfy]).T where dfx, dfy are the estimates of the 2
    gradient coordinates.
"""
def __init__(self, interpolator, **kwargs):
if not isinstance(interpolator, CubicTriInterpolator):
raise ValueError("Expected a CubicTriInterpolator object")
self._pts = interpolator._pts
self._tris_pts = interpolator._tris_pts
self.z = interpolator._z
self._triangles = interpolator._triangles
(self._unit_x, self._unit_y) = (interpolator._unit_x,
interpolator._unit_y)
self.dz = self.compute_dz(**kwargs)
self.compute_dof_from_df()
def compute_dz(self, **kwargs):
raise NotImplementedError
def compute_dof_from_df(self):
"""
Computes reduced-HCT elements degrees of freedom, knowing the
gradient.
"""
J = CubicTriInterpolator._get_jacobian(self._tris_pts)
tri_z = self.z[self._triangles]
tri_dz = self.dz[self._triangles]
tri_dof = self.get_dof_vec(tri_z, tri_dz, J)
return tri_dof
@staticmethod
def get_dof_vec(tri_z, tri_dz, J):
"""
Computes the dof vector of a triangle, knowing the value of f, df and
of the local Jacobian at each node.
*tri_z*: array of shape (3,) of f nodal values
*tri_dz*: array of shape (3,2) of df/dx, df/dy nodal values
*J*: Jacobian matrix in local basis of apex 0
Returns dof array of shape (9,) so that for each apex iapex:
dof[iapex*3+0] = f(Ai)
dof[iapex*3+1] = df(Ai).(AiAi+)
            dof[iapex*3+2] = df(Ai).(AiAi-)
"""
npt = tri_z.shape[0]
dof = np.zeros([npt, 9], dtype=np.float64)
J1 = _prod_vectorized(_ReducedHCT_Element.J0_to_J1, J)
J2 = _prod_vectorized(_ReducedHCT_Element.J0_to_J2, J)
col0 = _prod_vectorized(J, np.expand_dims(tri_dz[:, 0, :], axis=3))
col1 = _prod_vectorized(J1, np.expand_dims(tri_dz[:, 1, :], axis=3))
col2 = _prod_vectorized(J2, np.expand_dims(tri_dz[:, 2, :], axis=3))
dfdksi = _to_matrix_vectorized([
[col0[:, 0, 0], col1[:, 0, 0], col2[:, 0, 0]],
[col0[:, 1, 0], col1[:, 1, 0], col2[:, 1, 0]]])
dof[:, 0:7:3] = tri_z
dof[:, 1:8:3] = dfdksi[:, 0]
dof[:, 2:9:3] = dfdksi[:, 1]
return dof
class _DOF_estimator_user(_DOF_estimator):
""" dz is imposed by user / Accounts for scaling if any """
def compute_dz(self, dz):
(dzdx, dzdy) = dz
dzdx = dzdx * self._unit_x
dzdy = dzdy * self._unit_y
return np.vstack([dzdx, dzdy]).T
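# Illustrative sketch (added for exposition, not part of matplotlib): when the
# gradient is known analytically it can be fed to CubicTriInterpolator
# through the *dz* argument with kind='user', which is handled by
# _DOF_estimator_user above (including the internal rescaling). Sizes and the
# test function are arbitrary.
def _demo_user_supplied_gradient():
    import numpy as np
    import matplotlib.tri as mtri
    rng = np.random.RandomState(1)
    x = rng.rand(50)
    y = rng.rand(50)
    z = x**2 + y**2
    dzdx = 2. * x       # analytic gradient, known here
    dzdy = 2. * y
    triangulation = mtri.Triangulation(x, y)
    interp = mtri.CubicTriInterpolator(triangulation, z, kind='user',
                                       dz=(dzdx, dzdy))
    return interp(np.array([0.3]), np.array([0.7]))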
class _DOF_estimator_geom(_DOF_estimator):
""" Fast 'geometric' approximation, recommended for large arrays. """
def compute_dz(self):
"""
        self.df is computed as a weighted average over the triangles sharing a
        common node. On each triangle itri, f is first assumed linear (= ~f),
        which allows computing d~f[itri].
        The following approximation of the df nodal values is then proposed:
            df[ipt] = SUM ( w[itri] x d~f[itri] , for itri sharing apex ipt)
        The weighting coefficients w[itri] are proportional to the angle of
        the triangle itri at apex ipt
"""
el_geom_w = self.compute_geom_weights()
el_geom_grad = self.compute_geom_grads()
# Sum of weights coeffs
w_node_sum = np.bincount(np.ravel(self._triangles),
weights=np.ravel(el_geom_w))
# Sum of weighted df = (dfx, dfy)
dfx_el_w = np.empty_like(el_geom_w)
dfy_el_w = np.empty_like(el_geom_w)
for iapex in range(3):
dfx_el_w[:, iapex] = el_geom_w[:, iapex]*el_geom_grad[:, 0]
dfy_el_w[:, iapex] = el_geom_w[:, iapex]*el_geom_grad[:, 1]
dfx_node_sum = np.bincount(np.ravel(self._triangles),
weights=np.ravel(dfx_el_w))
dfy_node_sum = np.bincount(np.ravel(self._triangles),
weights=np.ravel(dfy_el_w))
# Estimation of df
dfx_estim = dfx_node_sum/w_node_sum
dfy_estim = dfy_node_sum/w_node_sum
return np.vstack([dfx_estim, dfy_estim]).T
def compute_geom_weights(self):
"""
        Builds the (nelems x 3) weights coeffs of triangle angles,
renormalized so that np.sum(weights, axis=1) == np.ones(nelems)
"""
weights = np.zeros([np.size(self._triangles, 0), 3])
tris_pts = self._tris_pts
for ipt in range(3):
p0 = tris_pts[:, (ipt) % 3, :]
p1 = tris_pts[:, (ipt+1) % 3, :]
p2 = tris_pts[:, (ipt-1) % 3, :]
alpha1 = np.arctan2(p1[:, 1]-p0[:, 1], p1[:, 0]-p0[:, 0])
alpha2 = np.arctan2(p2[:, 1]-p0[:, 1], p2[:, 0]-p0[:, 0])
# In the below formula we could take modulo 2. but
# modulo 1. is safer regarding round-off errors (flat triangles).
angle = np.abs(np.mod((alpha2-alpha1) / np.pi, 1.))
            # Weight proportional to angle up to np.pi/2 ; null weight for
# degenerated cases 0. and np.pi (Note that `angle` is normalized
# by np.pi)
weights[:, ipt] = 0.5 - np.abs(angle-0.5)
return weights
def compute_geom_grads(self):
"""
Compute the (global) gradient component of f assumed linear (~f).
        Returns array df of shape (nelems,2).
        df[ielem] x dM[ielem] = dz[ielem], i.e. df = dz x dM^-1
"""
tris_pts = self._tris_pts
tris_f = self.z[self._triangles]
dM1 = tris_pts[:, 1, :] - tris_pts[:, 0, :]
dM2 = tris_pts[:, 2, :] - tris_pts[:, 0, :]
dM = np.dstack([dM1, dM2])
# Here we try to deal with the simplest colinear cases: a null
# gradient is assumed in this case.
dM_inv = _safe_inv22_vectorized(dM)
dZ1 = tris_f[:, 1] - tris_f[:, 0]
dZ2 = tris_f[:, 2] - tris_f[:, 0]
dZ = np.vstack([dZ1, dZ2]).T
df = np.empty_like(dZ)
# With np.einsum : could be ej,eji -> ej
df[:, 0] = dZ[:, 0]*dM_inv[:, 0, 0] + dZ[:, 1]*dM_inv[:, 1, 0]
df[:, 1] = dZ[:, 0]*dM_inv[:, 0, 1] + dZ[:, 1]*dM_inv[:, 1, 1]
return df
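# Illustrative sketch (added for exposition, not part of matplotlib): the
# per-triangle gradient used by compute_geom_grads above amounts to solving,
# for each triangle, a 2x2 linear system built from two edge vectors. The
# helper below treats a single hypothetical triangle with plain numpy.
def _demo_linear_gradient_single_triangle():
    import numpy as np
    pts = np.array([[0., 0.], [1., 0.], [0., 1.]])   # triangle apexes
    z = np.array([1., 3., 2.])                       # nodal values
    # Rows of A are the two edge vectors from apex 0; solving A @ g = b gives
    # the gradient of the linear interpolant ~f on this triangle.
    A = np.vstack([pts[1] - pts[0], pts[2] - pts[0]])
    b = np.array([z[1] - z[0], z[2] - z[0]])
    g = np.linalg.solve(A, b)
    assert np.allclose(g, [2., 1.])     # d~f/dx = 2 and d~f/dy = 1 here
    return g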
class _DOF_estimator_min_E(_DOF_estimator_geom):
"""
The 'smoothest' approximation, df is computed through global minimization
of the bending energy:
E(f) = integral[(d2z/dx2 + d2z/dy2 + 2 d2z/dxdy)**2 dA]
"""
def __init__(self, Interpolator):
self._eccs = Interpolator._eccs
_DOF_estimator_geom.__init__(self, Interpolator)
def compute_dz(self):
"""
Elliptic solver for bending energy minimization.
Uses a dedicated 'toy' sparse Jacobi PCG solver.
"""
# Initial guess for iterative PCG solver.
dz_init = _DOF_estimator_geom.compute_dz(self)
Uf0 = np.ravel(dz_init)
reference_element = _ReducedHCT_Element()
J = CubicTriInterpolator._get_jacobian(self._tris_pts)
eccs = self._eccs
triangles = self._triangles
Uc = self.z[self._triangles]
# Building stiffness matrix and force vector in coo format
Kff_rows, Kff_cols, Kff_vals, Ff = reference_element.get_Kff_and_Ff(
J, eccs, triangles, Uc)
# Building sparse matrix and solving minimization problem
        # We could use a scipy.sparse direct solver ; however to avoid this
        # external dependency an implementation of a simple PCG solver with
        # a simple diagonal Jacobi preconditioner is implemented.
tol = 1.e-10
n_dof = Ff.shape[0]
Kff_coo = _Sparse_Matrix_coo(Kff_vals, Kff_rows, Kff_cols,
shape=(n_dof, n_dof))
Kff_coo.compress_csc()
Uf, err = _cg(A=Kff_coo, b=Ff, x0=Uf0, tol=tol)
# If the PCG did not converge, we return the best guess between Uf0
# and Uf.
err0 = np.linalg.norm(Kff_coo.dot(Uf0) - Ff)
if err0 < err:
# Maybe a good occasion to raise a warning here ?
warnings.warn("In TriCubicInterpolator initialization, PCG sparse"
" solver did not converge after 1000 iterations. "
"`geom` approximation is used instead of `min_E`")
Uf = Uf0
# Building dz from Uf
dz = np.empty([self._pts.shape[0], 2], dtype=np.float64)
dz[:, 0] = Uf[::2]
dz[:, 1] = Uf[1::2]
return dz
# The following private :class:_Sparse_Matrix_coo and :func:_cg provide
# a PCG sparse solver for (symmetric) elliptic problems.
class _Sparse_Matrix_coo(object):
def __init__(self, vals, rows, cols, shape):
"""
Creates a sparse matrix in coo format
*vals*: arrays of values of non-null entries of the matrix
*rows*: int arrays of rows of non-null entries of the matrix
*cols*: int arrays of cols of non-null entries of the matrix
*shape*: 2-tuple (n,m) of matrix shape
"""
self.n, self.m = shape
self.vals = np.asarray(vals, dtype=np.float64)
self.rows = np.asarray(rows, dtype=np.int32)
self.cols = np.asarray(cols, dtype=np.int32)
def dot(self, V):
"""
Dot product of self by a vector *V* in sparse-dense to dense format
*V* dense vector of shape (self.m,)
"""
assert V.shape == (self.m,)
return np.bincount(self.rows,
weights=self.vals*V[self.cols],
minlength=self.m)
def compress_csc(self):
"""
Compress rows, cols, vals / summing duplicates. Sort for csc format.
"""
_, unique, indices = np.unique(
self.rows + self.n*self.cols,
return_index=True, return_inverse=True)
self.rows = self.rows[unique]
self.cols = self.cols[unique]
self.vals = np.bincount(indices, weights=self.vals)
def compress_csr(self):
"""
Compress rows, cols, vals / summing duplicates. Sort for csr format.
"""
_, unique, indices = np.unique(
self.m*self.rows + self.cols,
return_index=True, return_inverse=True)
self.rows = self.rows[unique]
self.cols = self.cols[unique]
self.vals = np.bincount(indices, weights=self.vals)
def to_dense(self):
"""
Returns a dense matrix representing self.
Mainly for debugging purposes.
"""
ret = np.zeros([self.n, self.m], dtype=np.float64)
nvals = self.vals.size
for i in range(nvals):
ret[self.rows[i], self.cols[i]] += self.vals[i]
return ret
def __str__(self):
return self.to_dense().__str__()
@property
def diag(self):
"""
Returns the (dense) vector of the diagonal elements.
"""
in_diag = (self.rows == self.cols)
        diag = np.zeros(min(self.n, self.m), dtype=np.float64)  # default 0.
diag[self.rows[in_diag]] = self.vals[in_diag]
return diag
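# Illustrative sketch (added for exposition, not part of matplotlib): the
# private _Sparse_Matrix_coo above stores (vals, rows, cols) triplets and
# implements dot() as a per-row np.bincount reduction. The helper below
# checks this against the dense product on a tiny hypothetical matrix
# (private API, so subject to change).
def _demo_sparse_coo_dot():
    import numpy as np
    vals = np.array([4., 1., 1., 4., 2.])
    rows = np.array([0, 0, 1, 1, 2])
    cols = np.array([0, 1, 0, 1, 2])
    A = _Sparse_Matrix_coo(vals, rows, cols, shape=(3, 3))
    V = np.array([1., 2., 3.])
    assert np.allclose(A.dot(V), A.to_dense().dot(V))
    return A.diag                       # dense vector of diagonal entries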
def _cg(A, b, x0=None, tol=1.e-10, maxiter=1000):
"""
Use Preconditioned Conjugate Gradient iteration to solve A x = b
    A simple Jacobi (diagonal) preconditioner is used.
Parameters
----------
A: _Sparse_Matrix_coo
*A* must have been compressed before by compress_csc or
compress_csr method.
b: array
Right hand side of the linear system.
Returns
-------
x: array.
The converged solution.
err: float
The absolute error np.linalg.norm(A.dot(x) - b)
Other parameters
----------------
x0: array.
Starting guess for the solution.
tol: float.
Tolerance to achieve. The algorithm terminates when the relative
residual is below tol.
maxiter: integer.
Maximum number of iterations. Iteration will stop
after maxiter steps even if the specified tolerance has not
been achieved.
"""
n = b.size
assert A.n == n
assert A.m == n
b_norm = np.linalg.norm(b)
# Jacobi pre-conditioner
kvec = A.diag
# For diag elem < 1e-6 we keep 1e-6.
kvec = np.where(kvec > 1.e-6, kvec, 1.e-6)
# Initial guess
if x0 is None:
x = np.zeros(n)
else:
x = x0
r = b - A.dot(x)
w = r/kvec
p = np.zeros(n)
beta = 0.0
rho = np.dot(r, w)
k = 0
# Following C. T. Kelley
while (np.sqrt(abs(rho)) > tol*b_norm) and (k < maxiter):
p = w + beta*p
z = A.dot(p)
alpha = rho/np.dot(p, z)
r = r - alpha*z
w = r/kvec
rhoold = rho
rho = np.dot(r, w)
x = x + alpha*p
beta = rho/rhoold
#err = np.linalg.norm(A.dot(x) - b) # absolute accuracy - not used
k += 1
err = np.linalg.norm(A.dot(x) - b)
return x, err
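# Illustrative sketch (added for exposition, not part of matplotlib): the
# Jacobi-preconditioned CG above applied to a tiny symmetric positive
# definite system and checked against the dense solver. Values are arbitrary;
# _Sparse_Matrix_coo and _cg are private helpers used here only to
# demonstrate their intended interplay.
def _demo_cg_small_system():
    import numpy as np
    # 2x2 SPD matrix [[4, 1], [1, 3]] in coo triplets.
    vals = np.array([4., 1., 1., 3.])
    rows = np.array([0, 0, 1, 1])
    cols = np.array([0, 1, 0, 1])
    A = _Sparse_Matrix_coo(vals, rows, cols, shape=(2, 2))
    A.compress_csc()                    # sum duplicates, as _cg expects
    b = np.array([1., 2.])
    x, err = _cg(A=A, b=b, tol=1.e-12)
    assert np.allclose(x, np.linalg.solve(A.to_dense(), b))
    return x, err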
# The following private functions:
# :func:`_inv22_vectorized`
# :func:`_safe_inv22_vectorized`
# :func:`_pseudo_inv22sym_vectorized`
# :func:`_prod_vectorized`
# :func:`_scalar_vectorized`
# :func:`_transpose_vectorized`
# :func:`_roll_vectorized`
# :func:`_to_matrix_vectorized`
# :func:`_extract_submatrices`
# provide fast numpy implementation of some standard operations on arrays of
# matrices - stored as (:, n_rows, n_cols)-shaped np.arrays.
def _inv22_vectorized(M):
"""
Inversion of arrays of (2,2) matrices.
"""
assert (M.ndim == 3)
assert (M.shape[-2:] == (2, 2))
M_inv = np.empty_like(M)
delta_inv = np.reciprocal(M[:, 0, 0]*M[:, 1, 1] - M[:, 0, 1]*M[:, 1, 0])
M_inv[:, 0, 0] = M[:, 1, 1]*delta_inv
M_inv[:, 0, 1] = -M[:, 0, 1]*delta_inv
M_inv[:, 1, 0] = -M[:, 1, 0]*delta_inv
M_inv[:, 1, 1] = M[:, 0, 0]*delta_inv
return M_inv
# Development note: Dealing with pathologic 'flat' triangles in the
# CubicTriInterpolator code and impact on (2,2)-matrix inversion functions
# :func:`_safe_inv22_vectorized` and :func:`_pseudo_inv22sym_vectorized`.
#
# Goals:
# 1) The CubicTriInterpolator should be able to handle flat or almost flat
# triangles without raising an error,
# 2) These degenerated triangles should have no impact on the automatic dof
# calculation (associated with null weight for the _DOF_estimator_geom and
# with null energy for the _DOF_estimator_min_E),
# 3) Linear patch test should be passed exactly on degenerated meshes,
# 4) Interpolation (with :meth:`_interpolate_single_key` or
#      :meth:`_interpolate_multikeys`) shall be correctly handled even *inside*
# the pathologic triangles, to interact correctly with a TriRefiner class.
#
# Difficulties:
# Flat triangles have rank-deficient *J* (so-called jacobian matrix) and
# *metric* (the metric tensor = J x J.T). Computation of the local
# tangent plane is also problematic.
#
# Implementation:
# Most of the time, when computing the inverse of a rank-deficient matrix it
# is safe to simply return the null matrix (which is the implementation in
# :func:`_safe_inv22_vectorized`). This is because of point 2), itself
# enforced by:
# - null area hence null energy in :class:`_DOF_estimator_min_E`
# - angles close or equal to 0 or np.pi hence null weight in
# :class:`_DOF_estimator_geom`.
# Note that the function angle -> weight is continuous and maximum for an
# angle np.pi/2 (refer to :meth:`compute_geom_weights`)
# The exception is the computation of barycentric coordinates, which is done
# by inversion of the *metric* matrix. In this case, we need to compute a set
# of valid coordinates (1 among numerous possibilities), to ensure point 4).
# We benefit here from the symmetry of metric = J x J.T, which makes it easier
# to compute a pseudo-inverse in :func:`_pseudo_inv22sym_vectorized`
def _safe_inv22_vectorized(M):
"""
Inversion of arrays of (2,2) matrices, returns 0 for rank-deficient
matrices.
*M* : array of (2,2) matrices to inverse, shape (n,2,2)
"""
assert M.ndim == 3
assert M.shape[-2:] == (2, 2)
M_inv = np.empty_like(M)
prod1 = M[:, 0, 0]*M[:, 1, 1]
delta = prod1 - M[:, 0, 1]*M[:, 1, 0]
# We set delta_inv to 0. in case of a rank deficient matrix ; a
# rank-deficient input matrix *M* will lead to a null matrix in output
rank2 = (np.abs(delta) > 1e-8*np.abs(prod1))
if np.all(rank2):
# Normal 'optimized' flow.
delta_inv = 1./delta
else:
# 'Pathologic' flow.
delta_inv = np.zeros(M.shape[0])
delta_inv[rank2] = 1./delta[rank2]
M_inv[:, 0, 0] = M[:, 1, 1]*delta_inv
M_inv[:, 0, 1] = -M[:, 0, 1]*delta_inv
M_inv[:, 1, 0] = -M[:, 1, 0]*delta_inv
M_inv[:, 1, 1] = M[:, 0, 0]*delta_inv
return M_inv
def _pseudo_inv22sym_vectorized(M):
"""
Inversion of arrays of (2,2) SYMMETRIC matrices ; returns the
(Moore-Penrose) pseudo-inverse for rank-deficient matrices.
In case M is of rank 1, we have M = trace(M) x P where P is the orthogonal
projection on Im(M), and we return trace(M)^-1 x P == M / trace(M)**2
In case M is of rank 0, we return the null matrix.
*M* : array of (2,2) matrices to inverse, shape (n,2,2)
"""
assert M.ndim == 3
assert M.shape[-2:] == (2, 2)
M_inv = np.empty_like(M)
prod1 = M[:, 0, 0]*M[:, 1, 1]
delta = prod1 - M[:, 0, 1]*M[:, 1, 0]
rank2 = (np.abs(delta) > 1e-8*np.abs(prod1))
if np.all(rank2):
# Normal 'optimized' flow.
M_inv[:, 0, 0] = M[:, 1, 1] / delta
M_inv[:, 0, 1] = -M[:, 0, 1] / delta
M_inv[:, 1, 0] = -M[:, 1, 0] / delta
M_inv[:, 1, 1] = M[:, 0, 0] / delta
else:
# 'Pathologic' flow.
# Here we have to deal with 2 sub-cases
# 1) First sub-case: matrices of rank 2:
delta = delta[rank2]
M_inv[rank2, 0, 0] = M[rank2, 1, 1] / delta
M_inv[rank2, 0, 1] = -M[rank2, 0, 1] / delta
M_inv[rank2, 1, 0] = -M[rank2, 1, 0] / delta
M_inv[rank2, 1, 1] = M[rank2, 0, 0] / delta
# 2) Second sub-case: rank-deficient matrices of rank 0 and 1:
rank01 = ~rank2
tr = M[rank01, 0, 0] + M[rank01, 1, 1]
tr_zeros = (np.abs(tr) < 1.e-8)
sq_tr_inv = (1.-tr_zeros) / (tr**2+tr_zeros)
#sq_tr_inv = 1. / tr**2
M_inv[rank01, 0, 0] = M[rank01, 0, 0] * sq_tr_inv
M_inv[rank01, 0, 1] = M[rank01, 0, 1] * sq_tr_inv
M_inv[rank01, 1, 0] = M[rank01, 1, 0] * sq_tr_inv
M_inv[rank01, 1, 1] = M[rank01, 1, 1] * sq_tr_inv
return M_inv
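# Illustrative sketch (added for exposition, not part of matplotlib): for a
# rank-1 symmetric (2, 2) matrix, _pseudo_inv22sym_vectorized above returns
# M / trace(M)**2, which coincides with the Moore-Penrose pseudo-inverse
# computed by np.linalg.pinv. The vector u below is an arbitrary example.
def _demo_pseudo_inv_rank1():
    import numpy as np
    u = np.array([1., 2.])
    M = np.outer(u, u)                  # rank-1 symmetric matrix
    M_inv = _pseudo_inv22sym_vectorized(M[np.newaxis, :, :])[0]
    assert np.allclose(M_inv, np.linalg.pinv(M))
    assert np.allclose(M_inv, M / np.trace(M) ** 2)
    return M_inv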
def _prod_vectorized(M1, M2):
"""
Matrix product between arrays of matrices, or a matrix and an array of
matrices (*M1* and *M2*)
"""
sh1 = M1.shape
sh2 = M2.shape
assert len(sh1) >= 2
assert len(sh2) >= 2
assert sh1[-1] == sh2[-2]
ndim1 = len(sh1)
t1_index = list(xrange(ndim1-2)) + [ndim1-1, ndim1-2]
return np.sum(np.transpose(M1, t1_index)[..., np.newaxis] *
M2[..., np.newaxis, :], -3)
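# Illustrative sketch (added for exposition, not part of matplotlib):
# _prod_vectorized above is a broadcast-and-sum implementation of a batched
# matrix product; on 3-d inputs it agrees with np.einsum. Shapes below are
# arbitrary.
def _demo_prod_vectorized_equivalence():
    import numpy as np
    rng = np.random.RandomState(2)
    M1 = rng.rand(5, 2, 3)
    M2 = rng.rand(5, 3, 4)
    out = _prod_vectorized(M1, M2)
    assert np.allclose(out, np.einsum('nij,njk->nik', M1, M2))
    return out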
def _scalar_vectorized(scalar, M):
"""
Scalar product between scalars and matrices.
"""
return scalar[:, np.newaxis, np.newaxis]*M
def _transpose_vectorized(M):
"""
Transposition of an array of matrices *M*.
"""
ndim = M.ndim
assert ndim == 3
return np.transpose(M, [0, ndim-1, ndim-2])
def _roll_vectorized(M, roll_indices, axis):
"""
Rolls an array of matrices along an axis according to an array of indices
*roll_indices*
*axis* can be either 0 (rolls rows) or 1 (rolls columns).
"""
assert axis in [0, 1]
ndim = M.ndim
assert ndim == 3
ndim_roll = roll_indices.ndim
assert ndim_roll == 1
sh = M.shape
r, c = sh[-2:]
assert sh[0] == roll_indices.shape[0]
vec_indices = np.arange(sh[0], dtype=np.int32)
# Builds the rolled matrix
M_roll = np.empty_like(M)
if axis == 0:
for ir in range(r):
for ic in range(c):
M_roll[:, ir, ic] = M[vec_indices, (-roll_indices+ir) % r, ic]
elif axis == 1:
for ir in range(r):
for ic in range(c):
M_roll[:, ir, ic] = M[vec_indices, ir, (-roll_indices+ic) % c]
return M_roll
def _to_matrix_vectorized(M):
"""
    Builds an array of matrices from individual np.arrays of identical
    shapes.
    *M*: nrows-list of ncols-lists of arrays of shape sh.
    Returns M_res np.array of shape (sh, nrows, ncols) so that:
M_res[...,i,j] = M[i][j]
"""
assert isinstance(M, (tuple, list))
assert all([isinstance(item, (tuple, list)) for item in M])
c_vec = np.asarray([len(item) for item in M])
assert np.all(c_vec-c_vec[0] == 0)
r = len(M)
c = c_vec[0]
M00 = np.asarray(M[0][0])
dt = M00.dtype
sh = [M00.shape[0], r, c]
M_ret = np.empty(sh, dtype=dt)
for irow in range(r):
for icol in range(c):
M_ret[:, irow, icol] = np.asarray(M[irow][icol])
return M_ret
def _extract_submatrices(M, block_indices, block_size, axis):
"""
    Extracts selected blocks of a matrix *M* depending on parameters
    *block_indices* and *block_size*.
    Returns the array of extracted matrices *M_res* so that:
M_res[...,ir,:] = M[(block_indices*block_size+ir), :]
"""
assert block_indices.ndim == 1
assert axis in [0, 1]
r, c = M.shape
if axis == 0:
sh = [block_indices.shape[0], block_size, c]
elif axis == 1:
sh = [block_indices.shape[0], r, block_size]
dt = M.dtype
M_res = np.empty(sh, dtype=dt)
if axis == 0:
for ir in range(block_size):
M_res[:, ir, :] = M[(block_indices*block_size+ir), :]
elif axis == 1:
for ic in range(block_size):
M_res[:, :, ic] = M[:, (block_indices*block_size+ic)]
return M_res
| 65,843 | 39.197802 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/triangulation.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import matplotlib._tri as _tri
import matplotlib._qhull as _qhull
import numpy as np
class Triangulation(object):
"""
An unstructured triangular grid consisting of npoints points and
ntri triangles. The triangles can either be specified by the user
or automatically generated using a Delaunay triangulation.
Parameters
----------
x, y : array_like of shape (npoints)
Coordinates of grid points.
triangles : integer array_like of shape (ntri, 3), optional
For each triangle, the indices of the three points that make
up the triangle, ordered in an anticlockwise manner. If not
specified, the Delaunay triangulation is calculated.
mask : boolean array_like of shape (ntri), optional
Which triangles are masked out.
Attributes
----------
`edges`
`neighbors`
is_delaunay : bool
Whether the Triangulation is a calculated Delaunay
triangulation (where `triangles` was not specified) or not.
Notes
-----
For a Triangulation to be valid it must not have duplicate points,
triangles formed from colinear points, or overlapping triangles.
"""
def __init__(self, x, y, triangles=None, mask=None):
self.x = np.asarray(x, dtype=np.float64)
self.y = np.asarray(y, dtype=np.float64)
if self.x.shape != self.y.shape or self.x.ndim != 1:
raise ValueError("x and y must be equal-length 1-D arrays")
self.mask = None
self._edges = None
self._neighbors = None
self.is_delaunay = False
if triangles is None:
# No triangulation specified, so use matplotlib._qhull to obtain
# Delaunay triangulation.
self.triangles, self._neighbors = _qhull.delaunay(x, y)
self.is_delaunay = True
else:
# Triangulation specified. Copy, since we may correct triangle
# orientation.
self.triangles = np.array(triangles, dtype=np.int32, order='C')
if self.triangles.ndim != 2 or self.triangles.shape[1] != 3:
raise ValueError('triangles must be a (?,3) array')
if self.triangles.max() >= len(self.x):
raise ValueError('triangles max element is out of bounds')
if self.triangles.min() < 0:
raise ValueError('triangles min element is out of bounds')
if mask is not None:
self.mask = np.asarray(mask, dtype=bool)
if self.mask.shape != (self.triangles.shape[0],):
raise ValueError('mask array must have same length as '
'triangles array')
# Underlying C++ object is not created until first needed.
self._cpp_triangulation = None
# Default TriFinder not created until needed.
self._trifinder = None
def calculate_plane_coefficients(self, z):
"""
Calculate plane equation coefficients for all unmasked triangles from
the point (x,y) coordinates and specified z-array of shape (npoints).
        The returned array has shape (ntri,3) and allows the z-value at (x,y)
position in triangle tri to be calculated using
z = array[tri,0]*x + array[tri,1]*y + array[tri,2].
"""
return self.get_cpp_triangulation().calculate_plane_coefficients(z)
@property
def edges(self):
"""
Return integer array of shape (nedges,2) containing all edges of
non-masked triangles.
Each edge is the start point index and end point index. Each
edge (start,end and end,start) appears only once.
"""
if self._edges is None:
self._edges = self.get_cpp_triangulation().get_edges()
return self._edges
def get_cpp_triangulation(self):
# Return the underlying C++ Triangulation object, creating it
# if necessary.
if self._cpp_triangulation is None:
self._cpp_triangulation = _tri.Triangulation(
self.x, self.y, self.triangles, self.mask, self._edges,
self._neighbors, not self.is_delaunay)
return self._cpp_triangulation
def get_masked_triangles(self):
"""
Return an array of triangles that are not masked.
"""
if self.mask is not None:
return self.triangles.compress(1 - self.mask, axis=0)
else:
return self.triangles
@staticmethod
def get_from_args_and_kwargs(*args, **kwargs):
"""
Return a Triangulation object from the args and kwargs, and
the remaining args and kwargs with the consumed values removed.
There are two alternatives: either the first argument is a
Triangulation object, in which case it is returned, or the args
and kwargs are sufficient to create a new Triangulation to
return. In the latter case, see Triangulation.__init__ for
the possible args and kwargs.
"""
if isinstance(args[0], Triangulation):
triangulation = args[0]
args = args[1:]
else:
x = args[0]
y = args[1]
args = args[2:] # Consumed first two args.
# Check triangles in kwargs then args.
triangles = kwargs.pop('triangles', None)
from_args = False
if triangles is None and len(args) > 0:
triangles = args[0]
from_args = True
if triangles is not None:
try:
triangles = np.asarray(triangles, dtype=np.int32)
except ValueError:
triangles = None
if triangles is not None and (triangles.ndim != 2 or
triangles.shape[1] != 3):
triangles = None
if triangles is not None and from_args:
args = args[1:] # Consumed first item in args.
# Check for mask in kwargs.
mask = kwargs.pop('mask', None)
triangulation = Triangulation(x, y, triangles, mask)
return triangulation, args, kwargs
def get_trifinder(self):
"""
Return the default :class:`matplotlib.tri.TriFinder` of this
triangulation, creating it if necessary. This allows the same
TriFinder object to be easily shared.
"""
if self._trifinder is None:
# Default TriFinder class.
from matplotlib.tri.trifinder import TrapezoidMapTriFinder
self._trifinder = TrapezoidMapTriFinder(self)
return self._trifinder
@property
def neighbors(self):
"""
Return integer array of shape (ntri,3) containing neighbor
triangles.
For each triangle, the indices of the three triangles that
share the same edges, or -1 if there is no such neighboring
triangle. neighbors[i,j] is the triangle that is the neighbor
to the edge from point index triangles[i,j] to point index
triangles[i,(j+1)%3].
"""
if self._neighbors is None:
self._neighbors = self.get_cpp_triangulation().get_neighbors()
return self._neighbors
def set_mask(self, mask):
"""
Set or clear the mask array. This is either None, or a boolean
array of shape (ntri).
"""
if mask is None:
self.mask = None
else:
self.mask = np.asarray(mask, dtype=bool)
if self.mask.shape != (self.triangles.shape[0],):
raise ValueError('mask array must have same length as '
'triangles array')
# Set mask in C++ Triangulation.
if self._cpp_triangulation is not None:
self._cpp_triangulation.set_mask(self.mask)
# Clear derived fields so they are recalculated when needed.
self._edges = None
self._neighbors = None
# Recalculate TriFinder if it exists.
if self._trifinder is not None:
self._trifinder._initialize()
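# Illustrative sketch (added for exposition, not part of matplotlib): basic
# use of the Triangulation class defined above -- automatic Delaunay
# triangulation, derived edges and neighbors, and masking of one triangle.
# The point set is an arbitrary example (a unit square plus its center).
def _demo_triangulation_basics():
    import numpy as np
    x = np.array([0., 1., 1., 0., 0.5])
    y = np.array([0., 0., 1., 1., 0.5])
    tri = Triangulation(x, y)           # Delaunay triangulation of 5 points
    edges = tri.edges                   # (nedges, 2) point index pairs
    neighbors = tri.neighbors           # (ntri, 3) neighbor triangle indices
    # Mask the first triangle; derived quantities are recomputed lazily.
    mask = np.zeros(tri.triangles.shape[0], dtype=bool)
    mask[0] = True
    tri.set_mask(mask)
    return edges, neighbors, tri.get_masked_triangles()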
| 8,217 | 36.525114 | 77 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/__init__.py
|
"""
Unstructured triangular grid functions.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from .triangulation import *
from .tricontour import *
from .tritools import *
from .trifinder import *
from .triinterpolate import *
from .trirefine import *
from .tripcolor import *
from .triplot import *
| 377 | 21.235294 | 66 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/tritools.py
|
"""
Tools for triangular grids.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from matplotlib.tri import Triangulation
import numpy as np
class TriAnalyzer(object):
"""
Define basic tools for triangular mesh analysis and improvement.
    A TriAnalyzer encapsulates a :class:`~matplotlib.tri.Triangulation`
object and provides basic tools for mesh analysis and mesh improvement.
Parameters
----------
triangulation : :class:`~matplotlib.tri.Triangulation` object
The encapsulated triangulation to analyze.
Attributes
----------
`scale_factors`
"""
def __init__(self, triangulation):
if not isinstance(triangulation, Triangulation):
raise ValueError("Expected a Triangulation object")
self._triangulation = triangulation
@property
def scale_factors(self):
"""
Factors to rescale the triangulation into a unit square.
Returns *k*, tuple of 2 scale factors.
Returns
-------
k : tuple of 2 floats (kx, ky)
Tuple of floats that would rescale the triangulation :
``[triangulation.x * kx, triangulation.y * ky]``
fits exactly inside a unit square.
"""
compressed_triangles = self._triangulation.get_masked_triangles()
node_used = (np.bincount(np.ravel(compressed_triangles),
minlength=self._triangulation.x.size) != 0)
return (1 / np.ptp(self._triangulation.x[node_used]),
1 / np.ptp(self._triangulation.y[node_used]))
def circle_ratios(self, rescale=True):
"""
        Returns a measure of the flatness of the triangulation's triangles.
        The ratio of the incircle radius over the circumcircle radius is a
        widely used indicator of a triangle's flatness.
It is always ``<= 0.5`` and ``== 0.5`` only for equilateral
triangles. Circle ratios below 0.01 denote very flat triangles.
To avoid unduly low values due to a difference of scale between the 2
axis, the triangular mesh can first be rescaled to fit inside a unit
square with :attr:`scale_factors` (Only if *rescale* is True, which is
its default value).
Parameters
----------
rescale : boolean, optional
            If True, a rescaling will be internally performed (based on
            :attr:`scale_factors`), so that the (unmasked) triangles fit
exactly inside a unit square mesh. Default is True.
Returns
-------
circle_ratios : masked array
Ratio of the incircle radius over the
circumcircle radius, for each 'rescaled' triangle of the
encapsulated triangulation.
Values corresponding to masked triangles are masked out.
"""
# Coords rescaling
if rescale:
(kx, ky) = self.scale_factors
else:
(kx, ky) = (1.0, 1.0)
pts = np.vstack([self._triangulation.x*kx,
self._triangulation.y*ky]).T
tri_pts = pts[self._triangulation.triangles]
# Computes the 3 side lengths
a = tri_pts[:, 1, :] - tri_pts[:, 0, :]
b = tri_pts[:, 2, :] - tri_pts[:, 1, :]
c = tri_pts[:, 0, :] - tri_pts[:, 2, :]
a = np.sqrt(a[:, 0]**2 + a[:, 1]**2)
b = np.sqrt(b[:, 0]**2 + b[:, 1]**2)
c = np.sqrt(c[:, 0]**2 + c[:, 1]**2)
# circumcircle and incircle radii
s = (a+b+c)*0.5
prod = s*(a+b-s)*(a+c-s)*(b+c-s)
# We have to deal with flat triangles with infinite circum_radius
bool_flat = (prod == 0.)
if np.any(bool_flat):
# Pathologic flow
ntri = tri_pts.shape[0]
circum_radius = np.empty(ntri, dtype=np.float64)
circum_radius[bool_flat] = np.inf
abc = a*b*c
circum_radius[~bool_flat] = abc[~bool_flat] / (
4.0*np.sqrt(prod[~bool_flat]))
else:
# Normal optimized flow
circum_radius = (a*b*c) / (4.0*np.sqrt(prod))
in_radius = (a*b*c) / (4.0*circum_radius*s)
circle_ratio = in_radius/circum_radius
mask = self._triangulation.mask
if mask is None:
return circle_ratio
else:
return np.ma.array(circle_ratio, mask=mask)
def get_flat_tri_mask(self, min_circle_ratio=0.01, rescale=True):
"""
Eliminates excessively flat border triangles from the triangulation.
        Returns a mask *new_mask* which allows cleaning the encapsulated
        triangulation of its border-located flat triangles
(according to their :meth:`circle_ratios`).
This mask is meant to be subsequently applied to the triangulation
using :func:`matplotlib.tri.Triangulation.set_mask` .
*new_mask* is an extension of the initial triangulation mask
in the sense that an initially masked triangle will remain masked.
The *new_mask* array is computed recursively ; at each step flat
triangles are removed only if they share a side with the current
mesh border. Thus no new holes in the triangulated domain will be
created.
Parameters
----------
min_circle_ratio : float, optional
Border triangles with incircle/circumcircle radii ratio r/R will
be removed if r/R < *min_circle_ratio*. Default value: 0.01
rescale : boolean, optional
If True, a rescaling will first be internally performed (based on
:attr:`scale_factors` ), so that the (unmasked) triangles fit
exactly inside a unit square mesh. This rescaling accounts for the
difference of scale which might exist between the 2 axis. Default
(and recommended) value is True.
Returns
-------
new_mask : array-like of booleans
Mask to apply to encapsulated triangulation.
All the initially masked triangles remain masked in the
*new_mask*.
Notes
-----
The rationale behind this function is that a Delaunay
triangulation - of an unstructured set of points - sometimes contains
almost flat triangles at its border, leading to artifacts in plots
(especially for high-resolution contouring).
Masked with computed *new_mask*, the encapsulated
triangulation would contain no more unmasked border triangles
with a circle ratio below *min_circle_ratio*, thus improving the
mesh quality for subsequent plots or interpolation.
"""
# Recursively computes the mask_current_borders, true if a triangle is
# at the border of the mesh OR touching the border through a chain of
# invalid aspect ratio masked_triangles.
ntri = self._triangulation.triangles.shape[0]
mask_bad_ratio = self.circle_ratios(rescale) < min_circle_ratio
current_mask = self._triangulation.mask
if current_mask is None:
current_mask = np.zeros(ntri, dtype=bool)
valid_neighbors = np.copy(self._triangulation.neighbors)
renum_neighbors = np.arange(ntri, dtype=np.int32)
nadd = -1
while nadd != 0:
            # The active wavefront is the set of border triangles (unmasked
            # but with at least 1 neighbor equal to -1).
wavefront = ((np.min(valid_neighbors, axis=1) == -1)
& ~current_mask)
            # The elements from the active wavefront will be masked if their
            # circle ratio is bad.
added_mask = np.logical_and(wavefront, mask_bad_ratio)
current_mask = (added_mask | current_mask)
nadd = np.sum(added_mask)
# now we have to update the tables valid_neighbors
valid_neighbors[added_mask, :] = -1
renum_neighbors[added_mask] = -1
valid_neighbors = np.where(valid_neighbors == -1, -1,
renum_neighbors[valid_neighbors])
return np.ma.filled(current_mask, True)
def _get_compressed_triangulation(self, return_tri_renum=False,
return_node_renum=False):
"""
Compress (if masked) the encapsulated triangulation.
Returns minimal-length triangles array (*compressed_triangles*) and
coordinates arrays (*compressed_x*, *compressed_y*) that can still
describe the unmasked triangles of the encapsulated triangulation.
Parameters
----------
return_tri_renum : boolean, optional
Indicates whether a renumbering table to translate the triangle
numbers from the encapsulated triangulation numbering into the
new (compressed) renumbering will be returned.
return_node_renum : boolean, optional
Indicates whether a renumbering table to translate the nodes
numbers from the encapsulated triangulation numbering into the
new (compressed) renumbering will be returned.
Returns
-------
compressed_triangles : array-like
the returned compressed triangulation triangles
compressed_x : array-like
the returned compressed triangulation 1st coordinate
compressed_y : array-like
the returned compressed triangulation 2nd coordinate
tri_renum : array-like of integers
renumbering table to translate the triangle numbers from the
encapsulated triangulation into the new (compressed) renumbering.
-1 for masked triangles (deleted from *compressed_triangles*).
Returned only if *return_tri_renum* is True.
node_renum : array-like of integers
renumbering table to translate the point numbers from the
encapsulated triangulation into the new (compressed) renumbering.
-1 for unused points (i.e. those deleted from *compressed_x* and
*compressed_y*). Returned only if *return_node_renum* is True.
"""
# Valid triangles and renumbering
tri_mask = self._triangulation.mask
compressed_triangles = self._triangulation.get_masked_triangles()
ntri = self._triangulation.triangles.shape[0]
tri_renum = self._total_to_compress_renum(tri_mask, ntri)
# Valid nodes and renumbering
node_mask = (np.bincount(np.ravel(compressed_triangles),
minlength=self._triangulation.x.size) == 0)
compressed_x = self._triangulation.x[~node_mask]
compressed_y = self._triangulation.y[~node_mask]
node_renum = self._total_to_compress_renum(node_mask)
# Now renumbering the valid triangles nodes
compressed_triangles = node_renum[compressed_triangles]
# 4 cases possible for return
if not return_tri_renum:
if not return_node_renum:
return compressed_triangles, compressed_x, compressed_y
else:
return (compressed_triangles, compressed_x, compressed_y,
node_renum)
else:
if not return_node_renum:
return (compressed_triangles, compressed_x, compressed_y,
tri_renum)
else:
return (compressed_triangles, compressed_x, compressed_y,
tri_renum, node_renum)
@staticmethod
def _total_to_compress_renum(mask, n=None):
"""
Parameters
----------
mask : 1d boolean array or None
mask
n : integer
            length of the mask. Useful only if *mask* can be None
Returns
-------
renum : integer array
array so that (`valid_array` being a compressed array
based on a `masked_array` with mask *mask*) :
- For all i such as mask[i] = False:
valid_array[renum[i]] = masked_array[i]
- For all i such as mask[i] = True:
renum[i] = -1 (invalid value)
"""
if n is None:
n = np.size(mask)
if mask is not None:
renum = -np.ones(n, dtype=np.int32) # Default num is -1
valid = np.arange(n, dtype=np.int32).compress(~mask, axis=0)
renum[valid] = np.arange(np.size(valid, 0), dtype=np.int32)
return renum
else:
return np.arange(n, dtype=np.int32)
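# Illustrative sketch (added for exposition, not part of matplotlib): typical
# use of TriAnalyzer to mask the excessively flat border triangles that a
# Delaunay triangulation of scattered points can produce. The point count and
# the 0.05 threshold are arbitrary choices for this example.
def _demo_flat_triangle_cleanup():
    import numpy as np
    from matplotlib.tri import Triangulation
    rng = np.random.RandomState(3)
    x = rng.rand(100)
    y = rng.rand(100)
    tri = Triangulation(x, y)
    analyzer = TriAnalyzer(tri)
    ratios = analyzer.circle_ratios()   # <= 0.5, == 0.5 for equilateral
    new_mask = analyzer.get_flat_tri_mask(min_circle_ratio=0.05)
    tri.set_mask(new_mask)              # hide the flat border triangles
    return ratios.min(), new_mask.sum()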
| 12,628 | 40.406557 | 78 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/tri/tripcolor.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from matplotlib.collections import PolyCollection, TriMesh
from matplotlib.colors import Normalize
from matplotlib.tri.triangulation import Triangulation
import numpy as np
def tripcolor(ax, *args, **kwargs):
"""
Create a pseudocolor plot of an unstructured triangular grid.
The triangulation can be specified in one of two ways; either::
tripcolor(triangulation, ...)
where triangulation is a :class:`matplotlib.tri.Triangulation`
object, or
::
tripcolor(x, y, ...)
tripcolor(x, y, triangles, ...)
tripcolor(x, y, triangles=triangles, ...)
tripcolor(x, y, mask=mask, ...)
tripcolor(x, y, triangles, mask=mask, ...)
in which case a Triangulation object will be created. See
    :class:`~matplotlib.tri.Triangulation` for an explanation of these
possibilities.
The next argument must be *C*, the array of color values, either
one per point in the triangulation if color values are defined at
points, or one per triangle in the triangulation if color values
are defined at triangles. If there are the same number of points
and triangles in the triangulation it is assumed that color
values are defined at points; to force the use of color values at
triangles use the kwarg ``facecolors=C`` instead of just ``C``.
*shading* may be 'flat' (the default) or 'gouraud'. If *shading*
is 'flat' and C values are defined at points, the color values
used for each triangle are from the mean C of the triangle's
three points. If *shading* is 'gouraud' then color values must be
defined at points.
The remaining kwargs are the same as for
:meth:`~matplotlib.axes.Axes.pcolor`.
"""
if not ax._hold:
ax.cla()
alpha = kwargs.pop('alpha', 1.0)
norm = kwargs.pop('norm', None)
cmap = kwargs.pop('cmap', None)
vmin = kwargs.pop('vmin', None)
vmax = kwargs.pop('vmax', None)
shading = kwargs.pop('shading', 'flat')
facecolors = kwargs.pop('facecolors', None)
if shading not in ['flat', 'gouraud']:
raise ValueError("shading must be one of ['flat', 'gouraud'] "
"not {0}".format(shading))
tri, args, kwargs = Triangulation.get_from_args_and_kwargs(*args, **kwargs)
# C is the colors array defined at either points or faces (i.e. triangles).
# If facecolors is None, C are defined at points.
# If facecolors is not None, C are defined at faces.
if facecolors is not None:
C = facecolors
else:
C = np.asarray(args[0])
# If there are a different number of points and triangles in the
# triangulation, can omit facecolors kwarg as it is obvious from
# length of C whether it refers to points or faces.
# Do not do this for gouraud shading.
if (facecolors is None and len(C) == len(tri.triangles) and
len(C) != len(tri.x) and shading != 'gouraud'):
facecolors = C
# Check length of C is OK.
if ((facecolors is None and len(C) != len(tri.x)) or
(facecolors is not None and len(C) != len(tri.triangles))):
raise ValueError('Length of color values array must be the same '
'as either the number of triangulation points '
'or triangles')
# Handling of linewidths, shading, edgecolors and antialiased as
# in Axes.pcolor
linewidths = (0.25,)
if 'linewidth' in kwargs:
kwargs['linewidths'] = kwargs.pop('linewidth')
kwargs.setdefault('linewidths', linewidths)
edgecolors = 'none'
if 'edgecolor' in kwargs:
kwargs['edgecolors'] = kwargs.pop('edgecolor')
ec = kwargs.setdefault('edgecolors', edgecolors)
if 'antialiased' in kwargs:
kwargs['antialiaseds'] = kwargs.pop('antialiased')
if 'antialiaseds' not in kwargs and ec.lower() == "none":
kwargs['antialiaseds'] = False
if shading == 'gouraud':
if facecolors is not None:
raise ValueError('Gouraud shading does not support the use '
'of facecolors kwarg')
if len(C) != len(tri.x):
raise ValueError('For gouraud shading, the length of color '
'values array must be the same as the '
'number of triangulation points')
collection = TriMesh(tri, **kwargs)
else:
# Vertices of triangles.
maskedTris = tri.get_masked_triangles()
verts = np.concatenate((tri.x[maskedTris][..., np.newaxis],
tri.y[maskedTris][..., np.newaxis]), axis=2)
# Color values.
if facecolors is None:
# One color per triangle, the mean of the 3 vertex color values.
C = C[maskedTris].mean(axis=1)
elif tri.mask is not None:
# Remove color values of masked triangles.
C = C.compress(1-tri.mask)
collection = PolyCollection(verts, **kwargs)
collection.set_alpha(alpha)
collection.set_array(C)
if norm is not None and not isinstance(norm, Normalize):
raise ValueError("'norm' must be an instance of 'Normalize'")
collection.set_cmap(cmap)
collection.set_norm(norm)
if vmin is not None or vmax is not None:
collection.set_clim(vmin, vmax)
else:
collection.autoscale_None()
ax.grid(False)
minx = tri.x.min()
maxx = tri.x.max()
miny = tri.y.min()
maxy = tri.y.max()
corners = (minx, miny), (maxx, maxy)
ax.update_datalim(corners)
ax.autoscale_view()
ax.add_collection(collection)
return collection
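# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example of how this function is normally reached through the Axes
# API; the random point cloud and per-point color values are invented purely
# for illustration.
def _tripcolor_usage_demo():
    import numpy as np
    import matplotlib.pyplot as plt
    rng = np.random.RandomState(0)
    x = rng.rand(50)
    y = rng.rand(50)
    c = x + y  # one value per point; 'flat' shading averages them per triangle
    fig, ax = plt.subplots()
    coll = ax.tripcolor(x, y, c, shading='flat')
    fig.colorbar(coll, ax=ax)
    return fig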
| 5,729 | 35.967742 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/disable_internet.py
|
# Originally from astropy project (http://astropy.org), under BSD
# 3-clause license.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import contextlib
import socket
from six.moves import urllib
# save original socket method for restoration
# These are global so that re-calling the turn_off_internet function doesn't
# overwrite them again
socket_original = socket.socket
socket_create_connection = socket.create_connection
socket_bind = socket.socket.bind
socket_connect = socket.socket.connect
INTERNET_OFF = False
# urllib2 uses a global variable to cache its default "opener" for opening
# connections for various protocols; we store it off here so we can restore to
# the default after re-enabling internet use
_orig_opener = None
# ::1 is the IPv6 loopback address; getaddrinfo returns it when asked to
# resolve 'localhost', so it is treated as a local address below.
def check_internet_off(original_function):
"""
Wraps ``original_function``, which in most cases is assumed
to be a `socket.socket` method, to raise an `IOError` for any operations
on non-local AF_INET sockets.
"""
def new_function(*args, **kwargs):
if isinstance(args[0], socket.socket):
if not args[0].family in (socket.AF_INET, socket.AF_INET6):
# Should be fine in all but some very obscure cases
# More to the point, we don't want to affect AF_UNIX
# sockets.
return original_function(*args, **kwargs)
host = args[1][0]
addr_arg = 1
valid_hosts = ('localhost', '127.0.0.1', '::1')
else:
# The only other function this is used to wrap currently is
# socket.create_connection, which should be passed a 2-tuple, but
# we'll check just in case
if not (isinstance(args[0], tuple) and len(args[0]) == 2):
return original_function(*args, **kwargs)
host = args[0][0]
addr_arg = 0
valid_hosts = ('localhost', '127.0.0.1')
hostname = socket.gethostname()
fqdn = socket.getfqdn()
if host in (hostname, fqdn):
host = 'localhost'
new_addr = (host, args[addr_arg][1])
args = args[:addr_arg] + (new_addr,) + args[addr_arg + 1:]
if any([h in host for h in valid_hosts]):
return original_function(*args, **kwargs)
else:
raise IOError("An attempt was made to connect to the internet "
"by a test that was not marked `remote_data`.")
return new_function
def turn_off_internet(verbose=False):
"""
    Disable internet access via Python by preventing connections from being
    created using the socket module. Presumably this could be worked around by
    using some other means of accessing the internet, but the standard
    library's networking modules (urllib, http.client, etc.) and common
    third-party packages such as requests ultimately go through socket.
"""
global INTERNET_OFF
global _orig_opener
if INTERNET_OFF:
return
INTERNET_OFF = True
__tracebackhide__ = True
if verbose:
print("Internet access disabled")
# Update urllib2 to force it not to use any proxies
# Must use {} here (the default of None will kick off an automatic search
# for proxies)
_orig_opener = urllib.request.build_opener()
no_proxy_handler = urllib.request.ProxyHandler({})
opener = urllib.request.build_opener(no_proxy_handler)
urllib.request.install_opener(opener)
socket.create_connection = check_internet_off(socket_create_connection)
socket.socket.bind = check_internet_off(socket_bind)
socket.socket.connect = check_internet_off(socket_connect)
return socket
def turn_on_internet(verbose=False):
"""
Restore internet access. Not used, but kept in case it is needed.
"""
global INTERNET_OFF
global _orig_opener
if not INTERNET_OFF:
return
INTERNET_OFF = False
if verbose:
print("Internet access enabled")
urllib.request.install_opener(_orig_opener)
socket.create_connection = socket_create_connection
socket.socket.bind = socket_bind
socket.socket.connect = socket_connect
return socket
@contextlib.contextmanager
def no_internet(verbose=False):
"""Context manager to temporarily disable internet access (if not already
disabled). If it was already disabled before entering the context manager
(i.e. `turn_off_internet` was called previously) then this is a no-op and
leaves internet access disabled until a manual call to `turn_on_internet`.
"""
already_disabled = INTERNET_OFF
turn_off_internet(verbose=verbose)
try:
yield
finally:
if not already_disabled:
turn_on_internet(verbose=verbose)
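# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example of wrapping a block of test code with no_internet(); the
# host name is hypothetical, and any non-local connection attempt inside the
# block raises IOError.
def _no_internet_demo():
    with no_internet(verbose=True):
        try:
            socket.create_connection(("example.com", 80), timeout=1)
        except IOError as exc:
            print("connection blocked as expected:", exc)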
| 4,867 | 31.238411 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/exceptions.py
|
class ImageComparisonFailure(AssertionError):
"""
Raise this exception to mark a test as a comparison between two images.
"""
| 138 | 26.8 | 75 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/conftest.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import pytest
import matplotlib
def pytest_configure(config):
matplotlib.use('agg')
matplotlib._called_from_pytest = True
matplotlib._init_tests()
def pytest_unconfigure(config):
matplotlib._called_from_pytest = False
@pytest.fixture(autouse=True)
def mpl_test_settings(request):
from matplotlib.testing.decorators import _do_cleanup
original_units_registry = matplotlib.units.registry.copy()
original_settings = matplotlib.rcParams.copy()
backend = None
backend_marker = request.keywords.get('backend')
if backend_marker is not None:
assert len(backend_marker.args) == 1, \
"Marker 'backend' must specify 1 backend."
backend = backend_marker.args[0]
prev_backend = matplotlib.get_backend()
style = '_classic_test' # Default of cleanup and image_comparison too.
style_marker = request.keywords.get('style')
if style_marker is not None:
assert len(style_marker.args) == 1, \
"Marker 'style' must specify 1 style."
style = style_marker.args[0]
matplotlib.testing.setup()
if backend is not None:
# This import must come after setup() so it doesn't load the default
# backend prematurely.
import matplotlib.pyplot as plt
plt.switch_backend(backend)
matplotlib.style.use(style)
try:
yield
finally:
if backend is not None:
plt.switch_backend(prev_backend)
_do_cleanup(original_units_registry,
original_settings)
@pytest.fixture
def mpl_image_comparison_parameters(request, extension):
# This fixture is applied automatically by the image_comparison decorator.
#
# The sole purpose of this fixture is to provide an indirect method of
# obtaining parameters *without* modifying the decorated function
# signature. In this way, the function signature can stay the same and
# pytest won't get confused.
# We annotate the decorated function with any parameters captured by this
# fixture so that they can be used by the wrapper in image_comparison.
baseline_images = request.keywords['baseline_images'].args[0]
if baseline_images is None:
# Allow baseline image list to be produced on the fly based on current
# parametrization.
baseline_images = request.getfixturevalue('baseline_images')
func = request.function
func.__wrapped__.parameters = (baseline_images, extension)
try:
yield
finally:
delattr(func.__wrapped__, 'parameters')
@pytest.fixture
def pd():
"""Fixture to import and configure pandas."""
pd = pytest.importorskip('pandas')
try:
from pandas.plotting import (
register_matplotlib_converters as register)
except ImportError:
from pandas.tseries.converter import register
register()
try:
yield pd
finally:
try:
from pandas.plotting import (
deregister_matplotlib_converters as deregister)
except ImportError:
pass
else:
deregister()
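# --- Illustrative usage (editor's sketch, not part of this conftest) ---
# A hedged example of a test that requests the `pd` fixture defined above;
# pytest injects the configured pandas module automatically.
def _example_test_with_pd(pd):
    df = pd.DataFrame({"x": [0, 1, 2], "y": [0, 1, 4]})
    assert len(df) == 3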
| 3,222 | 30.910891 | 78 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/noseclasses.py
|
"""
The module testing.noseclasses is deprecated as of 2.1
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
try:
from ._nose.plugins.knownfailure import KnownFailure as _KnownFailure
has_nose = True
except ImportError:
has_nose = False
_KnownFailure = object
from .. import cbook
cbook.warn_deprecated(
since="2.1",
message="The noseclass module has been deprecated in 2.1 and will "
"be removed in matplotlib 2.3.")
@cbook.deprecated("2.1")
class KnownFailure(_KnownFailure):
def __init__(self):
if not has_nose:
raise ImportError("Need nose for this plugin.")
| 691 | 24.62963 | 73 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/compare.py
|
"""
Provides a collection of utilities for comparing (image) results.
"""
from __future__ import absolute_import, division, print_function
import six
import atexit
import functools
import hashlib
import itertools
import os
import re
import shutil
import sys
from tempfile import TemporaryFile
import numpy as np
import matplotlib
from matplotlib.compat import subprocess
from matplotlib.testing.exceptions import ImageComparisonFailure
from matplotlib import _png
from matplotlib import _get_cachedir
from matplotlib import cbook
__all__ = ['compare_float', 'compare_images', 'comparable_formats']
def make_test_filename(fname, purpose):
"""
Make a new filename by inserting `purpose` before the file's
extension.
"""
base, ext = os.path.splitext(fname)
return '%s-%s%s' % (base, purpose, ext)
def compare_float(expected, actual, relTol=None, absTol=None):
"""
Fail if the floating point values are not close enough, with
the given message.
You can specify a relative tolerance, absolute tolerance, or both.
"""
if relTol is None and absTol is None:
raise ValueError("You haven't specified a 'relTol' relative "
"tolerance or a 'absTol' absolute tolerance "
"function argument. You must specify one.")
msg = ""
if absTol is not None:
absDiff = abs(expected - actual)
if absTol < absDiff:
template = ['',
'Expected: {expected}',
'Actual: {actual}',
'Abs diff: {absDiff}',
'Abs tol: {absTol}']
msg += '\n '.join([line.format(**locals()) for line in template])
if relTol is not None:
# The relative difference of the two values. If the expected value is
# zero, then return the absolute value of the difference.
relDiff = abs(expected - actual)
if expected:
relDiff = relDiff / abs(expected)
if relTol < relDiff:
# The relative difference is a ratio, so it's always unit-less.
template = ['',
'Expected: {expected}',
'Actual: {actual}',
'Rel diff: {relDiff}',
'Rel tol: {relTol}']
msg += '\n '.join([line.format(**locals()) for line in template])
return msg or None
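# --- Illustrative usage (editor's sketch, not part of the original module) ---
# compare_float returns None when the values agree within tolerance and a
# formatted message otherwise; the numbers are made up for illustration.
def _compare_float_demo():
    assert compare_float(1.0, 1.0 + 1e-9, relTol=1e-6) is None
    msg = compare_float(1.0, 1.1, absTol=0.01)
    assert msg is not None and 'Abs diff' in msg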
def get_cache_dir():
cachedir = _get_cachedir()
if cachedir is None:
raise RuntimeError('Could not find a suitable configuration directory')
cache_dir = os.path.join(cachedir, 'test_cache')
if not os.path.exists(cache_dir):
try:
cbook.mkdirs(cache_dir)
except IOError:
return None
if not os.access(cache_dir, os.W_OK):
return None
return cache_dir
def get_file_hash(path, block_size=2 ** 20):
md5 = hashlib.md5()
with open(path, 'rb') as fd:
while True:
data = fd.read(block_size)
if not data:
break
md5.update(data)
if path.endswith('.pdf'):
from matplotlib import checkdep_ghostscript
md5.update(checkdep_ghostscript()[1].encode('utf-8'))
elif path.endswith('.svg'):
from matplotlib import checkdep_inkscape
md5.update(checkdep_inkscape().encode('utf-8'))
return md5.hexdigest()
def make_external_conversion_command(cmd):
def convert(old, new):
cmdline = cmd(old, new)
pipe = subprocess.Popen(cmdline, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
errcode = pipe.wait()
if not os.path.exists(new) or errcode:
msg = "Conversion command failed:\n%s\n" % ' '.join(cmdline)
if stdout:
msg += "Standard output:\n%s\n" % stdout
if stderr:
msg += "Standard error:\n%s\n" % stderr
raise IOError(msg)
return convert
# Modified from https://bugs.python.org/issue25567.
_find_unsafe_bytes = re.compile(br'[^a-zA-Z0-9_@%+=:,./-]').search
def _shlex_quote_bytes(b):
return (b if _find_unsafe_bytes(b) is None
else b"'" + b.replace(b"'", b"'\"'\"'") + b"'")
class _SVGConverter(object):
def __init__(self):
self._proc = None
# We cannot rely on the GC to trigger `__del__` at exit because
# other modules (e.g. `subprocess`) may already have their globals
        # set to `None`, which makes `proc.communicate` or `proc.terminate`
# fail. By relying on `atexit` we ensure the destructor runs before
# `None`-setting occurs.
atexit.register(self.__del__)
def _read_to_prompt(self):
"""Did Inkscape reach the prompt without crashing?
"""
stream = iter(functools.partial(self._proc.stdout.read, 1), b"")
prompt = (b"\n", b">")
n = len(prompt)
its = itertools.tee(stream, n)
for i, it in enumerate(its):
next(itertools.islice(it, i, i), None) # Advance `it` by `i`.
while True:
window = tuple(map(next, its))
if len(window) != n:
# Ran out of data -- one of the `next(it)` raised
# StopIteration, so the tuple is shorter.
return False
if self._proc.poll() is not None:
# Inkscape exited.
return False
if window == prompt:
# Successfully read until prompt.
return True
def __call__(self, orig, dest):
if (not self._proc # First run.
or self._proc.poll() is not None): # Inkscape terminated.
env = os.environ.copy()
# If one passes e.g. a png file to Inkscape, it will try to
# query the user for conversion options via a GUI (even with
# `--without-gui`). Unsetting `DISPLAY` prevents this (and causes
# GTK to crash and Inkscape to terminate, but that'll just be
# reported as a regular exception below).
env.pop("DISPLAY", None) # May already be unset.
# Do not load any user options.
# `os.environ` needs native strings on Py2+Windows.
env[str("INKSCAPE_PROFILE_DIR")] = os.devnull
# Old versions of Inkscape (0.48.3.1, used on Travis as of now)
# seem to sometimes deadlock when stderr is redirected to a pipe,
# so we redirect it to a temporary file instead. This is not
# necessary anymore as of Inkscape 0.92.1.
self._stderr = TemporaryFile()
self._proc = subprocess.Popen(
[str("inkscape"), "--without-gui", "--shell"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=self._stderr, env=env)
if not self._read_to_prompt():
raise OSError("Failed to start Inkscape")
try:
fsencode = os.fsencode
except AttributeError: # Py2.
def fsencode(s):
return s.encode(sys.getfilesystemencoding())
# Inkscape uses glib's `g_shell_parse_argv`, which has a consistent
# behavior across platforms, so we can just use `shlex.quote`.
orig_b, dest_b = map(_shlex_quote_bytes, map(fsencode, [orig, dest]))
if b"\n" in orig_b or b"\n" in dest_b:
# Who knows whether the current folder name has a newline, or if
# our encoding is even ASCII compatible... Just fall back on the
# slow solution (Inkscape uses `fgets` so it will always stop at a
# newline).
return make_external_conversion_command(lambda old, new: [
str('inkscape'), '-z', old, '--export-png', new])(orig, dest)
self._proc.stdin.write(orig_b + b" --export-png=" + dest_b + b"\n")
self._proc.stdin.flush()
if not self._read_to_prompt():
# Inkscape's output is not localized but gtk's is, so the
# output stream probably has a mixed encoding. Using
# `getfilesystemencoding` should at least get the filenames
# right...
self._stderr.seek(0)
raise ImageComparisonFailure(
self._stderr.read().decode(
sys.getfilesystemencoding(), "replace"))
def __del__(self):
if self._proc:
if self._proc.poll() is None: # Not exited yet.
self._proc.communicate(b"quit\n")
self._proc.wait()
self._proc.stdin.close()
self._proc.stdout.close()
self._stderr.close()
def _update_converter():
gs, gs_v = matplotlib.checkdep_ghostscript()
if gs_v is not None:
def cmd(old, new):
return [str(gs), '-q', '-sDEVICE=png16m', '-dNOPAUSE', '-dBATCH',
'-sOutputFile=' + new, old]
converter['pdf'] = make_external_conversion_command(cmd)
converter['eps'] = make_external_conversion_command(cmd)
if matplotlib.checkdep_inkscape() is not None:
converter['svg'] = _SVGConverter()
#: A dictionary that maps filename extensions to functions which
#: themselves map arguments `old` and `new` (filenames) to a list of strings.
#: The list can then be passed to Popen to convert files with that
#: extension to png format.
converter = {}
_update_converter()
def comparable_formats():
"""
Returns the list of file formats that compare_images can compare
on this system.
"""
return ['png'] + list(converter)
def convert(filename, cache):
"""
Convert the named file into a png file. Returns the name of the
created file.
If *cache* is True, the result of the conversion is cached in
`matplotlib._get_cachedir() + '/test_cache/'`. The caching is based
    on a hash of the exact contents of the input file. There is no limit
on the size of the cache, so it may need to be manually cleared
periodically.
"""
base, extension = filename.rsplit('.', 1)
if extension not in converter:
reason = "Don't know how to convert %s files to png" % extension
from . import is_called_from_pytest
if is_called_from_pytest():
import pytest
pytest.skip(reason)
else:
from nose import SkipTest
raise SkipTest(reason)
newname = base + '_' + extension + '.png'
if not os.path.exists(filename):
raise IOError("'%s' does not exist" % filename)
# Only convert the file if the destination doesn't already exist or
# is out of date.
if (not os.path.exists(newname) or
os.stat(newname).st_mtime < os.stat(filename).st_mtime):
if cache:
cache_dir = get_cache_dir()
else:
cache_dir = None
if cache_dir is not None:
hash_value = get_file_hash(filename)
new_ext = os.path.splitext(newname)[1]
cached_file = os.path.join(cache_dir, hash_value + new_ext)
if os.path.exists(cached_file):
shutil.copyfile(cached_file, newname)
return newname
converter[extension](filename, newname)
if cache_dir is not None:
shutil.copyfile(newname, cached_file)
return newname
#: Maps file extensions to a function which takes a filename as its
#: only argument to return a list suitable for execution with Popen.
#: The purpose of this is so that the result file (with the given
#: extension) can be verified with tools such as xmllint for svg.
verifiers = {}
# Turning this off, because it seems to cause multiprocessing issues
if False and matplotlib.checkdep_xmllint():
verifiers['svg'] = lambda filename: [
'xmllint', '--valid', '--nowarning', '--noout', filename]
@cbook.deprecated("2.1")
def verify(filename):
"""Verify the file through some sort of verification tool."""
if not os.path.exists(filename):
raise IOError("'%s' does not exist" % filename)
base, extension = filename.rsplit('.', 1)
verifier = verifiers.get(extension, None)
if verifier is not None:
cmd = verifier(filename)
pipe = subprocess.Popen(cmd, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
errcode = pipe.wait()
if errcode != 0:
msg = "File verification command failed:\n%s\n" % ' '.join(cmd)
if stdout:
msg += "Standard output:\n%s\n" % stdout
if stderr:
msg += "Standard error:\n%s\n" % stderr
raise IOError(msg)
def crop_to_same(actual_path, actual_image, expected_path, expected_image):
# clip the images to the same size -- this is useful only when
# comparing eps to pdf
if actual_path[-7:-4] == 'eps' and expected_path[-7:-4] == 'pdf':
aw, ah, ad = actual_image.shape
ew, eh, ed = expected_image.shape
actual_image = actual_image[int(aw / 2 - ew / 2):int(
aw / 2 + ew / 2), int(ah / 2 - eh / 2):int(ah / 2 + eh / 2)]
return actual_image, expected_image
def calculate_rms(expectedImage, actualImage):
"Calculate the per-pixel errors, then compute the root mean square error."
if expectedImage.shape != actualImage.shape:
raise ImageComparisonFailure(
"Image sizes do not match expected size: {0} "
"actual size {1}".format(expectedImage.shape, actualImage.shape))
# Convert to float to avoid overflowing finite integer types.
return np.sqrt(((expectedImage - actualImage).astype(float) ** 2).mean())
def compare_images(expected, actual, tol, in_decorator=False):
"""
Compare two "image" files checking differences within a tolerance.
The two given filenames may point to files which are convertible to
PNG via the `.converter` dictionary. The underlying RMS is calculated
with the `.calculate_rms` function.
Parameters
----------
expected : str
The filename of the expected image.
    actual : str
The filename of the actual image.
tol : float
        The tolerance (a color value difference, where 255 is the
        maximal difference). The test fails if the RMS pixel
        difference between the two images is greater than this value.
in_decorator : bool
If called from image_comparison decorator, this should be
True. (default=False)
Examples
--------
img1 = "./baseline/plot.png"
img2 = "./output/plot.png"
    compare_images(img1, img2, 0.001)
"""
if not os.path.exists(actual):
raise Exception("Output image %s does not exist." % actual)
if os.stat(actual).st_size == 0:
raise Exception("Output image file %s is empty." % actual)
# Convert the image to png
extension = expected.split('.')[-1]
if not os.path.exists(expected):
raise IOError('Baseline image %r does not exist.' % expected)
if extension != 'png':
actual = convert(actual, False)
expected = convert(expected, True)
# open the image files and remove the alpha channel (if it exists)
expectedImage = _png.read_png_int(expected)
actualImage = _png.read_png_int(actual)
expectedImage = expectedImage[:, :, :3]
actualImage = actualImage[:, :, :3]
actualImage, expectedImage = crop_to_same(
actual, actualImage, expected, expectedImage)
diff_image = make_test_filename(actual, 'failed-diff')
if tol <= 0.0:
if np.array_equal(expectedImage, actualImage):
return None
# convert to signed integers, so that the images can be subtracted without
# overflow
expectedImage = expectedImage.astype(np.int16)
actualImage = actualImage.astype(np.int16)
rms = calculate_rms(expectedImage, actualImage)
if rms <= tol:
return None
save_diff_image(expected, actual, diff_image)
results = dict(rms=rms, expected=str(expected),
actual=str(actual), diff=str(diff_image), tol=tol)
if not in_decorator:
# Then the results should be a string suitable for stdout.
template = ['Error: Image files did not match.',
'RMS Value: {rms}',
'Expected: \n {expected}',
'Actual: \n {actual}',
'Difference:\n {diff}',
'Tolerance: \n {tol}', ]
results = '\n '.join([line.format(**results) for line in template])
return results
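# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example; the file names are hypothetical. compare_images returns
# None on a match, a printable report when in_decorator is False, and a dict
# of results when in_decorator is True.
def _compare_images_demo():
    result = compare_images('baseline/plot.png', 'result_images/plot.png',
                            tol=0.001)
    if result is not None:
        print(result)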
def save_diff_image(expected, actual, output):
expectedImage = _png.read_png(expected)
actualImage = _png.read_png(actual)
actualImage, expectedImage = crop_to_same(
actual, actualImage, expected, expectedImage)
expectedImage = np.array(expectedImage).astype(float)
actualImage = np.array(actualImage).astype(float)
if expectedImage.shape != actualImage.shape:
raise ImageComparisonFailure(
"Image sizes do not match expected size: {0} "
"actual size {1}".format(expectedImage.shape, actualImage.shape))
absDiffImage = np.abs(expectedImage - actualImage)
# expand differences in luminance domain
absDiffImage *= 255 * 10
save_image_np = np.clip(absDiffImage, 0, 255).astype(np.uint8)
height, width, depth = save_image_np.shape
# The PDF renderer doesn't produce an alpha channel, but the
# matplotlib PNG writer requires one, so expand the array
if depth == 3:
with_alpha = np.empty((height, width, 4), dtype=np.uint8)
with_alpha[:, :, 0:3] = save_image_np
save_image_np = with_alpha
# Hard-code the alpha channel to fully solid
save_image_np[:, :, 3] = 255
_png.write_png(save_image_np, output)
| 17,908 | 35.54898 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/determinism.py
|
"""
Provides utilities to test output reproducibility.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import io
import os
import re
import sys
from subprocess import check_output
import pytest
import matplotlib
from matplotlib import pyplot as plt
def _determinism_save(objects='mhi', format="pdf", usetex=False):
# save current value of SOURCE_DATE_EPOCH and set it
# to a constant value, so that time difference is not
# taken into account
sde = os.environ.pop('SOURCE_DATE_EPOCH', None)
os.environ['SOURCE_DATE_EPOCH'] = "946684800"
matplotlib.rcParams['text.usetex'] = usetex
fig = plt.figure()
if 'm' in objects:
# use different markers...
ax1 = fig.add_subplot(1, 6, 1)
x = range(10)
ax1.plot(x, [1] * 10, marker=u'D')
ax1.plot(x, [2] * 10, marker=u'x')
ax1.plot(x, [3] * 10, marker=u'^')
ax1.plot(x, [4] * 10, marker=u'H')
ax1.plot(x, [5] * 10, marker=u'v')
if 'h' in objects:
# also use different hatch patterns
ax2 = fig.add_subplot(1, 6, 2)
bars = (ax2.bar(range(1, 5), range(1, 5)) +
ax2.bar(range(1, 5), [6] * 4, bottom=range(1, 5)))
ax2.set_xticks([1.5, 2.5, 3.5, 4.5])
patterns = ('-', '+', 'x', '\\', '*', 'o', 'O', '.')
for bar, pattern in zip(bars, patterns):
bar.set_hatch(pattern)
if 'i' in objects:
# also use different images
A = [[1, 2, 3], [2, 3, 1], [3, 1, 2]]
fig.add_subplot(1, 6, 3).imshow(A, interpolation='nearest')
A = [[1, 3, 2], [1, 2, 3], [3, 1, 2]]
fig.add_subplot(1, 6, 4).imshow(A, interpolation='bilinear')
A = [[2, 3, 1], [1, 2, 3], [2, 1, 3]]
fig.add_subplot(1, 6, 5).imshow(A, interpolation='bicubic')
x = range(5)
fig.add_subplot(1, 6, 6).plot(x, x)
if six.PY2 and format == 'ps':
stdout = io.StringIO()
else:
stdout = getattr(sys.stdout, 'buffer', sys.stdout)
fig.savefig(stdout, format=format)
if six.PY2 and format == 'ps':
sys.stdout.write(stdout.getvalue())
# Restores SOURCE_DATE_EPOCH
if sde is None:
os.environ.pop('SOURCE_DATE_EPOCH', None)
else:
os.environ['SOURCE_DATE_EPOCH'] = sde
def _determinism_check(objects='mhi', format="pdf", usetex=False):
"""
    Output the same graphs three times and check that the outputs are
    exactly the same.
Parameters
----------
objects : str
contains characters corresponding to objects to be included in the test
document: 'm' for markers, 'h' for hatch patterns, 'i' for images. The
default value is "mhi", so that the test includes all these objects.
format : str
format string. The default value is "pdf".
"""
plots = []
for i in range(3):
result = check_output([sys.executable, '-R', '-c',
'import matplotlib; '
'matplotlib._called_from_pytest = True; '
'matplotlib.use(%r); '
'from matplotlib.testing.determinism '
'import _determinism_save;'
'_determinism_save(%r,%r,%r)'
% (format, objects, format, usetex)])
plots.append(result)
for p in plots[1:]:
if usetex:
if p != plots[0]:
pytest.skip("failed, maybe due to ghostscript timestamps")
else:
assert p == plots[0]
def _determinism_source_date_epoch(format, string, keyword=b"CreationDate"):
"""
Test SOURCE_DATE_EPOCH support. Output a document with the environment
variable SOURCE_DATE_EPOCH set to 2000-01-01 00:00 UTC and check that the
document contains the timestamp that corresponds to this date (given as an
argument).
Parameters
----------
format : str
format string, such as "pdf".
string : str
timestamp string for 2000-01-01 00:00 UTC.
keyword : bytes
a string to look at when searching for the timestamp in the document
(used in case the test fails).
"""
buff = check_output([sys.executable, '-R', '-c',
'import matplotlib; '
'matplotlib._called_from_pytest = True; '
'matplotlib.use(%r); '
'from matplotlib.testing.determinism '
'import _determinism_save;'
'_determinism_save(%r,%r)'
% (format, "", format)])
find_keyword = re.compile(b".*" + keyword + b".*")
key = find_keyword.search(buff)
if key:
print(key.group())
else:
print("Timestamp keyword (%s) not found!" % keyword)
assert string in buff
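# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example of how a test module typically drives these helpers; the
# chosen format and object set are only an illustration.
def _determinism_demo():
    # Render the same markers/hatches/images figure three times with the pdf
    # backend and require byte-identical output.
    _determinism_check(objects='mhi', format='pdf', usetex=False)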
| 4,924 | 32.732877 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/__init__.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import functools
import warnings
import matplotlib as mpl
from matplotlib import cbook
def is_called_from_pytest():
"""Returns whether the call was done from pytest"""
return getattr(mpl, '_called_from_pytest', False)
def _copy_metadata(src_func, tgt_func):
"""Replicates metadata of the function. Returns target function."""
functools.update_wrapper(tgt_func, src_func)
tgt_func.__wrapped__ = src_func # Python2 compatibility.
return tgt_func
def set_font_settings_for_testing():
mpl.rcParams['font.family'] = 'DejaVu Sans'
mpl.rcParams['text.hinting'] = False
mpl.rcParams['text.hinting_factor'] = 8
def set_reproducibility_for_testing():
mpl.rcParams['svg.hashsalt'] = 'matplotlib'
def setup():
# The baseline images are created in this locale, so we should use
# it during all of the tests.
import locale
from matplotlib.backends import backend_agg, backend_pdf, backend_svg
try:
locale.setlocale(locale.LC_ALL, str('en_US.UTF-8'))
except locale.Error:
try:
locale.setlocale(locale.LC_ALL, str('English_United States.1252'))
except locale.Error:
warnings.warn(
"Could not set locale to English/United States. "
"Some date-related tests may fail")
mpl.use('Agg', warn=False) # use Agg backend for these tests
# These settings *must* be hardcoded for running the comparison
# tests and are not necessarily the default values as specified in
# rcsetup.py
mpl.rcdefaults() # Start with all defaults
set_font_settings_for_testing()
set_reproducibility_for_testing()
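# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example: calling setup() before rendering comparison images pins
# the Agg backend, the test font settings and a fixed svg hash salt. The
# output file name is hypothetical.
def _testing_setup_demo():
    setup()
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1])
    fig.savefig('setup_demo.png')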
| 1,777 | 28.633333 | 78 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/decorators.py
|
from __future__ import absolute_import, division, print_function
import six
import functools
import inspect
import os
import sys
import shutil
import warnings
import unittest
# Note - don't import nose up here - import it only as needed in functions.
# This allows other functions here to be used by pytest-based testing suites
# without requiring nose to be installed.
import matplotlib as mpl
import matplotlib.style
import matplotlib.units
import matplotlib.testing
from matplotlib import cbook
from matplotlib import ticker
from matplotlib import pyplot as plt
from matplotlib import ft2font
from matplotlib.testing.compare import (
comparable_formats, compare_images, make_test_filename)
from . import _copy_metadata, is_called_from_pytest
from .exceptions import ImageComparisonFailure
def _knownfailureif(fail_condition, msg=None, known_exception_class=None):
"""
    Assume a test will fail if *fail_condition* is True. *fail_condition*
may also be False or the string 'indeterminate'.
*msg* is the error message displayed for the test.
If *known_exception_class* is not None, the failure is only known
if the exception is an instance of this class. (Default = None)
"""
if is_called_from_pytest():
import pytest
if fail_condition == 'indeterminate':
fail_condition, strict = True, False
else:
fail_condition, strict = bool(fail_condition), True
return pytest.mark.xfail(condition=fail_condition, reason=msg,
raises=known_exception_class, strict=strict)
else:
from ._nose.decorators import knownfailureif
return knownfailureif(fail_condition, msg, known_exception_class)
@cbook.deprecated('2.1',
alternative='pytest.xfail or import the plugin')
def knownfailureif(fail_condition, msg=None, known_exception_class=None):
_knownfailureif(fail_condition, msg, known_exception_class)
def _do_cleanup(original_units_registry, original_settings):
plt.close('all')
mpl.rcParams.clear()
mpl.rcParams.update(original_settings)
matplotlib.units.registry.clear()
matplotlib.units.registry.update(original_units_registry)
warnings.resetwarnings() # reset any warning filters set in tests
class CleanupTest(object):
@classmethod
def setup_class(cls):
cls.original_units_registry = matplotlib.units.registry.copy()
cls.original_settings = mpl.rcParams.copy()
matplotlib.testing.setup()
@classmethod
def teardown_class(cls):
_do_cleanup(cls.original_units_registry,
cls.original_settings)
def test(self):
self._func()
class CleanupTestCase(unittest.TestCase):
'''A wrapper for unittest.TestCase that includes cleanup operations'''
@classmethod
def setUpClass(cls):
import matplotlib.units
cls.original_units_registry = matplotlib.units.registry.copy()
cls.original_settings = mpl.rcParams.copy()
@classmethod
def tearDownClass(cls):
_do_cleanup(cls.original_units_registry,
cls.original_settings)
def cleanup(style=None):
"""
A decorator to ensure that any global state is reset before
running a test.
Parameters
----------
style : str, optional
The name of the style to apply.
"""
# If cleanup is used without arguments, `style` will be a
# callable, and we pass it directly to the wrapper generator. If
    # cleanup is called with an argument, it is a string naming a
# style, and the function will be passed as an argument to what we
# return. This is a confusing, but somewhat standard, pattern for
# writing a decorator with optional arguments.
def make_cleanup(func):
if inspect.isgeneratorfunction(func):
@functools.wraps(func)
def wrapped_callable(*args, **kwargs):
original_units_registry = matplotlib.units.registry.copy()
original_settings = mpl.rcParams.copy()
matplotlib.style.use(style)
try:
for yielded in func(*args, **kwargs):
yield yielded
finally:
_do_cleanup(original_units_registry,
original_settings)
else:
@functools.wraps(func)
def wrapped_callable(*args, **kwargs):
original_units_registry = matplotlib.units.registry.copy()
original_settings = mpl.rcParams.copy()
matplotlib.style.use(style)
try:
func(*args, **kwargs)
finally:
_do_cleanup(original_units_registry,
original_settings)
return wrapped_callable
if isinstance(style, six.string_types):
return make_cleanup
else:
result = make_cleanup(style)
# Default of mpl_test_settings fixture and image_comparison too.
style = '_classic_test'
return result
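# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example of the two ways the cleanup decorator is applied; the test
# bodies are placeholders.
@cleanup
def _demo_test_default_style():
    plt.plot([1, 2, 3])
@cleanup(style='ggplot')
def _demo_test_named_style():
    plt.plot([1, 2, 3])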
def check_freetype_version(ver):
if ver is None:
return True
from distutils import version
if isinstance(ver, six.string_types):
ver = (ver, ver)
ver = [version.StrictVersion(x) for x in ver]
found = version.StrictVersion(ft2font.__freetype_version__)
return found >= ver[0] and found <= ver[1]
def _checked_on_freetype_version(required_freetype_version):
if check_freetype_version(required_freetype_version):
return lambda f: f
reason = ("Mismatched version of freetype. "
"Test requires '%s', you have '%s'" %
(required_freetype_version, ft2font.__freetype_version__))
return _knownfailureif('indeterminate', msg=reason,
known_exception_class=ImageComparisonFailure)
def remove_ticks_and_titles(figure):
figure.suptitle("")
null_formatter = ticker.NullFormatter()
for ax in figure.get_axes():
ax.set_title("")
ax.xaxis.set_major_formatter(null_formatter)
ax.xaxis.set_minor_formatter(null_formatter)
ax.yaxis.set_major_formatter(null_formatter)
ax.yaxis.set_minor_formatter(null_formatter)
try:
ax.zaxis.set_major_formatter(null_formatter)
ax.zaxis.set_minor_formatter(null_formatter)
except AttributeError:
pass
def _raise_on_image_difference(expected, actual, tol):
__tracebackhide__ = True
err = compare_images(expected, actual, tol, in_decorator=True)
if not os.path.exists(expected):
raise ImageComparisonFailure('image does not exist: %s' % expected)
if err:
for key in ["actual", "expected"]:
err[key] = os.path.relpath(err[key])
raise ImageComparisonFailure(
'images not close (RMS %(rms).3f):\n\t%(actual)s\n\t%(expected)s '
% err)
def _xfail_if_format_is_uncomparable(extension):
will_fail = extension not in comparable_formats()
if will_fail:
fail_msg = 'Cannot compare %s files on this system' % extension
else:
fail_msg = 'No failure expected'
return _knownfailureif(will_fail, fail_msg,
known_exception_class=ImageComparisonFailure)
def _mark_xfail_if_format_is_uncomparable(extension):
if isinstance(extension, six.string_types):
will_fail = extension not in comparable_formats()
else:
# Extension might be a pytest marker instead of a plain string.
will_fail = extension.args[0] not in comparable_formats()
if will_fail:
fail_msg = 'Cannot compare %s files on this system' % extension
import pytest
return pytest.mark.xfail(extension, reason=fail_msg, strict=False,
raises=ImageComparisonFailure)
else:
return extension
class _ImageComparisonBase(object):
"""
Image comparison base class
This class provides *just* the comparison-related functionality and avoids
any code that would be specific to any testing framework.
"""
def __init__(self, tol, remove_text, savefig_kwargs):
self.func = self.baseline_dir = self.result_dir = None
self.tol = tol
self.remove_text = remove_text
self.savefig_kwargs = savefig_kwargs
def delayed_init(self, func):
assert self.func is None, "it looks like same decorator used twice"
self.func = func
self.baseline_dir, self.result_dir = _image_directories(func)
def copy_baseline(self, baseline, extension):
baseline_path = os.path.join(self.baseline_dir, baseline)
orig_expected_fname = baseline_path + '.' + extension
if extension == 'eps' and not os.path.exists(orig_expected_fname):
orig_expected_fname = baseline_path + '.pdf'
expected_fname = make_test_filename(
os.path.join(self.result_dir,
os.path.basename(orig_expected_fname)),
'expected')
if os.path.exists(orig_expected_fname):
shutil.copyfile(orig_expected_fname, expected_fname)
else:
reason = ("Do not have baseline image {0} because this "
"file does not exist: {1}".format(expected_fname,
orig_expected_fname))
raise ImageComparisonFailure(reason)
return expected_fname
def compare(self, idx, baseline, extension):
__tracebackhide__ = True
fignum = plt.get_fignums()[idx]
fig = plt.figure(fignum)
if self.remove_text:
remove_ticks_and_titles(fig)
actual_fname = (
os.path.join(self.result_dir, baseline) + '.' + extension)
kwargs = self.savefig_kwargs.copy()
if extension == 'pdf':
kwargs.setdefault('metadata',
{'Creator': None, 'Producer': None,
'CreationDate': None})
fig.savefig(actual_fname, **kwargs)
expected_fname = self.copy_baseline(baseline, extension)
_raise_on_image_difference(expected_fname, actual_fname, self.tol)
class ImageComparisonTest(CleanupTest, _ImageComparisonBase):
"""
Nose-based image comparison class
This class generates tests for a nose-based testing framework. Ideally,
this class would not be public, and the only publicly visible API would
be the :func:`image_comparison` decorator. Unfortunately, there are
existing downstream users of this class (e.g., pytest-mpl) so it cannot yet
be removed.
"""
def __init__(self, baseline_images, extensions, tol,
freetype_version, remove_text, savefig_kwargs, style):
_ImageComparisonBase.__init__(self, tol, remove_text, savefig_kwargs)
self.baseline_images = baseline_images
self.extensions = extensions
self.freetype_version = freetype_version
self.style = style
def setup(self):
func = self.func
plt.close('all')
self.setup_class()
try:
matplotlib.style.use(self.style)
matplotlib.testing.set_font_settings_for_testing()
func()
assert len(plt.get_fignums()) == len(self.baseline_images), (
"Test generated {} images but there are {} baseline images"
.format(len(plt.get_fignums()), len(self.baseline_images)))
except:
# Restore original settings before raising errors.
self.teardown_class()
raise
def teardown(self):
self.teardown_class()
@staticmethod
@cbook.deprecated('2.1',
alternative='remove_ticks_and_titles')
def remove_text(figure):
remove_ticks_and_titles(figure)
def nose_runner(self):
func = self.compare
func = _checked_on_freetype_version(self.freetype_version)(func)
funcs = {extension: _xfail_if_format_is_uncomparable(extension)(func)
for extension in self.extensions}
for idx, baseline in enumerate(self.baseline_images):
for extension in self.extensions:
yield funcs[extension], idx, baseline, extension
def __call__(self, func):
self.delayed_init(func)
import nose.tools
@nose.tools.with_setup(self.setup, self.teardown)
def runner_wrapper():
for case in self.nose_runner():
yield case
return _copy_metadata(func, runner_wrapper)
def _pytest_image_comparison(baseline_images, extensions, tol,
freetype_version, remove_text, savefig_kwargs,
style):
"""
Decorate function with image comparison for pytest.
This function creates a decorator that wraps a figure-generating function
with image comparison code. Pytest can become confused if we change the
signature of the function, so we indirectly pass anything we need via the
`mpl_image_comparison_parameters` fixture and extra markers.
"""
import pytest
extensions = map(_mark_xfail_if_format_is_uncomparable, extensions)
def decorator(func):
# Parameter indirection; see docstring above and comment below.
@pytest.mark.usefixtures('mpl_image_comparison_parameters')
@pytest.mark.parametrize('extension', extensions)
@pytest.mark.baseline_images(baseline_images)
# END Parameter indirection.
@pytest.mark.style(style)
@_checked_on_freetype_version(freetype_version)
@functools.wraps(func)
def wrapper(*args, **kwargs):
__tracebackhide__ = True
img = _ImageComparisonBase(tol=tol, remove_text=remove_text,
savefig_kwargs=savefig_kwargs)
img.delayed_init(func)
matplotlib.testing.set_font_settings_for_testing()
func(*args, **kwargs)
# Parameter indirection:
# This is hacked on via the mpl_image_comparison_parameters fixture
# so that we don't need to modify the function's real signature for
# any parametrization. Modifying the signature is very very tricky
# and likely to confuse pytest.
baseline_images, extension = func.parameters
assert len(plt.get_fignums()) == len(baseline_images), (
"Test generated {} images but there are {} baseline images"
.format(len(plt.get_fignums()), len(baseline_images)))
for idx, baseline in enumerate(baseline_images):
img.compare(idx, baseline, extension)
wrapper.__wrapped__ = func # For Python 2.7.
return _copy_metadata(func, wrapper)
return decorator
def image_comparison(baseline_images, extensions=None, tol=0,
freetype_version=None, remove_text=False,
savefig_kwarg=None,
# Default of mpl_test_settings fixture and cleanup too.
style='_classic_test'):
"""
Compare images generated by the test with those specified in
    *baseline_images*, which must correspond, otherwise an
ImageComparisonFailure exception will be raised.
Arguments
---------
baseline_images : list or None
A list of strings specifying the names of the images generated by
calls to :meth:`matplotlib.figure.savefig`.
If *None*, the test function must use the ``baseline_images`` fixture,
either as a parameter or with pytest.mark.usefixtures. This value is
only allowed when using pytest.
extensions : [ None | list ]
If None, defaults to all supported extensions.
Otherwise, a list of extensions to test. For example ['png','pdf'].
tol : float, optional, default: 0
The RMS threshold above which the test is considered failed.
freetype_version : str or tuple
The expected freetype version or range of versions for this test to
pass.
remove_text : bool
Remove the title and tick text from the figure before comparison.
This does not remove other, more deliberate, text, such as legends and
annotations.
savefig_kwarg : dict
Optional arguments that are passed to the savefig method.
style : string
Optional name for the base style to apply to the image test. The test
itself can also apply additional styles if desired. Defaults to the
'_classic_test' style.
"""
if extensions is None:
# default extensions to test
extensions = ['png', 'pdf', 'svg']
if savefig_kwarg is None:
        # default: no kwargs passed to savefig
savefig_kwarg = dict()
if is_called_from_pytest():
return _pytest_image_comparison(
baseline_images=baseline_images, extensions=extensions, tol=tol,
freetype_version=freetype_version, remove_text=remove_text,
savefig_kwargs=savefig_kwarg, style=style)
else:
if baseline_images is None:
raise ValueError('baseline_images must be specified')
return ImageComparisonTest(
baseline_images=baseline_images, extensions=extensions, tol=tol,
freetype_version=freetype_version, remove_text=remove_text,
savefig_kwargs=savefig_kwarg, style=style)
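# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example of a typical image comparison test; the baseline name
# 'demo_plot' is hypothetical and would have to exist under baseline_images/.
@image_comparison(baseline_images=['demo_plot'], extensions=['png'],
                  remove_text=True)
def _demo_test_plot():
    fig, ax = plt.subplots()
    ax.plot([0, 1, 2], [0, 1, 4])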
def _image_directories(func):
"""
Compute the baseline and result image directories for testing *func*.
Create the result directory if it doesn't exist.
"""
module_name = func.__module__
if module_name == '__main__':
# FIXME: this won't work for nested packages in matplotlib.tests
warnings.warn(
'Test module run as script. Guessing baseline image locations.')
script_name = sys.argv[0]
basedir = os.path.abspath(os.path.dirname(script_name))
subdir = os.path.splitext(os.path.split(script_name)[1])[0]
else:
mods = module_name.split('.')
if len(mods) >= 3:
mods.pop(0)
# mods[0] will be the name of the package being tested (in
# most cases "matplotlib") However if this is a
# namespace package pip installed and run via the nose
# multiprocess plugin or as a specific test this may be
# missing. See https://github.com/matplotlib/matplotlib/issues/3314
if mods.pop(0) != 'tests':
warnings.warn(
"Module {!r} does not live in a parent module named 'tests'. "
"This is probably ok, but we may not be able to guess the "
"correct subdirectory containing the baseline images. If "
"things go wrong please make sure that there is a parent "
"directory named 'tests' and that it contains a __init__.py "
"file (can be empty).".format(module_name))
subdir = os.path.join(*mods)
import imp
def find_dotted_module(module_name, path=None):
"""A version of imp which can handle dots in the module name.
As for imp.find_module(), the return value is a 3-element
tuple (file, pathname, description)."""
res = None
for sub_mod in module_name.split('.'):
try:
res = file, path, _ = imp.find_module(sub_mod, path)
path = [path]
if file is not None:
file.close()
except ImportError:
# assume namespace package
path = list(sys.modules[sub_mod].__path__)
res = None, path, None
return res
mod_file = find_dotted_module(func.__module__)[1]
basedir = os.path.dirname(mod_file)
baseline_dir = os.path.join(basedir, 'baseline_images', subdir)
result_dir = os.path.abspath(os.path.join('result_images', subdir))
if not os.path.exists(result_dir):
cbook.mkdirs(result_dir)
return baseline_dir, result_dir
def switch_backend(backend):
# Local import to avoid a hard nose dependency and only incur the
# import time overhead at actual test-time.
def switch_backend_decorator(func):
@functools.wraps(func)
def backend_switcher(*args, **kwargs):
try:
prev_backend = mpl.get_backend()
matplotlib.testing.setup()
plt.switch_backend(backend)
result = func(*args, **kwargs)
finally:
plt.switch_backend(prev_backend)
return result
return _copy_metadata(func, backend_switcher)
return switch_backend_decorator
def skip_if_command_unavailable(cmd):
"""
    Skip a test if a command is unavailable.
Parameters
----------
cmd : list of str
must be a complete command which should not
        return a non-zero exit code, something like
["latex", "-version"]
"""
from matplotlib.compat.subprocess import check_output
try:
check_output(cmd)
except:
import pytest
return pytest.mark.skip(reason='missing command: %s' % cmd[0])
return lambda f: f
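# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example: skip a LaTeX-dependent test when `latex` is missing.
@skip_if_command_unavailable(["latex", "-version"])
def _demo_test_needs_latex():
    pass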
| 21,319 | 35.758621 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/_nose/exceptions.py
|
class KnownFailureTest(Exception):
"""
Raise this exception to mark a test as a known failing test.
"""
class KnownFailureDidNotFailTest(Exception):
"""
Raise this exception to mark a test should have failed but did not.
"""
| 251 | 21.909091 | 71 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/_nose/__init__.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
def get_extra_test_plugins():
from .plugins.performgc import PerformGC
from .plugins.knownfailure import KnownFailure
from nose.plugins import attrib
return [PerformGC, KnownFailure, attrib.Plugin]
def get_env():
env = {'NOSE_COVER_PACKAGE': ['matplotlib', 'mpl_toolkits'],
'NOSE_COVER_HTML': 1,
'NOSE_COVER_NO_PRINT': 1}
return env
def check_deps():
try:
import nose
try:
from unittest import mock
except ImportError:
import mock
except ImportError:
print("matplotlib.test requires nose and mock to run.")
raise
def test(verbosity=None, coverage=False, switch_backend_warn=True,
recursionlimit=0, **kwargs):
from ... import default_test_modules, get_backend, use
old_backend = get_backend()
old_recursionlimit = sys.getrecursionlimit()
try:
use('agg')
if recursionlimit:
sys.setrecursionlimit(recursionlimit)
import nose
from nose.plugins import multiprocess
# Nose doesn't automatically instantiate all of the plugins in the
# child processes, so we have to provide the multiprocess plugin with
# a list.
extra_plugins = get_extra_test_plugins()
multiprocess._instantiate_plugins = extra_plugins
env = get_env()
if coverage:
env['NOSE_WITH_COVERAGE'] = 1
if verbosity is not None:
env['NOSE_VERBOSE'] = verbosity
success = nose.run(
addplugins=[plugin() for plugin in extra_plugins],
env=env,
defaultTest=default_test_modules,
**kwargs
)
finally:
if old_backend.lower() != 'agg':
use(old_backend, warn=switch_backend_warn)
if recursionlimit:
sys.setrecursionlimit(old_recursionlimit)
return success
def knownfail(msg):
from .exceptions import KnownFailureTest
# Keep the next ultra-long comment so it shows in console.
raise KnownFailureTest(msg) # An error here when running nose means that you don't have the matplotlib.testing.nose.plugins:KnownFailure plugin in use. # noqa
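# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example: this module backs matplotlib.test() when nose is the
# runner; calling test() directly returns True when the suite passes.
def _nose_runner_demo():
    success = test(verbosity=1, coverage=False)
    print('all tests passed' if success else 'failures detected')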
| 2,319 | 28.367089 | 164 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/_nose/decorators.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from .. import _copy_metadata
from . import knownfail
from .exceptions import KnownFailureDidNotFailTest
def knownfailureif(fail_condition, msg=None, known_exception_class=None):
# based on numpy.testing.dec.knownfailureif
if msg is None:
msg = 'Test known to fail'
def known_fail_decorator(f):
def failer(*args, **kwargs):
try:
# Always run the test (to generate images).
result = f(*args, **kwargs)
except Exception as err:
if fail_condition:
if known_exception_class is not None:
if not isinstance(err, known_exception_class):
# This is not the expected exception
raise
knownfail(msg)
else:
raise
if fail_condition and fail_condition != 'indeterminate':
raise KnownFailureDidNotFailTest(msg)
return result
return _copy_metadata(f, failer)
return known_fail_decorator
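# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A hedged example: the wrapped test raises, and because the condition is
# true the failure is reported as KNOWNFAIL rather than as an error.
@knownfailureif(True, msg='known rendering difference on this platform')
def _demo_known_failure():
    raise AssertionError('expected failure')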
| 1,202 | 34.382353 | 73 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/_nose/plugins/knownfailure.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
from ..exceptions import KnownFailureTest
class KnownFailure(ErrorClassPlugin):
'''Plugin that installs a KNOWNFAIL error class for the
    KnownFailureTest exception. When KnownFailureTest is raised,
the exception will be logged in the knownfail attribute of the
result, 'K' or 'KNOWNFAIL' (verbose) will be output, and the
exception will not be counted as an error or failure.
This is based on numpy.testing.noseclasses.KnownFailure.
'''
enabled = True
knownfail = ErrorClass(KnownFailureTest,
label='KNOWNFAIL',
isfailure=False)
def options(self, parser, env=os.environ):
env_opt = 'NOSE_WITHOUT_KNOWNFAIL'
parser.add_option('--no-knownfail', action='store_true',
dest='noKnownFail', default=env.get(env_opt, False),
help='Disable special handling of KnownFailureTest '
'exceptions')
def configure(self, options, conf):
if not self.can_configure:
return
self.conf = conf
disable = getattr(options, 'noKnownFail', False)
if disable:
self.enabled = False
def addError(self, test, err, *zero_nine_capt_args):
# Fixme (Really weird): if I don't leave empty method here,
# nose gets confused and KnownFails become testing errors when
# using the MplNosePlugin and MplTestCase.
# The *zero_nine_capt_args captures an extra argument. There
# seems to be a bug in
# nose.testing.manager.ZeroNinePlugin.addError() in which a
# 3rd positional argument ("capt") is passed to the plugin's
# addError() method, even if one is not explicitly using the
# ZeroNinePlugin.
pass
| 1,991 | 38.84 | 78 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/_nose/plugins/performgc.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import gc
import os
from nose.plugins import Plugin
class PerformGC(Plugin):
"""This plugin adds option to call ``gc.collect`` after each test"""
enabled = False
def options(self, parser, env=os.environ):
env_opt = 'PERFORM_GC'
parser.add_option('--perform-gc', action='store_true',
dest='performGC', default=env.get(env_opt, False),
help='Call gc.collect() after each test')
def configure(self, options, conf):
if not self.can_configure:
return
self.enabled = getattr(options, 'performGC', False)
def afterTest(self, test):
gc.collect()
| 778 | 27.851852 | 76 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/_nose/plugins/__init__.py
| 0 | 0 | 0 |
py
|
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/jpl_units/EpochConverter.py
|
#===========================================================================
#
# EpochConverter
#
#===========================================================================
"""EpochConverter module containing class EpochConverter."""
#===========================================================================
# Place all imports after here.
#
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import matplotlib.units as units
import matplotlib.dates as date_ticker
from matplotlib.cbook import iterable
#
# Place all imports before here.
#===========================================================================
__all__ = [ 'EpochConverter' ]
#===========================================================================
class EpochConverter( units.ConversionInterface ):
""": A matplotlib converter class. Provides matplotlib conversion
functionality for Monte Epoch and Duration classes.
"""
# julian date reference for "Jan 1, 0001" minus 1 day because
# matplotlib really wants "Jan 0, 0001"
jdRef = 1721425.5 - 1
#------------------------------------------------------------------------
@staticmethod
def axisinfo( unit, axis ):
""": Returns information on how to handle an axis that has Epoch data.
= INPUT VARIABLES
      - unit    The units to use for an axis with Epoch data.
= RETURN VALUE
- Returns a matplotlib AxisInfo data structure that contains
minor/major formatters, major/minor locators, and default
label information.
"""
majloc = date_ticker.AutoDateLocator()
majfmt = date_ticker.AutoDateFormatter( majloc )
return units.AxisInfo( majloc = majloc,
majfmt = majfmt,
label = unit )
#------------------------------------------------------------------------
@staticmethod
def float2epoch( value, unit ):
""": Convert a matplotlib floating-point date into an Epoch of the
specified units.
= INPUT VARIABLES
- value The matplotlib floating-point date.
- unit The unit system to use for the Epoch.
= RETURN VALUE
- Returns the value converted to an Epoch in the specified time system.
"""
# Delay-load due to circular dependencies.
import matplotlib.testing.jpl_units as U
secPastRef = value * 86400.0 * U.UnitDbl( 1.0, 'sec' )
return U.Epoch( unit, secPastRef, EpochConverter.jdRef )
#------------------------------------------------------------------------
@staticmethod
def epoch2float( value, unit ):
""": Convert an Epoch value to a float suitible for plotting as a
python datetime object.
= INPUT VARIABLES
- value An Epoch or list of Epochs that need to be converted.
- unit The units to use for an axis with Epoch data.
= RETURN VALUE
- Returns the value parameter converted to floats.
"""
return value.julianDate( unit ) - EpochConverter.jdRef
#------------------------------------------------------------------------
@staticmethod
def duration2float( value ):
""": Convert a Duration value to a float suitible for plotting as a
python datetime object.
= INPUT VARIABLES
- value A Duration or list of Durations that need to be converted.
= RETURN VALUE
- Returns the value parameter converted to floats.
"""
return value.seconds() / 86400.0
#------------------------------------------------------------------------
@staticmethod
def convert( value, unit, axis ):
""": Convert value using unit to a float. If value is a sequence, return
the converted sequence.
= INPUT VARIABLES
- value The value or list of values that need to be converted.
- unit The units to use for an axis with Epoch data.
= RETURN VALUE
- Returns the value parameter converted to floats.
"""
# Delay-load due to circular dependencies.
import matplotlib.testing.jpl_units as U
isNotEpoch = True
isDuration = False
if ( iterable(value) and not isinstance(value, six.string_types) ):
if ( len(value) == 0 ):
return []
else:
return [ EpochConverter.convert( x, unit, axis ) for x in value ]
if ( isinstance(value, U.Epoch) ):
isNotEpoch = False
elif ( isinstance(value, U.Duration) ):
isDuration = True
if ( isNotEpoch and not isDuration and
units.ConversionInterface.is_numlike( value ) ):
return value
      if ( unit is None ):
unit = EpochConverter.default_units( value, axis )
if ( isDuration ):
return EpochConverter.duration2float( value )
else:
return EpochConverter.epoch2float( value, unit )
#------------------------------------------------------------------------
@staticmethod
def default_units( value, axis ):
""": Return the default unit for value, or None.
= INPUT VARIABLES
- value The value or list of values that need units.
= RETURN VALUE
- Returns the default units to use for value.
"""
frame = None
if ( iterable(value) and not isinstance(value, six.string_types) ):
return EpochConverter.default_units( value[0], axis )
else:
frame = value.frame()
return frame
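#---------------------------------------------------------------------------
# Editor's sketch (not part of the original module): a minimal, hedged
# example of how the converter maps unitized values onto the plain floats
# matplotlib plots with.  The helper name below is hypothetical.
def _epoch_converter_example():
    import matplotlib.testing.jpl_units as U
    e = U.Epoch("ET", daynum=730119.5)                 # from a matplotlib day number
    day_float = EpochConverter.convert(e, "ET", None)  # days past jdRef
    half_day = EpochConverter.duration2float(U.Duration("ET", 43200.0))
    return day_float, half_day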
| 5,492 | 32.090361 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/jpl_units/StrConverter.py
|
#===========================================================================
#
# StrConverter
#
#===========================================================================
"""StrConverter module containing class StrConverter."""
#===========================================================================
# Place all imports after here.
#
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
import matplotlib.units as units
from matplotlib.cbook import iterable
# Place all imports before here.
#===========================================================================
__all__ = [ 'StrConverter' ]
#===========================================================================
class StrConverter( units.ConversionInterface ):
""": A matplotlib converter class. Provides matplotlib conversion
functionality for string data values.
Valid units for string are:
- 'indexed' : Values are indexed as they are specified for plotting.
- 'sorted' : Values are sorted alphanumerically.
- 'inverted' : Values are inverted so that the first value is on top.
- 'sorted-inverted' : A combination of 'sorted' and 'inverted'
"""
#------------------------------------------------------------------------
@staticmethod
def axisinfo( unit, axis ):
""": Returns information on how to handle an axis that has string data.
= INPUT VARIABLES
- axis The axis using this converter.
      - unit    The units to use for an axis with string data.
= RETURN VALUE
- Returns a matplotlib AxisInfo data structure that contains
minor/major formatters, major/minor locators, and default
label information.
"""
return None
#------------------------------------------------------------------------
@staticmethod
def convert( value, unit, axis ):
""": Convert value using unit to a float. If value is a sequence, return
the converted sequence.
= INPUT VARIABLES
- axis The axis using this converter.
- value The value or list of values that need to be converted.
      - unit    The units to use for an axis with string data.
= RETURN VALUE
- Returns the value parameter converted to floats.
"""
if ( units.ConversionInterface.is_numlike( value ) ):
return value
if ( value == [] ):
return []
# we delay loading to make matplotlib happy
ax = axis.axes
if axis is ax.get_xaxis():
isXAxis = True
else:
isXAxis = False
axis.get_major_ticks()
ticks = axis.get_ticklocs()
labels = axis.get_ticklabels()
labels = [ l.get_text() for l in labels if l.get_text() ]
if ( not labels ):
ticks = []
labels = []
if ( not iterable( value ) ):
value = [ value ]
newValues = []
for v in value:
if ( (v not in labels) and (v not in newValues) ):
newValues.append( v )
for v in newValues:
if ( labels ):
labels.append( v )
else:
labels = [ v ]
#DISABLED: This is disabled because matplotlib bar plots do not
#DISABLED: recalculate the unit conversion of the data values
#DISABLED: this is due to design and is not really a bug.
#DISABLED: If this gets changed, then we can activate the following
#DISABLED: block of code. Note that this works for line plots.
#DISABLED if ( unit ):
#DISABLED if ( unit.find( "sorted" ) > -1 ):
#DISABLED labels.sort()
#DISABLED if ( unit.find( "inverted" ) > -1 ):
#DISABLED labels = labels[ ::-1 ]
# add padding (so they do not appear on the axes themselves)
labels = [ '' ] + labels + [ '' ]
ticks = list(xrange( len(labels) ))
ticks[0] = 0.5
ticks[-1] = ticks[-1] - 0.5
axis.set_ticks( ticks )
axis.set_ticklabels( labels )
# we have to do the following lines to make ax.autoscale_view work
loc = axis.get_major_locator()
loc.set_bounds( ticks[0], ticks[-1] )
if ( isXAxis ):
ax.set_xlim( ticks[0], ticks[-1] )
else:
ax.set_ylim( ticks[0], ticks[-1] )
result = []
for v in value:
# If v is not in labels then something went wrong with adding new
# labels to the list of old labels.
errmsg = "This is due to a logic error in the StrConverter class. "
errmsg += "Please report this error and its message in bugzilla."
assert ( v in labels ), errmsg
result.append( ticks[ labels.index(v) ] )
ax.viewLim.ignore(-1)
return result
#------------------------------------------------------------------------
@staticmethod
def default_units( value, axis ):
""": Return the default unit for value, or None.
= INPUT VARIABLES
- axis The axis using this converter.
- value The value or list of values that need units.
= RETURN VALUE
- Returns the default units to use for value.
Return the default unit for value, or None.
"""
# The default behavior for string indexing.
return "indexed"
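#---------------------------------------------------------------------------
# Editor's sketch (not part of the original module): registering the
# converter by hand, exactly as jpl_units.register() does, so that plotting
# plain strings routes through the 'indexed' placement implemented above.
# The helper name below is hypothetical.
def _str_converter_example():
    import matplotlib.units as munits
    munits.registry[str] = StrConverter()
    # After this, e.g. ax.plot(['a', 'b', 'c'], [1, 2, 3]) places the strings
    # at padded integer ticks and uses them as the tick labels.
    return munits.registry[str]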
| 5,293 | 31.084848 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/jpl_units/Epoch.py
|
#===========================================================================
#
# Epoch
#
#===========================================================================
"""Epoch module."""
#===========================================================================
# Place all imports after here.
#
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import math
import datetime as DT
from matplotlib.dates import date2num
#
# Place all imports before here.
#===========================================================================
#===========================================================================
class Epoch(object):
# Frame conversion offsets in seconds
# t(TO) = t(FROM) + allowed[ FROM ][ TO ]
allowed = {
"ET" : {
"UTC" : +64.1839,
},
"UTC" : {
"ET" : -64.1839,
},
}
#-----------------------------------------------------------------------
def __init__( self, frame, sec=None, jd=None, daynum=None, dt=None ):
"""Create a new Epoch object.
      Build an epoch in one of two ways:
Using seconds past a Julian date:
# Epoch( 'ET', sec=1e8, jd=2451545 )
or using a matplotlib day number
# Epoch( 'ET', daynum=730119.5 )
= ERROR CONDITIONS
- If the input units are not in the allowed list, an error is thrown.
= INPUT VARIABLES
- frame The frame of the epoch. Must be 'ET' or 'UTC'
- sec The number of seconds past the input JD.
- jd The Julian date of the epoch.
- daynum The matplotlib day number of the epoch.
- dt A python datetime instance.
"""
if ( ( sec is None and jd is not None ) or
( sec is not None and jd is None ) or
( daynum is not None and ( sec is not None or jd is not None ) ) or
( daynum is None and dt is None and ( sec is None or jd is None ) ) or
( daynum is not None and dt is not None ) or
( dt is not None and ( sec is not None or jd is not None ) ) or
( (dt is not None) and not isinstance(dt, DT.datetime) ) ):
msg = "Invalid inputs. Must enter sec and jd together, " \
"daynum by itself, or dt (must be a python datetime).\n" \
"Sec = %s\nJD = %s\ndnum= %s\ndt = %s" \
% ( str( sec ), str( jd ), str( daynum ), str( dt ) )
raise ValueError( msg )
if frame not in self.allowed:
msg = "Input frame '%s' is not one of the supported frames of %s" \
% ( frame, str( list(six.iterkeys(self.allowed) ) ) )
raise ValueError(msg)
self._frame = frame
if dt is not None:
daynum = date2num( dt )
if daynum is not None:
# 1-JAN-0001 in JD = 1721425.5
jd = float( daynum ) + 1721425.5
self._jd = math.floor( jd )
self._seconds = ( jd - self._jd ) * 86400.0
else:
self._seconds = float( sec )
self._jd = float( jd )
# Resolve seconds down to [ 0, 86400 )
deltaDays = int( math.floor( self._seconds / 86400.0 ) )
self._jd += deltaDays
self._seconds -= deltaDays * 86400.0
#-----------------------------------------------------------------------
def convert( self, frame ):
if self._frame == frame:
return self
offset = self.allowed[ self._frame ][ frame ]
return Epoch( frame, self._seconds + offset, self._jd )
#-----------------------------------------------------------------------
def frame( self ):
return self._frame
#-----------------------------------------------------------------------
def julianDate( self, frame ):
t = self
if frame != self._frame:
t = self.convert( frame )
return t._jd + t._seconds / 86400.0
#-----------------------------------------------------------------------
def secondsPast( self, frame, jd ):
t = self
if frame != self._frame:
t = self.convert( frame )
delta = t._jd - jd
return t._seconds + delta * 86400
#-----------------------------------------------------------------------
def __cmp__( self, rhs ):
"""Compare two Epoch's.
= INPUT VARIABLES
- rhs The Epoch to compare against.
= RETURN VALUE
- Returns -1 if self < rhs, 0 if self == rhs, +1 if self > rhs.
"""
t = self
if self._frame != rhs._frame:
t = self.convert( rhs._frame )
if t._jd != rhs._jd:
return cmp( t._jd, rhs._jd )
return cmp( t._seconds, rhs._seconds )
#-----------------------------------------------------------------------
def __add__( self, rhs ):
"""Add a duration to an Epoch.
= INPUT VARIABLES
      - rhs    The Duration to add.
      = RETURN VALUE
      - Returns the sum of ourselves and the input Duration as a new Epoch.
"""
t = self
if self._frame != rhs.frame():
t = self.convert( rhs._frame )
sec = t._seconds + rhs.seconds()
return Epoch( t._frame, sec, t._jd )
#-----------------------------------------------------------------------
def __sub__( self, rhs ):
"""Subtract two Epoch's or a Duration from an Epoch.
Valid:
Duration = Epoch - Epoch
Epoch = Epoch - Duration
= INPUT VARIABLES
- rhs The Epoch to subtract.
= RETURN VALUE
      - Returns either the Duration between two Epochs, or a new Epoch
        that is the result of subtracting a Duration from an Epoch.
"""
# Delay-load due to circular dependencies.
import matplotlib.testing.jpl_units as U
# Handle Epoch - Duration
if isinstance( rhs, U.Duration ):
return self + -rhs
t = self
if self._frame != rhs._frame:
t = self.convert( rhs._frame )
days = t._jd - rhs._jd
sec = t._seconds - rhs._seconds
return U.Duration( rhs._frame, days*86400 + sec )
#-----------------------------------------------------------------------
def __str__( self ):
"""Print the Epoch."""
return "%22.15e %s" % ( self.julianDate( self._frame ), self._frame )
#-----------------------------------------------------------------------
def __repr__( self ):
"""Print the Epoch."""
return str( self )
#-----------------------------------------------------------------------
def range( start, stop, step ):
"""Generate a range of Epoch objects.
Similar to the Python range() method. Returns the range [
start, stop ) at the requested step. Each element will be a
Epoch object.
= INPUT VARIABLES
- start The starting value of the range.
- stop The stop value of the range.
- step Step to use.
= RETURN VALUE
      - Returns a list containing the requested Epoch values.
"""
elems = []
i = 0
while True:
d = start + i * step
if d >= stop:
break
elems.append( d )
i += 1
return elems
range = staticmethod( range )
#===========================================================================
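#---------------------------------------------------------------------------
# Editor's sketch (not part of the original module): build an Epoch two
# different ways and subtract to get a Duration.  The helper name below is
# hypothetical.
def _epoch_example():
    import datetime as DT
    e1 = Epoch("ET", sec=0.0, jd=2451545.0)          # seconds past a Julian date
    e2 = Epoch("UTC", dt=DT.datetime(2000, 1, 2))    # from a python datetime
    dur = e2 - e1                                    # a Duration in e1's frame
    return dur.seconds()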
| 7,251 | 29.343096 | 81 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/jpl_units/UnitDbl.py
|
#===========================================================================
#
# UnitDbl
#
#===========================================================================
"""UnitDbl module."""
#===========================================================================
# Place all imports after here.
#
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
#
# Place all imports before here.
#===========================================================================
#===========================================================================
class UnitDbl(object):
"""Class UnitDbl in development.
"""
#-----------------------------------------------------------------------
# Unit conversion table. Small subset of the full one but enough
# to test the required functions. First field is a scale factor to
# convert the input units to the units of the second field. Only
# units in this table are allowed.
allowed = {
"m" : ( 0.001, "km" ),
"km" : ( 1, "km" ),
"mile" : ( 1.609344, "km" ),
"rad" : ( 1, "rad" ),
"deg" : ( 1.745329251994330e-02, "rad" ),
"sec" : ( 1, "sec" ),
"min" : ( 60.0, "sec" ),
"hour" : ( 3600, "sec" ),
}
_types = {
"km" : "distance",
"rad" : "angle",
"sec" : "time",
}
#-----------------------------------------------------------------------
def __init__( self, value, units ):
"""Create a new UnitDbl object.
Units are internally converted to km, rad, and sec. The only
valid inputs for units are [ m, km, mile, rad, deg, sec, min, hour ].
The field UnitDbl.value will contain the converted value. Use
the convert() method to get a specific type of units back.
= ERROR CONDITIONS
- If the input units are not in the allowed list, an error is thrown.
= INPUT VARIABLES
- value The numeric value of the UnitDbl.
- units The string name of the units the value is in.
"""
self.checkUnits( units )
data = self.allowed[ units ]
self._value = float( value * data[0] )
self._units = data[1]
#-----------------------------------------------------------------------
def convert( self, units ):
"""Convert the UnitDbl to a specific set of units.
= ERROR CONDITIONS
- If the input units are not in the allowed list, an error is thrown.
= INPUT VARIABLES
- units The string name of the units to convert to.
= RETURN VALUE
- Returns the value of the UnitDbl in the requested units as a floating
point number.
"""
if self._units == units:
return self._value
self.checkUnits( units )
data = self.allowed[ units ]
if self._units != data[1]:
msg = "Error trying to convert to different units.\n" \
" Invalid conversion requested.\n" \
" UnitDbl: %s\n" \
" Units: %s\n" % ( str( self ), units )
raise ValueError( msg )
return self._value / data[0]
#-----------------------------------------------------------------------
def __abs__( self ):
"""Return the absolute value of this UnitDbl."""
return UnitDbl( abs( self._value ), self._units )
#-----------------------------------------------------------------------
def __neg__( self ):
"""Return the negative value of this UnitDbl."""
return UnitDbl( -self._value, self._units )
#-----------------------------------------------------------------------
def __nonzero__( self ):
"""Test a UnitDbl for a non-zero value.
= RETURN VALUE
- Returns true if the value is non-zero.
"""
if six.PY3:
return self._value.__bool__()
else:
return self._value.__nonzero__()
if six.PY3:
__bool__ = __nonzero__
#-----------------------------------------------------------------------
def __cmp__( self, rhs ):
"""Compare two UnitDbl's.
= ERROR CONDITIONS
- If the input rhs units are not the same as our units,
an error is thrown.
= INPUT VARIABLES
- rhs The UnitDbl to compare against.
= RETURN VALUE
- Returns -1 if self < rhs, 0 if self == rhs, +1 if self > rhs.
"""
self.checkSameUnits( rhs, "compare" )
return cmp( self._value, rhs._value )
#-----------------------------------------------------------------------
def __add__( self, rhs ):
"""Add two UnitDbl's.
= ERROR CONDITIONS
- If the input rhs units are not the same as our units,
an error is thrown.
= INPUT VARIABLES
- rhs The UnitDbl to add.
= RETURN VALUE
- Returns the sum of ourselves and the input UnitDbl.
"""
self.checkSameUnits( rhs, "add" )
return UnitDbl( self._value + rhs._value, self._units )
#-----------------------------------------------------------------------
def __sub__( self, rhs ):
"""Subtract two UnitDbl's.
= ERROR CONDITIONS
- If the input rhs units are not the same as our units,
an error is thrown.
= INPUT VARIABLES
- rhs The UnitDbl to subtract.
= RETURN VALUE
- Returns the difference of ourselves and the input UnitDbl.
"""
self.checkSameUnits( rhs, "subtract" )
return UnitDbl( self._value - rhs._value, self._units )
#-----------------------------------------------------------------------
def __mul__( self, rhs ):
"""Scale a UnitDbl by a value.
= INPUT VARIABLES
- rhs The scalar to multiply by.
= RETURN VALUE
- Returns the scaled UnitDbl.
"""
return UnitDbl( self._value * rhs, self._units )
#-----------------------------------------------------------------------
def __rmul__( self, lhs ):
"""Scale a UnitDbl by a value.
= INPUT VARIABLES
- lhs The scalar to multiply by.
= RETURN VALUE
- Returns the scaled UnitDbl.
"""
return UnitDbl( self._value * lhs, self._units )
#-----------------------------------------------------------------------
def __div__( self, rhs ):
"""Divide a UnitDbl by a value.
= INPUT VARIABLES
- rhs The scalar to divide by.
= RETURN VALUE
- Returns the scaled UnitDbl.
"""
return UnitDbl( self._value / rhs, self._units )
#-----------------------------------------------------------------------
def __str__( self ):
"""Print the UnitDbl."""
return "%g *%s" % ( self._value, self._units )
#-----------------------------------------------------------------------
def __repr__( self ):
"""Print the UnitDbl."""
return "UnitDbl( %g, '%s' )" % ( self._value, self._units )
#-----------------------------------------------------------------------
def type( self ):
"""Return the type of UnitDbl data."""
return self._types[ self._units ]
#-----------------------------------------------------------------------
def range( start, stop, step=None ):
"""Generate a range of UnitDbl objects.
Similar to the Python range() method. Returns the range [
start, stop ) at the requested step. Each element will be a
UnitDbl object.
= INPUT VARIABLES
- start The starting value of the range.
- stop The stop value of the range.
- step Optional step to use. If set to None, then a UnitDbl of
value 1 w/ the units of the start is used.
= RETURN VALUE
      - Returns a list containing the requested UnitDbl values.
"""
if step is None:
step = UnitDbl( 1, start._units )
elems = []
i = 0
while True:
d = start + i * step
if d >= stop:
break
elems.append( d )
i += 1
return elems
range = staticmethod( range )
#-----------------------------------------------------------------------
def checkUnits( self, units ):
"""Check to see if some units are valid.
= ERROR CONDITIONS
- If the input units are not in the allowed list, an error is thrown.
= INPUT VARIABLES
- units The string name of the units to check.
"""
if units not in self.allowed:
msg = "Input units '%s' are not one of the supported types of %s" \
% ( units, str( list(six.iterkeys(self.allowed)) ) )
raise ValueError( msg )
#-----------------------------------------------------------------------
def checkSameUnits( self, rhs, func ):
"""Check to see if units are the same.
= ERROR CONDITIONS
- If the units of the rhs UnitDbl are not the same as our units,
an error is thrown.
= INPUT VARIABLES
- rhs The UnitDbl to check for the same units
- func The name of the function doing the check.
"""
if self._units != rhs._units:
msg = "Cannot %s units of different types.\n" \
"LHS: %s\n" \
"RHS: %s" % ( func, self._units, rhs._units )
raise ValueError( msg )
#===========================================================================
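#---------------------------------------------------------------------------
# Editor's sketch (not part of the original module): values are stored
# internally in km / rad / sec, and convert() reads them back in any unit of
# the same type.  The helper name below is hypothetical.
def _unitdbl_example():
    d = UnitDbl(1.0, "mile") + UnitDbl(1.0, "km")   # both stored as km
    t = UnitDbl(90.0, "min")                        # stored as 5400 sec
    return d.convert("m"), t.convert("hour")        # (2609.344, 1.5)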
| 9,408 | 30.573826 | 77 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/jpl_units/__init__.py
|
#=======================================================================
"""
This is a sample set of units for use with testing unit conversion
of matplotlib routines. These are used because they use very strict
enforcement of unitized data which will test the entire spectrum of how
unitized data might be used (it is not always meaningful to convert to
a float without specific units given).
UnitDbl is essentially a unitized floating point number. It has a
minimal set of supported units (enough for testing purposes). All
of the mathematical operations are provided to fully test any behaviour
that might occur with unitized data. Remember that unitized data has
rules as to how it can be combined with other unitized data (a value of
distance cannot be added to a value of time). Thus we need to guard
against any accidental "default" conversion that would strip away the
meaning of the data and render it neutered.
Epoch is different from a UnitDbl of time. Time is something that can be
measured, whereas an Epoch is a specific moment in time. Epochs are
typically referenced as an offset from some predetermined epoch.
A difference of two Epochs is a Duration. The distinction between a
Duration and a UnitDbl of time is made because an Epoch can have different
frames (or units). In the case of our test Epoch class the two allowed
frames are 'UTC' and 'ET' (note that these are rough estimates provided for
testing purposes and should not be used in production code where accuracy
of time frames is desired). As such a Duration also has a frame of
reference and therefore needs to be called out as different from a simple
measurement of time, since a delta-t in one frame may not be the same in
another.
"""
#=======================================================================
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from .Duration import Duration
from .Epoch import Epoch
from .UnitDbl import UnitDbl
from .StrConverter import StrConverter
from .EpochConverter import EpochConverter
from .UnitDblConverter import UnitDblConverter
from .UnitDblFormatter import UnitDblFormatter
#=======================================================================
__version__ = "1.0"
__all__ = [
'register',
'Duration',
'Epoch',
'UnitDbl',
'UnitDblFormatter',
]
#=======================================================================
def register():
"""Register the unit conversion classes with matplotlib."""
import matplotlib.units as mplU
mplU.registry[ str ] = StrConverter()
mplU.registry[ Epoch ] = EpochConverter()
mplU.registry[ Duration ] = EpochConverter()
mplU.registry[ UnitDbl ] = UnitDblConverter()
#=======================================================================
# Some default unit instances
# Distances
m = UnitDbl( 1.0, "m" )
km = UnitDbl( 1.0, "km" )
mile = UnitDbl( 1.0, "mile" )
# Angles
deg = UnitDbl( 1.0, "deg" )
rad = UnitDbl( 1.0, "rad" )
# Time
sec = UnitDbl( 1.0, "sec" )
min = UnitDbl( 1.0, "min" )
hr = UnitDbl( 1.0, "hour" )
day = UnitDbl( 24.0, "hour" )
sec = UnitDbl( 1.0, "sec" )
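#=======================================================================
# Editor's sketch (not part of the original module): after register() the
# unit instances above can be handed straight to plotting code.  Assumes a
# headless Agg backend is acceptable; the helper name below is hypothetical.
def _jpl_units_example():
    import matplotlib
    matplotlib.use("Agg")
    import matplotlib.pyplot as plt
    register()
    fig, ax = plt.subplots()
    ax.plot([0 * sec, 30 * sec, 60 * sec], [0 * km, 1 * km, 4 * km], "o-")
    return fig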
| 3,203 | 35 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/jpl_units/Duration.py
|
#===========================================================================
#
# Duration
#
#===========================================================================
"""Duration module."""
#===========================================================================
# Place all imports after here.
#
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
#
# Place all imports before here.
#===========================================================================
#===========================================================================
class Duration(object):
"""Class Duration in development.
"""
allowed = [ "ET", "UTC" ]
#-----------------------------------------------------------------------
def __init__( self, frame, seconds ):
"""Create a new Duration object.
= ERROR CONDITIONS
- If the input frame is not in the allowed list, an error is thrown.
= INPUT VARIABLES
- frame The frame of the duration. Must be 'ET' or 'UTC'
- seconds The number of seconds in the Duration.
"""
if frame not in self.allowed:
msg = "Input frame '%s' is not one of the supported frames of %s" \
% ( frame, str( self.allowed ) )
raise ValueError( msg )
self._frame = frame
self._seconds = seconds
#-----------------------------------------------------------------------
def frame( self ):
"""Return the frame the duration is in."""
return self._frame
#-----------------------------------------------------------------------
def __abs__( self ):
"""Return the absolute value of the duration."""
return Duration( self._frame, abs( self._seconds ) )
#-----------------------------------------------------------------------
def __neg__( self ):
"""Return the negative value of this Duration."""
return Duration( self._frame, -self._seconds )
#-----------------------------------------------------------------------
def seconds( self ):
"""Return the number of seconds in the Duration."""
return self._seconds
#-----------------------------------------------------------------------
   def __nonzero__( self ):
      """Test a Duration for a non-zero value.
      = RETURN VALUE
      - Returns True if the Duration is non-zero.
"""
return self._seconds != 0
if six.PY3:
__bool__ = __nonzero__
#-----------------------------------------------------------------------
def __cmp__( self, rhs ):
"""Compare two Durations.
= ERROR CONDITIONS
- If the input rhs is not in the same frame, an error is thrown.
= INPUT VARIABLES
- rhs The Duration to compare against.
= RETURN VALUE
- Returns -1 if self < rhs, 0 if self == rhs, +1 if self > rhs.
"""
self.checkSameFrame( rhs, "compare" )
return cmp( self._seconds, rhs._seconds )
#-----------------------------------------------------------------------
def __add__( self, rhs ):
"""Add two Durations.
= ERROR CONDITIONS
- If the input rhs is not in the same frame, an error is thrown.
= INPUT VARIABLES
- rhs The Duration to add.
= RETURN VALUE
- Returns the sum of ourselves and the input Duration.
"""
# Delay-load due to circular dependencies.
import matplotlib.testing.jpl_units as U
if isinstance( rhs, U.Epoch ):
return rhs + self
self.checkSameFrame( rhs, "add" )
return Duration( self._frame, self._seconds + rhs._seconds )
#-----------------------------------------------------------------------
def __sub__( self, rhs ):
"""Subtract two Durations.
= ERROR CONDITIONS
- If the input rhs is not in the same frame, an error is thrown.
= INPUT VARIABLES
- rhs The Duration to subtract.
= RETURN VALUE
- Returns the difference of ourselves and the input Duration.
"""
self.checkSameFrame( rhs, "sub" )
return Duration( self._frame, self._seconds - rhs._seconds )
#-----------------------------------------------------------------------
   def __mul__( self, rhs ):
      """Scale a Duration by a value.
= INPUT VARIABLES
- rhs The scalar to multiply by.
= RETURN VALUE
- Returns the scaled Duration.
"""
return Duration( self._frame, self._seconds * float( rhs ) )
#-----------------------------------------------------------------------
def __rmul__( self, lhs ):
"""Scale a Duration by a value.
= INPUT VARIABLES
- lhs The scalar to multiply by.
= RETURN VALUE
- Returns the scaled Duration.
"""
return Duration( self._frame, self._seconds * float( lhs ) )
#-----------------------------------------------------------------------
def __div__( self, rhs ):
"""Divide a Duration by a value.
= INPUT VARIABLES
- rhs The scalar to divide by.
= RETURN VALUE
- Returns the scaled Duration.
"""
return Duration( self._frame, self._seconds / rhs )
#-----------------------------------------------------------------------
def __rdiv__( self, rhs ):
"""Divide a Duration by a value.
= INPUT VARIABLES
- rhs The scalar to divide by.
= RETURN VALUE
- Returns the scaled Duration.
"""
return Duration( self._frame, rhs / self._seconds )
#-----------------------------------------------------------------------
def __str__( self ):
"""Print the Duration."""
return "%g %s" % ( self._seconds, self._frame )
#-----------------------------------------------------------------------
def __repr__( self ):
"""Print the Duration."""
return "Duration( '%s', %g )" % ( self._frame, self._seconds )
#-----------------------------------------------------------------------
def checkSameFrame( self, rhs, func ):
"""Check to see if frames are the same.
= ERROR CONDITIONS
- If the frame of the rhs Duration is not the same as our frame,
an error is thrown.
= INPUT VARIABLES
- rhs The Duration to check for the same frame
- func The name of the function doing the check.
"""
if self._frame != rhs._frame:
msg = "Cannot %s Duration's with different frames.\n" \
"LHS: %s\n" \
"RHS: %s" % ( func, self._frame, rhs._frame )
raise ValueError( msg )
#===========================================================================
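#---------------------------------------------------------------------------
# Editor's sketch (not part of the original module): Durations carry a frame
# ('ET' or 'UTC'); addition requires matching frames, while scaling by plain
# numbers is always allowed.  The helper name below is hypothetical.
def _duration_example():
    d = Duration("ET", 3600.0) + Duration("ET", 1800.0)
    return (2 * d).seconds()                        # 10800.0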
| 6,718 | 30.693396 | 76 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/jpl_units/UnitDblConverter.py
|
#===========================================================================
#
# UnitDblConverter
#
#===========================================================================
"""UnitDblConverter module containing class UnitDblConverter."""
#===========================================================================
# Place all imports after here.
#
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
import matplotlib.units as units
import matplotlib.projections.polar as polar
from matplotlib.cbook import iterable
#
# Place all imports before here.
#===========================================================================
__all__ = [ 'UnitDblConverter' ]
#===========================================================================
# A special function for use with the matplotlib FuncFormatter class
# for formatting axes with radian units.
# This was copied from matplotlib example code.
def rad_fn(x, pos = None ):
"""Radian function formatter."""
n = int((x / np.pi) * 2.0 + 0.25)
if n == 0:
return str(x)
elif n == 1:
return r'$\pi/2$'
elif n == 2:
return r'$\pi$'
elif n % 2 == 0:
      return r'$%s\pi$' % (n // 2,)
else:
return r'$%s\pi/2$' % (n,)
#===========================================================================
class UnitDblConverter( units.ConversionInterface ):
""": A matplotlib converter class. Provides matplotlib conversion
functionality for the Monte UnitDbl class.
"""
# default for plotting
defaults = {
"distance" : 'km',
"angle" : 'deg',
"time" : 'sec',
}
#------------------------------------------------------------------------
@staticmethod
   def axisinfo( unit, axis ):
      """: Returns information on how to handle an axis that has UnitDbl data.
      = INPUT VARIABLES
      - unit    The units to use for an axis with UnitDbl data.
= RETURN VALUE
- Returns a matplotlib AxisInfo data structure that contains
minor/major formatters, major/minor locators, and default
label information.
"""
# Delay-load due to circular dependencies.
import matplotlib.testing.jpl_units as U
# Check to see if the value used for units is a string unit value
# or an actual instance of a UnitDbl so that we can use the unit
# value for the default axis label value.
if ( unit ):
if ( isinstance( unit, six.string_types ) ):
label = unit
else:
label = unit.label()
else:
label = None
if ( label == "deg" ) and isinstance( axis.axes, polar.PolarAxes ):
# If we want degrees for a polar plot, use the PolarPlotFormatter
majfmt = polar.PolarAxes.ThetaFormatter()
else:
majfmt = U.UnitDblFormatter( useOffset = False )
return units.AxisInfo( majfmt = majfmt, label = label )
#------------------------------------------------------------------------
@staticmethod
def convert( value, unit, axis ):
""": Convert value using unit to a float. If value is a sequence, return
the converted sequence.
= INPUT VARIABLES
- value The value or list of values that need to be converted.
      - unit    The units to use for an axis with UnitDbl data.
= RETURN VALUE
- Returns the value parameter converted to floats.
"""
# Delay-load due to circular dependencies.
import matplotlib.testing.jpl_units as U
isNotUnitDbl = True
if ( iterable(value) and not isinstance(value, six.string_types) ):
if ( len(value) == 0 ):
return []
else:
return [ UnitDblConverter.convert( x, unit, axis ) for x in value ]
# We need to check to see if the incoming value is actually a UnitDbl and
# set a flag. If we get an empty list, then just return an empty list.
if ( isinstance(value, U.UnitDbl) ):
isNotUnitDbl = False
# If the incoming value behaves like a number, but is not a UnitDbl,
# then just return it because we don't know how to convert it
# (or it is already converted)
if ( isNotUnitDbl and units.ConversionInterface.is_numlike( value ) ):
return value
# If no units were specified, then get the default units to use.
      if ( unit is None ):
unit = UnitDblConverter.default_units( value, axis )
# Convert the incoming UnitDbl value/values to float/floats
if isinstance( axis.axes, polar.PolarAxes ) and value.type() == "angle":
# Guarantee that units are radians for polar plots.
return value.convert( "rad" )
return value.convert( unit )
#------------------------------------------------------------------------
@staticmethod
def default_units( value, axis ):
""": Return the default unit for value, or None.
= INPUT VARIABLES
- value The value or list of values that need units.
= RETURN VALUE
- Returns the default units to use for value.
Return the default unit for value, or None.
"""
# Determine the default units based on the user preferences set for
# default units when printing a UnitDbl.
if ( iterable(value) and not isinstance(value, six.string_types) ):
return UnitDblConverter.default_units( value[0], axis )
else:
return UnitDblConverter.defaults[ value.type() ]
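#---------------------------------------------------------------------------
# Editor's sketch (not part of the original module): rad_fn is a plain
# FuncFormatter-style callback, and default_units falls back to the per-type
# defaults declared above ('km', 'deg', 'sec').  The helper name below is
# hypothetical.
def _unitdbl_converter_example():
    import numpy as np
    import matplotlib.testing.jpl_units as U
    labels = [rad_fn(x) for x in (0.0, np.pi / 2, np.pi)]   # '0.0', '$\pi/2$', '$\pi$'
    unit = UnitDblConverter.default_units(U.UnitDbl(1.0, "mile"), None)  # 'km'
    return labels, unit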
| 5,537 | 33.6125 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/testing/jpl_units/UnitDblFormatter.py
|
#===========================================================================
#
# UnitDblFormatter
#
#===========================================================================
"""UnitDblFormatter module containing class UnitDblFormatter."""
#===========================================================================
# Place all imports after here.
#
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import matplotlib.ticker as ticker
#
# Place all imports before here.
#===========================================================================
__all__ = [ 'UnitDblFormatter' ]
#===========================================================================
class UnitDblFormatter( ticker.ScalarFormatter ):
"""The formatter for UnitDbl data types. This allows for formatting
with the unit string.
"""
def __init__( self, *args, **kwargs ):
'The arguments are identical to matplotlib.ticker.ScalarFormatter.'
ticker.ScalarFormatter.__init__( self, *args, **kwargs )
def __call__( self, x, pos = None ):
'Return the format for tick val x at position pos'
if len(self.locs) == 0:
return ''
else:
return '{:.12}'.format(x)
def format_data_short( self, value ):
"Return the value formatted in 'short' format."
return '{:.12}'.format(value)
def format_data( self, value ):
"Return the value formatted into a string."
return '{:.12}'.format(value)
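#===========================================================================
# Editor's sketch (not part of the original module): the formatter is
# normally installed by UnitDblConverter.axisinfo(); called directly it just
# renders floats to 12 significant digits.  The helper name below is
# hypothetical.
def _unitdbl_formatter_example():
    fmt = UnitDblFormatter(useOffset=False)
    return fmt.format_data(1.609344)                # '1.609344'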
| 1,521 | 30.708333 | 76 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/compat/subprocess.py
|
"""
A replacement wrapper around the subprocess module, with a number of
work-arounds:
- Provides a stub implementation of subprocess members on Google App Engine
(which are missing in subprocess).
- Use subprocess32, a backport from Python 3.2, on Linux/Mac as a work-around for
https://github.com/matplotlib/matplotlib/issues/5314
Instead of importing subprocess, other modules should use this as follows:
from matplotlib.compat import subprocess
This module is safe to import from anywhere within matplotlib.
"""
from __future__ import absolute_import # Required to import subprocess
from __future__ import print_function
import os
import sys
if os.name == 'posix' and sys.version_info[0] < 3:
# work around for https://github.com/matplotlib/matplotlib/issues/5314
try:
import subprocess32 as subprocess
except ImportError:
import subprocess
else:
import subprocess
__all__ = ['Popen', 'PIPE', 'STDOUT', 'check_output', 'CalledProcessError']
if hasattr(subprocess, 'Popen'):
Popen = subprocess.Popen
# Assume that it also has the other constants.
PIPE = subprocess.PIPE
STDOUT = subprocess.STDOUT
CalledProcessError = subprocess.CalledProcessError
check_output = subprocess.check_output
else:
# In restricted environments (such as Google App Engine), these are
# non-existent. Replace them with dummy versions that always raise OSError.
def Popen(*args, **kwargs):
raise OSError("subprocess.Popen is not supported")
def check_output(*args, **kwargs):
raise OSError("subprocess.check_output is not supported")
PIPE = -1
STDOUT = -2
# There is no need to catch CalledProcessError. These stubs cannot raise
# it. None in an except clause will simply not match any exceptions.
CalledProcessError = None
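# Editor's sketch (not part of the original module): import the wrapper
# instead of the stdlib module; on restricted platforms the stub raises
# OSError at call time rather than failing at import time.  The helper name
# below is hypothetical.
def _subprocess_example():
    from matplotlib.compat import subprocess
    try:
        return subprocess.check_output(["echo", "hello"])
    except OSError:
        return None   # e.g. Google App Engine, where Popen is unavailable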
| 1,817 | 33.961538 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/compat/__init__.py
| 0 | 0 | 0 |
py
|
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backends/backend_gtkcairo.py
|
"""
GTK+ Matplotlib interface using cairo (not GDK) drawing operations.
Author: Steve Chaplin
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import gtk
if gtk.pygtk_version < (2, 7, 0):
import cairo.gtk
from matplotlib import cbook
from matplotlib.backends import backend_cairo
from matplotlib.backends.backend_gtk import *
from matplotlib.backends.backend_gtk import _BackendGTK
backend_version = ('PyGTK(%d.%d.%d) ' % gtk.pygtk_version
+ 'Pycairo(%s)' % backend_cairo.backend_version)
class RendererGTKCairo (backend_cairo.RendererCairo):
if gtk.pygtk_version >= (2,7,0):
def set_pixmap (self, pixmap):
self.gc.ctx = pixmap.cairo_create()
else:
def set_pixmap (self, pixmap):
self.gc.ctx = cairo.gtk.gdk_cairo_create (pixmap)
class FigureCanvasGTKCairo(backend_cairo.FigureCanvasCairo, FigureCanvasGTK):
filetypes = FigureCanvasGTK.filetypes.copy()
filetypes.update(backend_cairo.FigureCanvasCairo.filetypes)
def __init__(self, *args, **kwargs):
warn_deprecated('2.2',
message=('The GTKCairo backend is deprecated. It is '
'untested and will be removed in Matplotlib '
'3.0. Use the GTK3Cairo backend instead. See '
'Matplotlib usage FAQ for more info on '
'backends.'),
alternative='GTK3Cairo')
super(FigureCanvasGTKCairo, self).__init__(*args, **kwargs)
def _renderer_init(self):
"""Override to use cairo (rather than GDK) renderer"""
self._renderer = RendererGTKCairo(self.figure.dpi)
# This class has been unused for a while at least.
@cbook.deprecated("2.1")
class FigureManagerGTKCairo(FigureManagerGTK):
def _get_toolbar(self, canvas):
# must be inited after the window, drawingArea and figure
# attrs are set
if matplotlib.rcParams['toolbar']=='toolbar2':
toolbar = NavigationToolbar2GTKCairo (canvas, self.window)
else:
toolbar = None
return toolbar
# This class has been unused for a while at least.
@cbook.deprecated("2.1")
class NavigationToolbar2Cairo(NavigationToolbar2GTK):
def _get_canvas(self, fig):
return FigureCanvasGTKCairo(fig)
@_BackendGTK.export
class _BackendGTKCairo(_BackendGTK):
FigureCanvas = FigureCanvasGTKCairo
FigureManager = FigureManagerGTK
| 2,558 | 33.12 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backends/backend_tkcairo.py
|
from __future__ import absolute_import, division, print_function
import sys
import numpy as np
from . import tkagg # Paint image to Tk photo blitter extension.
from .backend_cairo import cairo, FigureCanvasCairo, RendererCairo
from ._backend_tk import _BackendTk, FigureCanvasTk
class FigureCanvasTkCairo(FigureCanvasCairo, FigureCanvasTk):
def __init__(self, *args, **kwargs):
super(FigureCanvasTkCairo, self).__init__(*args, **kwargs)
self._renderer = RendererCairo(self.figure.dpi)
def draw(self):
width = int(self.figure.bbox.width)
height = int(self.figure.bbox.height)
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
self._renderer.set_ctx_from_surface(surface)
self._renderer.set_width_height(width, height)
self.figure.draw(self._renderer)
buf = np.reshape(surface.get_data(), (height, width, 4))
# Convert from ARGB32 to RGBA8888. Using .take() instead of directly
# indexing ensures C-contiguity of the result, which is needed by
# tkagg.
buf = buf.take(
[2, 1, 0, 3] if sys.byteorder == "little" else [1, 2, 3, 0],
axis=2)
tkagg.blit(self._tkphoto, buf, colormode=2)
self._master.update_idletasks()
@_BackendTk.export
class _BackendTkCairo(_BackendTk):
FigureCanvas = FigureCanvasTkCairo
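# Editor's sketch (not part of the original module): the same ARGB32 ->
# RGBA8888 channel remap used in FigureCanvasTkCairo.draw(), shown on a tiny
# synthetic buffer with no Tk window involved.  The helper name below is
# hypothetical.
def _argb32_to_rgba_example():
    argb = np.zeros((2, 2, 4), dtype=np.uint8)
    argb[..., :] = (10, 20, 30, 40)   # B, G, R, A byte order on little-endian
    order = [2, 1, 0, 3] if sys.byteorder == "little" else [1, 2, 3, 0]
    rgba = argb.take(order, axis=2)   # C-contiguous RGBA, as tkagg.blit needs
    return rgba[0, 0].tolist()        # [30, 20, 10, 40] on little-endian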
| 1,382 | 35.394737 | 77 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backends/backend_gtk3.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import logging
import os
import sys
import matplotlib
from matplotlib import backend_tools, rcParams
from matplotlib._pylab_helpers import Gcf
from matplotlib.backend_bases import (
_Backend, FigureCanvasBase, FigureManagerBase, NavigationToolbar2,
StatusbarBase, TimerBase, ToolContainerBase, cursors)
from matplotlib.backend_managers import ToolManager
from matplotlib.figure import Figure
from matplotlib.widgets import SubplotTool
from ._gtk3_compat import GLib, GObject, Gtk, Gdk
_log = logging.getLogger(__name__)
backend_version = "%s.%s.%s" % (
    Gtk.get_major_version(), Gtk.get_minor_version(), Gtk.get_micro_version())
# the true dots per inch on the screen; should be display dependent
# see http://groups.google.com/groups?q=screen+dpi+x11&hl=en&lr=&ie=UTF-8&oe=UTF-8&safe=off&selm=7077.26e81ad5%40swift.cs.tcd.ie&rnum=5 for some info about screen dpi
PIXELS_PER_INCH = 96
cursord = {
cursors.MOVE : Gdk.Cursor.new(Gdk.CursorType.FLEUR),
cursors.HAND : Gdk.Cursor.new(Gdk.CursorType.HAND2),
cursors.POINTER : Gdk.Cursor.new(Gdk.CursorType.LEFT_PTR),
cursors.SELECT_REGION : Gdk.Cursor.new(Gdk.CursorType.TCROSS),
cursors.WAIT : Gdk.Cursor.new(Gdk.CursorType.WATCH),
}
class TimerGTK3(TimerBase):
'''
Subclass of :class:`backend_bases.TimerBase` using GTK3 for timer events.
Attributes
----------
interval : int
The time between timer events in milliseconds. Default is 1000 ms.
single_shot : bool
Boolean flag indicating whether this timer should operate as single
shot (run once and then stop). Defaults to False.
callbacks : list
Stores list of (func, args) tuples that will be called upon timer
events. This list can be manipulated directly, or the functions
`add_callback` and `remove_callback` can be used.
'''
def _timer_start(self):
# Need to stop it, otherwise we potentially leak a timer id that will
# never be stopped.
self._timer_stop()
self._timer = GLib.timeout_add(self._interval, self._on_timer)
def _timer_stop(self):
if self._timer is not None:
GLib.source_remove(self._timer)
self._timer = None
def _timer_set_interval(self):
# Only stop and restart it if the timer has already been started
if self._timer is not None:
self._timer_stop()
self._timer_start()
def _on_timer(self):
TimerBase._on_timer(self)
# Gtk timeout_add() requires that the callback returns True if it
# is to be called again.
if len(self.callbacks) > 0 and not self._single:
return True
else:
self._timer = None
return False
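# Editor's sketch (not part of the original module): minimal, hedged use of
# the TimerBase API that TimerGTK3 implements on top of GLib.timeout_add.
# The function and its `canvas` argument (assumed to be a FigureCanvasGTK3,
# defined below) are hypothetical.
def _timer_gtk3_example(canvas):
    timer = canvas.new_timer(interval=500)        # interval in milliseconds
    timer.add_callback(lambda: print("tick"))     # any callable (+ args)
    timer.start()
    return timer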
class FigureCanvasGTK3(Gtk.DrawingArea, FigureCanvasBase):
keyvald = {65507 : 'control',
65505 : 'shift',
65513 : 'alt',
65508 : 'control',
65506 : 'shift',
65514 : 'alt',
65361 : 'left',
65362 : 'up',
65363 : 'right',
65364 : 'down',
65307 : 'escape',
65470 : 'f1',
65471 : 'f2',
65472 : 'f3',
65473 : 'f4',
65474 : 'f5',
65475 : 'f6',
65476 : 'f7',
65477 : 'f8',
65478 : 'f9',
65479 : 'f10',
65480 : 'f11',
65481 : 'f12',
65300 : 'scroll_lock',
65299 : 'break',
65288 : 'backspace',
65293 : 'enter',
65379 : 'insert',
65535 : 'delete',
65360 : 'home',
65367 : 'end',
65365 : 'pageup',
65366 : 'pagedown',
65438 : '0',
65436 : '1',
65433 : '2',
65435 : '3',
65430 : '4',
65437 : '5',
65432 : '6',
65429 : '7',
65431 : '8',
65434 : '9',
65451 : '+',
65453 : '-',
65450 : '*',
65455 : '/',
65439 : 'dec',
65421 : 'enter',
}
# Setting this as a static constant prevents
# this resulting expression from leaking
event_mask = (Gdk.EventMask.BUTTON_PRESS_MASK |
Gdk.EventMask.BUTTON_RELEASE_MASK |
Gdk.EventMask.EXPOSURE_MASK |
Gdk.EventMask.KEY_PRESS_MASK |
Gdk.EventMask.KEY_RELEASE_MASK |
Gdk.EventMask.ENTER_NOTIFY_MASK |
Gdk.EventMask.LEAVE_NOTIFY_MASK |
Gdk.EventMask.POINTER_MOTION_MASK |
Gdk.EventMask.POINTER_MOTION_HINT_MASK|
Gdk.EventMask.SCROLL_MASK)
def __init__(self, figure):
FigureCanvasBase.__init__(self, figure)
GObject.GObject.__init__(self)
self._idle_draw_id = 0
self._lastCursor = None
self.connect('scroll_event', self.scroll_event)
self.connect('button_press_event', self.button_press_event)
self.connect('button_release_event', self.button_release_event)
self.connect('configure_event', self.configure_event)
self.connect('draw', self.on_draw_event)
self.connect('key_press_event', self.key_press_event)
self.connect('key_release_event', self.key_release_event)
self.connect('motion_notify_event', self.motion_notify_event)
self.connect('leave_notify_event', self.leave_notify_event)
self.connect('enter_notify_event', self.enter_notify_event)
self.connect('size_allocate', self.size_allocate)
self.set_events(self.__class__.event_mask)
self.set_double_buffered(True)
self.set_can_focus(True)
self._renderer_init()
default_context = GLib.main_context_get_thread_default() or GLib.main_context_default()
def destroy(self):
#Gtk.DrawingArea.destroy(self)
self.close_event()
if self._idle_draw_id != 0:
GLib.source_remove(self._idle_draw_id)
def scroll_event(self, widget, event):
x = event.x
# flipy so y=0 is bottom of canvas
y = self.get_allocation().height - event.y
if event.direction==Gdk.ScrollDirection.UP:
step = 1
else:
step = -1
FigureCanvasBase.scroll_event(self, x, y, step, guiEvent=event)
return False # finish event propagation?
def button_press_event(self, widget, event):
x = event.x
# flipy so y=0 is bottom of canvas
y = self.get_allocation().height - event.y
FigureCanvasBase.button_press_event(self, x, y, event.button, guiEvent=event)
return False # finish event propagation?
def button_release_event(self, widget, event):
x = event.x
# flipy so y=0 is bottom of canvas
y = self.get_allocation().height - event.y
FigureCanvasBase.button_release_event(self, x, y, event.button, guiEvent=event)
return False # finish event propagation?
def key_press_event(self, widget, event):
key = self._get_key(event)
FigureCanvasBase.key_press_event(self, key, guiEvent=event)
return True # stop event propagation
def key_release_event(self, widget, event):
key = self._get_key(event)
FigureCanvasBase.key_release_event(self, key, guiEvent=event)
return True # stop event propagation
def motion_notify_event(self, widget, event):
if event.is_hint:
t, x, y, state = event.window.get_pointer()
else:
x, y, state = event.x, event.y, event.get_state()
# flipy so y=0 is bottom of canvas
y = self.get_allocation().height - y
FigureCanvasBase.motion_notify_event(self, x, y, guiEvent=event)
return False # finish event propagation?
def leave_notify_event(self, widget, event):
FigureCanvasBase.leave_notify_event(self, event)
def enter_notify_event(self, widget, event):
FigureCanvasBase.enter_notify_event(self, event)
def size_allocate(self, widget, allocation):
dpival = self.figure.dpi
winch = allocation.width / dpival
hinch = allocation.height / dpival
self.figure.set_size_inches(winch, hinch, forward=False)
FigureCanvasBase.resize_event(self)
self.draw_idle()
def _get_key(self, event):
if event.keyval in self.keyvald:
key = self.keyvald[event.keyval]
elif event.keyval < 256:
key = chr(event.keyval)
else:
key = None
modifiers = [
(Gdk.ModifierType.MOD4_MASK, 'super'),
(Gdk.ModifierType.MOD1_MASK, 'alt'),
(Gdk.ModifierType.CONTROL_MASK, 'ctrl'),
]
for key_mask, prefix in modifiers:
if event.state & key_mask:
key = '{0}+{1}'.format(prefix, key)
return key
def configure_event(self, widget, event):
if widget.get_property("window") is None:
return
w, h = event.width, event.height
if w < 3 or h < 3:
return # empty fig
# resize the figure (in inches)
dpi = self.figure.dpi
self.figure.set_size_inches(w/dpi, h/dpi, forward=False)
return False # finish event propagation?
def on_draw_event(self, widget, ctx):
# to be overwritten by GTK3Agg or GTK3Cairo
pass
def draw(self):
if self.get_visible() and self.get_mapped():
self.queue_draw()
# do a synchronous draw (its less efficient than an async draw,
# but is required if/when animation is used)
self.get_property("window").process_updates (False)
def draw_idle(self):
if self._idle_draw_id != 0:
return
def idle_draw(*args):
try:
self.draw()
finally:
self._idle_draw_id = 0
return False
self._idle_draw_id = GLib.idle_add(idle_draw)
def new_timer(self, *args, **kwargs):
"""
Creates a new backend-specific subclass of :class:`backend_bases.Timer`.
This is useful for getting periodic events through the backend's native
event loop. Implemented only for backends with GUIs.
Other Parameters
----------------
interval : scalar
Timer interval in milliseconds
callbacks : list
Sequence of (func, args, kwargs) where ``func(*args, **kwargs)``
will be executed by the timer every *interval*.
"""
return TimerGTK3(*args, **kwargs)
def flush_events(self):
Gdk.threads_enter()
while Gtk.events_pending():
Gtk.main_iteration()
Gdk.flush()
Gdk.threads_leave()
class FigureManagerGTK3(FigureManagerBase):
"""
Attributes
----------
canvas : `FigureCanvas`
The FigureCanvas instance
num : int or str
The Figure number
toolbar : Gtk.Toolbar
The Gtk.Toolbar (gtk only)
vbox : Gtk.VBox
The Gtk.VBox containing the canvas and toolbar (gtk only)
window : Gtk.Window
The Gtk.Window (gtk only)
"""
def __init__(self, canvas, num):
FigureManagerBase.__init__(self, canvas, num)
self.window = Gtk.Window()
self.window.set_wmclass("matplotlib", "Matplotlib")
self.set_window_title("Figure %d" % num)
try:
self.window.set_icon_from_file(window_icon)
except (SystemExit, KeyboardInterrupt):
# re-raise exit type Exceptions
raise
except:
# some versions of gtk throw a glib.GError but not
# all, so I am not sure how to catch it. I am unhappy
# doing a blanket catch here, but am not sure what a
# better way is - JDH
_log.info('Could not load matplotlib icon: %s', sys.exc_info()[1])
self.vbox = Gtk.Box()
self.vbox.set_property("orientation", Gtk.Orientation.VERTICAL)
self.window.add(self.vbox)
self.vbox.show()
self.canvas.show()
self.vbox.pack_start(self.canvas, True, True, 0)
# calculate size for window
w = int (self.canvas.figure.bbox.width)
h = int (self.canvas.figure.bbox.height)
self.toolmanager = self._get_toolmanager()
self.toolbar = self._get_toolbar()
self.statusbar = None
def add_widget(child, expand, fill, padding):
child.show()
self.vbox.pack_end(child, False, False, 0)
size_request = child.size_request()
return size_request.height
if self.toolmanager:
backend_tools.add_tools_to_manager(self.toolmanager)
if self.toolbar:
backend_tools.add_tools_to_container(self.toolbar)
self.statusbar = StatusbarGTK3(self.toolmanager)
h += add_widget(self.statusbar, False, False, 0)
h += add_widget(Gtk.HSeparator(), False, False, 0)
if self.toolbar is not None:
self.toolbar.show()
h += add_widget(self.toolbar, False, False, 0)
self.window.set_default_size (w, h)
def destroy(*args):
Gcf.destroy(num)
self.window.connect("destroy", destroy)
self.window.connect("delete_event", destroy)
if matplotlib.is_interactive():
self.window.show()
self.canvas.draw_idle()
def notify_axes_change(fig):
'this will be called whenever the current axes is changed'
if self.toolmanager is not None:
pass
elif self.toolbar is not None:
self.toolbar.update()
self.canvas.figure.add_axobserver(notify_axes_change)
self.canvas.grab_focus()
def destroy(self, *args):
self.vbox.destroy()
self.window.destroy()
self.canvas.destroy()
if self.toolbar:
self.toolbar.destroy()
if (Gcf.get_num_fig_managers() == 0 and
not matplotlib.is_interactive() and
Gtk.main_level() >= 1):
Gtk.main_quit()
def show(self):
# show the figure window
self.window.show()
self.window.present()
def full_screen_toggle (self):
self._full_screen_flag = not self._full_screen_flag
if self._full_screen_flag:
self.window.fullscreen()
else:
self.window.unfullscreen()
_full_screen_flag = False
def _get_toolbar(self):
# must be inited after the window, drawingArea and figure
# attrs are set
if rcParams['toolbar'] == 'toolbar2':
toolbar = NavigationToolbar2GTK3(self.canvas, self.window)
elif rcParams['toolbar'] == 'toolmanager':
toolbar = ToolbarGTK3(self.toolmanager)
else:
toolbar = None
return toolbar
def _get_toolmanager(self):
# must be initialised after toolbar has been set
if rcParams['toolbar'] == 'toolmanager':
toolmanager = ToolManager(self.canvas.figure)
else:
toolmanager = None
return toolmanager
def get_window_title(self):
return self.window.get_title()
def set_window_title(self, title):
self.window.set_title(title)
def resize(self, width, height):
'set the canvas size in pixels'
#_, _, cw, ch = self.canvas.allocation
#_, _, ww, wh = self.window.allocation
#self.window.resize (width-cw+ww, height-ch+wh)
self.window.resize(width, height)
class NavigationToolbar2GTK3(NavigationToolbar2, Gtk.Toolbar):
def __init__(self, canvas, window):
self.win = window
GObject.GObject.__init__(self)
NavigationToolbar2.__init__(self, canvas)
self.ctx = None
def set_message(self, s):
self.message.set_label(s)
def set_cursor(self, cursor):
self.canvas.get_property("window").set_cursor(cursord[cursor])
Gtk.main_iteration()
def release(self, event):
try: del self._pixmapBack
except AttributeError: pass
def draw_rubberband(self, event, x0, y0, x1, y1):
'adapted from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/189744'
self.ctx = self.canvas.get_property("window").cairo_create()
# todo: instead of redrawing the entire figure, copy the part of
# the figure that was covered by the previous rubberband rectangle
self.canvas.draw()
height = self.canvas.figure.bbox.height
y1 = height - y1
y0 = height - y0
w = abs(x1 - x0)
h = abs(y1 - y0)
rect = [int(val) for val in (min(x0,x1), min(y0, y1), w, h)]
self.ctx.new_path()
self.ctx.set_line_width(0.5)
self.ctx.rectangle(rect[0], rect[1], rect[2], rect[3])
self.ctx.set_source_rgb(0, 0, 0)
self.ctx.stroke()
def _init_toolbar(self):
self.set_style(Gtk.ToolbarStyle.ICONS)
basedir = os.path.join(rcParams['datapath'],'images')
for text, tooltip_text, image_file, callback in self.toolitems:
if text is None:
self.insert( Gtk.SeparatorToolItem(), -1 )
continue
fname = os.path.join(basedir, image_file + '.png')
image = Gtk.Image()
image.set_from_file(fname)
tbutton = Gtk.ToolButton()
tbutton.set_label(text)
tbutton.set_icon_widget(image)
self.insert(tbutton, -1)
tbutton.connect('clicked', getattr(self, callback))
tbutton.set_tooltip_text(tooltip_text)
toolitem = Gtk.SeparatorToolItem()
self.insert(toolitem, -1)
toolitem.set_draw(False)
toolitem.set_expand(True)
toolitem = Gtk.ToolItem()
self.insert(toolitem, -1)
self.message = Gtk.Label()
toolitem.add(self.message)
self.show_all()
def get_filechooser(self):
fc = FileChooserDialog(
title='Save the figure',
parent=self.win,
path=os.path.expanduser(rcParams['savefig.directory']),
filetypes=self.canvas.get_supported_filetypes(),
default_filetype=self.canvas.get_default_filetype())
fc.set_current_name(self.canvas.get_default_filename())
return fc
def save_figure(self, *args):
chooser = self.get_filechooser()
fname, format = chooser.get_filename_from_user()
chooser.destroy()
if fname:
startpath = os.path.expanduser(rcParams['savefig.directory'])
# Save dir for next time, unless empty str (i.e., use cwd).
if startpath != "":
rcParams['savefig.directory'] = (
os.path.dirname(six.text_type(fname)))
try:
self.canvas.figure.savefig(fname, format=format)
except Exception as e:
error_msg_gtk(str(e), parent=self)
def configure_subplots(self, button):
toolfig = Figure(figsize=(6,3))
canvas = self._get_canvas(toolfig)
toolfig.subplots_adjust(top=0.9)
tool = SubplotTool(self.canvas.figure, toolfig)
w = int(toolfig.bbox.width)
h = int(toolfig.bbox.height)
window = Gtk.Window()
try:
window.set_icon_from_file(window_icon)
except (SystemExit, KeyboardInterrupt):
# re-raise exit type Exceptions
raise
except:
# we presumably already logged a message on the
# failure of the main plot, don't keep reporting
pass
window.set_title("Subplot Configuration Tool")
window.set_default_size(w, h)
vbox = Gtk.Box()
vbox.set_property("orientation", Gtk.Orientation.VERTICAL)
window.add(vbox)
vbox.show()
canvas.show()
vbox.pack_start(canvas, True, True, 0)
window.show()
def _get_canvas(self, fig):
return self.canvas.__class__(fig)
class FileChooserDialog(Gtk.FileChooserDialog):
"""GTK+ file selector which remembers the last file/directory
selected and presents the user with a menu of supported image formats
"""
def __init__ (self,
title = 'Save file',
parent = None,
action = Gtk.FileChooserAction.SAVE,
buttons = (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_SAVE, Gtk.ResponseType.OK),
path = None,
filetypes = [],
default_filetype = None
):
super (FileChooserDialog, self).__init__ (title, parent, action,
buttons)
self.set_default_response (Gtk.ResponseType.OK)
if not path: path = os.getcwd() + os.sep
# create an extra widget to list supported image formats
self.set_current_folder (path)
self.set_current_name ('image.' + default_filetype)
hbox = Gtk.Box(spacing=10)
hbox.pack_start(Gtk.Label(label="File Format:"), False, False, 0)
liststore = Gtk.ListStore(GObject.TYPE_STRING)
cbox = Gtk.ComboBox() #liststore)
cbox.set_model(liststore)
cell = Gtk.CellRendererText()
cbox.pack_start(cell, True)
cbox.add_attribute(cell, 'text', 0)
hbox.pack_start(cbox, False, False, 0)
self.filetypes = filetypes
self.sorted_filetypes = sorted(six.iteritems(filetypes))
default = 0
for i, (ext, name) in enumerate(self.sorted_filetypes):
liststore.append(["%s (*.%s)" % (name, ext)])
if ext == default_filetype:
default = i
cbox.set_active(default)
self.ext = default_filetype
def cb_cbox_changed (cbox, data=None):
"""File extension changed"""
head, filename = os.path.split(self.get_filename())
root, ext = os.path.splitext(filename)
ext = ext[1:]
new_ext = self.sorted_filetypes[cbox.get_active()][0]
self.ext = new_ext
if ext in self.filetypes:
filename = root + '.' + new_ext
elif ext == '':
filename = filename.rstrip('.') + '.' + new_ext
self.set_current_name (filename)
cbox.connect ("changed", cb_cbox_changed)
hbox.show_all()
self.set_extra_widget(hbox)
def get_filename_from_user (self):
while True:
filename = None
if self.run() != int(Gtk.ResponseType.OK):
break
filename = self.get_filename()
break
return filename, self.ext
class RubberbandGTK3(backend_tools.RubberbandBase):
def __init__(self, *args, **kwargs):
backend_tools.RubberbandBase.__init__(self, *args, **kwargs)
self.ctx = None
def draw_rubberband(self, x0, y0, x1, y1):
# 'adapted from http://aspn.activestate.com/ASPN/Cookbook/Python/
# Recipe/189744'
self.ctx = self.figure.canvas.get_property("window").cairo_create()
# todo: instead of redrawing the entire figure, copy the part of
# the figure that was covered by the previous rubberband rectangle
self.figure.canvas.draw()
height = self.figure.bbox.height
y1 = height - y1
y0 = height - y0
w = abs(x1 - x0)
h = abs(y1 - y0)
rect = [int(val) for val in (min(x0, x1), min(y0, y1), w, h)]
self.ctx.new_path()
self.ctx.set_line_width(0.5)
self.ctx.rectangle(rect[0], rect[1], rect[2], rect[3])
self.ctx.set_source_rgb(0, 0, 0)
self.ctx.stroke()
class ToolbarGTK3(ToolContainerBase, Gtk.Box):
_icon_extension = '.png'
def __init__(self, toolmanager):
ToolContainerBase.__init__(self, toolmanager)
Gtk.Box.__init__(self)
self.set_property("orientation", Gtk.Orientation.VERTICAL)
self._toolarea = Gtk.Box()
self._toolarea.set_property('orientation', Gtk.Orientation.HORIZONTAL)
self.pack_start(self._toolarea, False, False, 0)
self._toolarea.show_all()
self._groups = {}
self._toolitems = {}
def add_toolitem(self, name, group, position, image_file, description,
toggle):
if toggle:
tbutton = Gtk.ToggleToolButton()
else:
tbutton = Gtk.ToolButton()
tbutton.set_label(name)
if image_file is not None:
image = Gtk.Image()
image.set_from_file(image_file)
tbutton.set_icon_widget(image)
if position is None:
position = -1
self._add_button(tbutton, group, position)
signal = tbutton.connect('clicked', self._call_tool, name)
tbutton.set_tooltip_text(description)
tbutton.show_all()
self._toolitems.setdefault(name, [])
self._toolitems[name].append((tbutton, signal))
def _add_button(self, button, group, position):
if group not in self._groups:
if self._groups:
self._add_separator()
toolbar = Gtk.Toolbar()
toolbar.set_style(Gtk.ToolbarStyle.ICONS)
self._toolarea.pack_start(toolbar, False, False, 0)
toolbar.show_all()
self._groups[group] = toolbar
self._groups[group].insert(button, position)
def _call_tool(self, btn, name):
self.trigger_tool(name)
def toggle_toolitem(self, name, toggled):
if name not in self._toolitems:
return
for toolitem, signal in self._toolitems[name]:
toolitem.handler_block(signal)
toolitem.set_active(toggled)
toolitem.handler_unblock(signal)
def remove_toolitem(self, name):
if name not in self._toolitems:
self.toolmanager.message_event('%s Not in toolbar' % name, self)
return
for group in self._groups:
for toolitem, _signal in self._toolitems[name]:
if toolitem in self._groups[group]:
self._groups[group].remove(toolitem)
del self._toolitems[name]
def _add_separator(self):
sep = Gtk.Separator()
sep.set_property("orientation", Gtk.Orientation.VERTICAL)
self._toolarea.pack_start(sep, False, True, 0)
sep.show_all()
class StatusbarGTK3(StatusbarBase, Gtk.Statusbar):
def __init__(self, *args, **kwargs):
StatusbarBase.__init__(self, *args, **kwargs)
Gtk.Statusbar.__init__(self)
self._context = self.get_context_id('message')
def set_message(self, s):
self.pop(self._context)
self.push(self._context, s)
class SaveFigureGTK3(backend_tools.SaveFigureBase):
def get_filechooser(self):
fc = FileChooserDialog(
title='Save the figure',
parent=self.figure.canvas.manager.window,
path=os.path.expanduser(rcParams['savefig.directory']),
filetypes=self.figure.canvas.get_supported_filetypes(),
default_filetype=self.figure.canvas.get_default_filetype())
fc.set_current_name(self.figure.canvas.get_default_filename())
return fc
def trigger(self, *args, **kwargs):
chooser = self.get_filechooser()
fname, format_ = chooser.get_filename_from_user()
chooser.destroy()
if fname:
startpath = os.path.expanduser(rcParams['savefig.directory'])
if startpath == '':
# explicitly missing key or empty str signals to use cwd
rcParams['savefig.directory'] = startpath
else:
# save dir for next time
rcParams['savefig.directory'] = os.path.dirname(
six.text_type(fname))
try:
self.figure.canvas.print_figure(fname, format=format_)
except Exception as e:
error_msg_gtk(str(e), parent=self)
class SetCursorGTK3(backend_tools.SetCursorBase):
def set_cursor(self, cursor):
self.figure.canvas.get_property("window").set_cursor(cursord[cursor])
class ConfigureSubplotsGTK3(backend_tools.ConfigureSubplotsBase, Gtk.Window):
def __init__(self, *args, **kwargs):
backend_tools.ConfigureSubplotsBase.__init__(self, *args, **kwargs)
self.window = None
def init_window(self):
if self.window:
return
self.window = Gtk.Window(title="Subplot Configuration Tool")
try:
self.window.window.set_icon_from_file(window_icon)
except (SystemExit, KeyboardInterrupt):
# re-raise exit type Exceptions
raise
except:
# we presumably already logged a message on the
# failure of the main plot, don't keep reporting
pass
self.vbox = Gtk.Box()
self.vbox.set_property("orientation", Gtk.Orientation.VERTICAL)
self.window.add(self.vbox)
self.vbox.show()
self.window.connect('destroy', self.destroy)
toolfig = Figure(figsize=(6, 3))
canvas = self.figure.canvas.__class__(toolfig)
toolfig.subplots_adjust(top=0.9)
SubplotTool(self.figure, toolfig)
w = int(toolfig.bbox.width)
h = int(toolfig.bbox.height)
self.window.set_default_size(w, h)
canvas.show()
self.vbox.pack_start(canvas, True, True, 0)
self.window.show()
def destroy(self, *args):
self.window.destroy()
self.window = None
def _get_canvas(self, fig):
return self.canvas.__class__(fig)
def trigger(self, sender, event, data=None):
self.init_window()
self.window.present()
# Define the file to use as the GTK icon
if sys.platform == 'win32':
icon_filename = 'matplotlib.png'
else:
icon_filename = 'matplotlib.svg'
window_icon = os.path.join(
matplotlib.rcParams['datapath'], 'images', icon_filename)
def error_msg_gtk(msg, parent=None):
if parent is not None: # find the toplevel Gtk.Window
parent = parent.get_toplevel()
if not parent.is_toplevel():
parent = None
if not isinstance(msg, six.string_types):
msg = ','.join(map(str, msg))
dialog = Gtk.MessageDialog(
parent = parent,
type = Gtk.MessageType.ERROR,
buttons = Gtk.ButtonsType.OK,
message_format = msg)
dialog.run()
dialog.destroy()
backend_tools.ToolSaveFigure = SaveFigureGTK3
backend_tools.ToolConfigureSubplots = ConfigureSubplotsGTK3
backend_tools.ToolSetCursor = SetCursorGTK3
backend_tools.ToolRubberband = RubberbandGTK3
Toolbar = ToolbarGTK3
@_Backend.export
class _BackendGTK3(_Backend):
FigureCanvas = FigureCanvasGTK3
FigureManager = FigureManagerGTK3
@staticmethod
def trigger_manager_draw(manager):
manager.canvas.draw_idle()
@staticmethod
def mainloop():
if Gtk.main_level() == 0:
Gtk.main()
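# Hedged usage sketch (added for illustration, not part of the original
# module). Assuming a working GTK3/PyGObject installation and a display,
# this backend is normally selected and driven through pyplot, e.g.:
#
#     import matplotlib
#     matplotlib.use('GTK3Agg')          # or 'GTK3Cairo'
#     import matplotlib.pyplot as plt
#     plt.plot([1, 2, 3])
#     plt.show()   # blocks in Gtk.main() via _BackendGTK3.mainloop()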
| 31,712 | 33.433225 | 166 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backends/backend_svg.py |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import six
from six import unichr
from six.moves import xrange
import base64
import codecs
import gzip
import hashlib
import io
import logging
import re
import uuid
import numpy as np
from matplotlib import cbook, __version__, rcParams
from matplotlib.backend_bases import (
_Backend, FigureCanvasBase, FigureManagerBase, RendererBase)
from matplotlib.backends.backend_mixed import MixedModeRenderer
from matplotlib.cbook import is_writable_file_like, maxdict
from matplotlib.colors import rgb2hex
from matplotlib.font_manager import findfont, get_font
from matplotlib.ft2font import LOAD_NO_HINTING
from matplotlib.mathtext import MathTextParser
from matplotlib.path import Path
from matplotlib import _path
from matplotlib.transforms import Affine2D, Affine2DBase
from matplotlib import _png
_log = logging.getLogger(__name__)
backend_version = __version__
# ----------------------------------------------------------------------
# SimpleXMLWriter class
#
# Based on an original by Fredrik Lundh, but modified here to:
# 1. Support modern Python idioms
# 2. Remove encoding support (it's handled by the file writer instead)
# 3. Support proper indentation
# 4. Minify things a little bit
# --------------------------------------------------------------------
# The SimpleXMLWriter module is
#
# Copyright (c) 2001-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
def escape_cdata(s):
    s = s.replace("&", "&amp;")
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    return s
_escape_xml_comment = re.compile(r'-(?=-)')
def escape_comment(s):
s = escape_cdata(s)
return _escape_xml_comment.sub('- ', s)
def escape_attrib(s):
    s = s.replace("&", "&amp;")
    s = s.replace("'", "&apos;")
    s = s.replace("\"", "&quot;")
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    return s
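# For example (added for illustration):
#     escape_attrib('a<b & "c"')  ->  'a&lt;b &amp; &quot;c&quot;'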
def short_float_fmt(x):
"""
Create a short string representation of a float, which is %f
formatting with trailing zeros and the decimal point removed.
"""
return '{0:f}'.format(x).rstrip('0').rstrip('.')
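# For example (added for illustration):
#     short_float_fmt(1.50)  ->  '1.5'
#     short_float_fmt(2.0)   ->  '2'
#     short_float_fmt(0.25)  ->  '0.25'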
##
# XML writer class.
#
# @param file A file or file-like object. This object must implement
# a <b>write</b> method that takes an 8-bit string.
class XMLWriter(object):
def __init__(self, file):
self.__write = file.write
if hasattr(file, "flush"):
self.flush = file.flush
self.__open = 0 # true if start tag is open
self.__tags = []
self.__data = []
self.__indentation = " " * 64
def __flush(self, indent=True):
# flush internal buffers
if self.__open:
if indent:
self.__write(">\n")
else:
self.__write(">")
self.__open = 0
if self.__data:
data = ''.join(self.__data)
self.__write(escape_cdata(data))
self.__data = []
## Opens a new element. Attributes can be given as keyword
# arguments, or as a string/string dictionary. The method returns
# an opaque identifier that can be passed to the <b>close</b>
# method, to close all open elements up to and including this one.
#
# @param tag Element tag.
# @param attrib Attribute dictionary. Alternatively, attributes
# can be given as keyword arguments.
# @return An element identifier.
def start(self, tag, attrib={}, **extra):
self.__flush()
tag = escape_cdata(tag)
self.__data = []
self.__tags.append(tag)
self.__write(self.__indentation[:len(self.__tags) - 1])
self.__write("<%s" % tag)
if attrib or extra:
attrib = attrib.copy()
attrib.update(extra)
attrib = sorted(six.iteritems(attrib))
for k, v in attrib:
if not v == '':
k = escape_cdata(k)
v = escape_attrib(v)
self.__write(" %s=\"%s\"" % (k, v))
self.__open = 1
return len(self.__tags)-1
##
# Adds a comment to the output stream.
#
# @param comment Comment text, as a Unicode string.
def comment(self, comment):
self.__flush()
self.__write(self.__indentation[:len(self.__tags)])
self.__write("<!-- %s -->\n" % escape_comment(comment))
##
# Adds character data to the output stream.
#
# @param text Character data, as a Unicode string.
def data(self, text):
self.__data.append(text)
##
# Closes the current element (opened by the most recent call to
# <b>start</b>).
#
# @param tag Element tag. If given, the tag must match the start
# tag. If omitted, the current element is closed.
def end(self, tag=None, indent=True):
if tag:
assert self.__tags, "unbalanced end(%s)" % tag
assert escape_cdata(tag) == self.__tags[-1],\
"expected end(%s), got %s" % (self.__tags[-1], tag)
else:
assert self.__tags, "unbalanced end()"
tag = self.__tags.pop()
if self.__data:
self.__flush(indent)
elif self.__open:
self.__open = 0
self.__write("/>\n")
return
if indent:
self.__write(self.__indentation[:len(self.__tags)])
self.__write("</%s>\n" % tag)
##
# Closes open elements, up to (and including) the element identified
# by the given identifier.
#
# @param id Element identifier, as returned by the <b>start</b> method.
def close(self, id):
while len(self.__tags) > id:
self.end()
##
# Adds an entire element. This is the same as calling <b>start</b>,
# <b>data</b>, and <b>end</b> in sequence. The <b>text</b> argument
# can be omitted.
def element(self, tag, text=None, attrib={}, **extra):
self.start(*(tag, attrib), **extra)
if text:
self.data(text)
self.end(indent=False)
##
# Flushes the output stream.
def flush(self):
pass # replaced by the constructor
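# Minimal usage sketch of XMLWriter (added for illustration; the buffer and
# element names below are hypothetical, not taken from the code above):
#
#     buf = io.StringIO()
#     w = XMLWriter(buf)
#     root = w.start('svg', width='10pt')
#     w.element('rect', x='0', y='0', width='10', height='10')
#     w.close(root)            # closes every element up to and including root
#     print(buf.getvalue())    # nested, indented XML with escaped attributes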
# ----------------------------------------------------------------------
def generate_transform(transform_list=[]):
if len(transform_list):
output = io.StringIO()
for type, value in transform_list:
if type == 'scale' and (value == (1.0,) or value == (1.0, 1.0)):
continue
if type == 'translate' and value == (0.0, 0.0):
continue
if type == 'rotate' and value == (0.0,):
continue
if type == 'matrix' and isinstance(value, Affine2DBase):
value = value.to_values()
output.write('%s(%s)' % (
type, ' '.join(short_float_fmt(x) for x in value)))
return output.getvalue()
return ''
def generate_css(attrib={}):
if attrib:
output = io.StringIO()
attrib = sorted(six.iteritems(attrib))
for k, v in attrib:
k = escape_attrib(k)
v = escape_attrib(v)
output.write("%s:%s;" % (k, v))
return output.getvalue()
return ''
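# For example (added for illustration):
#     generate_transform([('translate', (10, 20)), ('scale', (2.0,))])
#         ->  'translate(10 20)scale(2)'
#     generate_css({'stroke': '#ff0000', 'stroke-width': '2'})
#         ->  'stroke:#ff0000;stroke-width:2;'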
_capstyle_d = {'projecting' : 'square', 'butt' : 'butt', 'round': 'round',}
class RendererSVG(RendererBase):
FONT_SCALE = 100.0
fontd = maxdict(50)
def __init__(self, width, height, svgwriter, basename=None, image_dpi=72):
self.width = width
self.height = height
self.writer = XMLWriter(svgwriter)
self.image_dpi = image_dpi # the actual dpi we want to rasterize stuff with
self._groupd = {}
if not rcParams['svg.image_inline']:
assert basename is not None
self.basename = basename
self._imaged = {}
self._clipd = OrderedDict()
self._char_defs = {}
self._markers = {}
self._path_collection_id = 0
self._imaged = {}
self._hatchd = OrderedDict()
self._has_gouraud = False
self._n_gradients = 0
self._fonts = OrderedDict()
self.mathtext_parser = MathTextParser('SVG')
RendererBase.__init__(self)
self._glyph_map = dict()
str_height = short_float_fmt(height)
str_width = short_float_fmt(width)
svgwriter.write(svgProlog)
self._start_id = self.writer.start(
'svg',
width='%spt' % str_width,
height='%spt' % str_height,
viewBox='0 0 %s %s' % (str_width, str_height),
xmlns="http://www.w3.org/2000/svg",
version="1.1",
attrib={'xmlns:xlink': "http://www.w3.org/1999/xlink"})
self._write_default_style()
def finalize(self):
self._write_clips()
self._write_hatches()
self._write_svgfonts()
self.writer.close(self._start_id)
self.writer.flush()
def _write_default_style(self):
writer = self.writer
default_style = generate_css({
'stroke-linejoin': 'round',
'stroke-linecap': 'butt'})
writer.start('defs')
writer.start('style', type='text/css')
writer.data('*{%s}\n' % default_style)
writer.end('style')
writer.end('defs')
def _make_id(self, type, content):
content = str(content)
if rcParams['svg.hashsalt'] is None:
salt = str(uuid.uuid4())
else:
salt = rcParams['svg.hashsalt']
if six.PY3:
content = content.encode('utf8')
salt = salt.encode('utf8')
m = hashlib.md5()
m.update(salt)
m.update(content)
return '%s%s' % (type, m.hexdigest()[:10])
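    # Illustrative note (not from the original source): with a fixed
    # rcParams['svg.hashsalt'] the ids produced by _make_id depend only on
    # the content, which helps make repeated saves of the same figure
    # reproducible; the default (a fresh uuid4 salt) instead avoids id
    # collisions between independently generated documents.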
def _make_flip_transform(self, transform):
return (transform +
Affine2D()
.scale(1.0, -1.0)
.translate(0.0, self.height))
def _get_font(self, prop):
fname = findfont(prop)
font = get_font(fname)
font.clear()
size = prop.get_size_in_points()
font.set_size(size, 72.0)
return font
def _get_hatch(self, gc, rgbFace):
"""
Create a new hatch pattern
"""
if rgbFace is not None:
rgbFace = tuple(rgbFace)
edge = gc.get_hatch_color()
if edge is not None:
edge = tuple(edge)
dictkey = (gc.get_hatch(), rgbFace, edge)
oid = self._hatchd.get(dictkey)
if oid is None:
oid = self._make_id('h', dictkey)
self._hatchd[dictkey] = ((gc.get_hatch_path(), rgbFace, edge), oid)
else:
_, oid = oid
return oid
def _write_hatches(self):
if not len(self._hatchd):
return
HATCH_SIZE = 72
writer = self.writer
writer.start('defs')
for ((path, face, stroke), oid) in six.itervalues(self._hatchd):
writer.start(
'pattern',
id=oid,
patternUnits="userSpaceOnUse",
x="0", y="0", width=six.text_type(HATCH_SIZE),
height=six.text_type(HATCH_SIZE))
path_data = self._convert_path(
path,
Affine2D().scale(HATCH_SIZE).scale(1.0, -1.0).translate(0, HATCH_SIZE),
simplify=False)
if face is None:
fill = 'none'
else:
fill = rgb2hex(face)
writer.element(
'rect',
x="0", y="0", width=six.text_type(HATCH_SIZE+1),
height=six.text_type(HATCH_SIZE+1),
fill=fill)
writer.element(
'path',
d=path_data,
style=generate_css({
'fill': rgb2hex(stroke),
'stroke': rgb2hex(stroke),
'stroke-width': six.text_type(rcParams['hatch.linewidth']),
'stroke-linecap': 'butt',
'stroke-linejoin': 'miter'
})
)
writer.end('pattern')
writer.end('defs')
def _get_style_dict(self, gc, rgbFace):
"""
return the style string. style is generated from the
GraphicsContext and rgbFace
"""
attrib = {}
forced_alpha = gc.get_forced_alpha()
if gc.get_hatch() is not None:
attrib['fill'] = "url(#%s)" % self._get_hatch(gc, rgbFace)
if rgbFace is not None and len(rgbFace) == 4 and rgbFace[3] != 1.0 and not forced_alpha:
attrib['fill-opacity'] = short_float_fmt(rgbFace[3])
else:
if rgbFace is None:
attrib['fill'] = 'none'
else:
if tuple(rgbFace[:3]) != (0, 0, 0):
attrib['fill'] = rgb2hex(rgbFace)
if len(rgbFace) == 4 and rgbFace[3] != 1.0 and not forced_alpha:
attrib['fill-opacity'] = short_float_fmt(rgbFace[3])
if forced_alpha and gc.get_alpha() != 1.0:
attrib['opacity'] = short_float_fmt(gc.get_alpha())
offset, seq = gc.get_dashes()
if seq is not None:
attrib['stroke-dasharray'] = ','.join([short_float_fmt(val) for val in seq])
attrib['stroke-dashoffset'] = short_float_fmt(float(offset))
linewidth = gc.get_linewidth()
if linewidth:
rgb = gc.get_rgb()
attrib['stroke'] = rgb2hex(rgb)
if not forced_alpha and rgb[3] != 1.0:
attrib['stroke-opacity'] = short_float_fmt(rgb[3])
if linewidth != 1.0:
attrib['stroke-width'] = short_float_fmt(linewidth)
if gc.get_joinstyle() != 'round':
attrib['stroke-linejoin'] = gc.get_joinstyle()
if gc.get_capstyle() != 'butt':
attrib['stroke-linecap'] = _capstyle_d[gc.get_capstyle()]
return attrib
def _get_style(self, gc, rgbFace):
return generate_css(self._get_style_dict(gc, rgbFace))
def _get_clip(self, gc):
cliprect = gc.get_clip_rectangle()
clippath, clippath_trans = gc.get_clip_path()
if clippath is not None:
clippath_trans = self._make_flip_transform(clippath_trans)
dictkey = (id(clippath), str(clippath_trans))
elif cliprect is not None:
x, y, w, h = cliprect.bounds
y = self.height-(y+h)
dictkey = (x, y, w, h)
else:
return None
clip = self._clipd.get(dictkey)
if clip is None:
oid = self._make_id('p', dictkey)
if clippath is not None:
self._clipd[dictkey] = ((clippath, clippath_trans), oid)
else:
self._clipd[dictkey] = (dictkey, oid)
else:
clip, oid = clip
return oid
def _write_clips(self):
if not len(self._clipd):
return
writer = self.writer
writer.start('defs')
for clip, oid in six.itervalues(self._clipd):
writer.start('clipPath', id=oid)
if len(clip) == 2:
clippath, clippath_trans = clip
path_data = self._convert_path(clippath, clippath_trans, simplify=False)
writer.element('path', d=path_data)
else:
x, y, w, h = clip
writer.element(
'rect',
x=short_float_fmt(x),
y=short_float_fmt(y),
width=short_float_fmt(w),
height=short_float_fmt(h))
writer.end('clipPath')
writer.end('defs')
def _write_svgfonts(self):
if not rcParams['svg.fonttype'] == 'svgfont':
return
writer = self.writer
writer.start('defs')
for font_fname, chars in six.iteritems(self._fonts):
font = get_font(font_fname)
font.set_size(72, 72)
sfnt = font.get_sfnt()
writer.start('font', id=sfnt[1, 0, 0, 4].decode("mac_roman"))
writer.element(
'font-face',
attrib={
'font-family': font.family_name,
'font-style': font.style_name.lower(),
'units-per-em': '72',
'bbox': ' '.join(
short_float_fmt(x / 64.0) for x in font.bbox)})
for char in chars:
glyph = font.load_char(char, flags=LOAD_NO_HINTING)
verts, codes = font.get_path()
path = Path(verts, codes)
path_data = self._convert_path(path)
# name = font.get_glyph_name(char)
writer.element(
'glyph',
d=path_data,
attrib={
# 'glyph-name': name,
'unicode': unichr(char),
'horiz-adv-x':
short_float_fmt(glyph.linearHoriAdvance / 65536.0)})
writer.end('font')
writer.end('defs')
def open_group(self, s, gid=None):
"""
Open a grouping element with label *s*. If *gid* is given, use
*gid* as the id of the group.
"""
if gid:
self.writer.start('g', id=gid)
else:
self._groupd[s] = self._groupd.get(s, 0) + 1
self.writer.start('g', id="%s_%d" % (s, self._groupd[s]))
def close_group(self, s):
self.writer.end('g')
def option_image_nocomposite(self):
"""
return whether to generate a composite image from multiple images on
a set of axes
"""
return not rcParams['image.composite_image']
def _convert_path(self, path, transform=None, clip=None, simplify=None,
sketch=None):
if clip:
clip = (0.0, 0.0, self.width, self.height)
else:
clip = None
return _path.convert_to_string(
path, transform, clip, simplify, sketch, 6,
[b'M', b'L', b'Q', b'C', b'z'], False).decode('ascii')
def draw_path(self, gc, path, transform, rgbFace=None):
trans_and_flip = self._make_flip_transform(transform)
clip = (rgbFace is None and gc.get_hatch_path() is None)
simplify = path.should_simplify and clip
path_data = self._convert_path(
path, trans_and_flip, clip=clip, simplify=simplify,
sketch=gc.get_sketch_params())
attrib = {}
attrib['style'] = self._get_style(gc, rgbFace)
clipid = self._get_clip(gc)
if clipid is not None:
attrib['clip-path'] = 'url(#%s)' % clipid
if gc.get_url() is not None:
self.writer.start('a', {'xlink:href': gc.get_url()})
self.writer.element('path', d=path_data, attrib=attrib)
if gc.get_url() is not None:
self.writer.end('a')
def draw_markers(self, gc, marker_path, marker_trans, path, trans, rgbFace=None):
if not len(path.vertices):
return
writer = self.writer
path_data = self._convert_path(
marker_path,
marker_trans + Affine2D().scale(1.0, -1.0),
simplify=False)
style = self._get_style_dict(gc, rgbFace)
dictkey = (path_data, generate_css(style))
oid = self._markers.get(dictkey)
style = generate_css({k: v for k, v in six.iteritems(style)
if k.startswith('stroke')})
if oid is None:
oid = self._make_id('m', dictkey)
writer.start('defs')
writer.element('path', id=oid, d=path_data, style=style)
writer.end('defs')
self._markers[dictkey] = oid
attrib = {}
clipid = self._get_clip(gc)
if clipid is not None:
attrib['clip-path'] = 'url(#%s)' % clipid
writer.start('g', attrib=attrib)
trans_and_flip = self._make_flip_transform(trans)
attrib = {'xlink:href': '#%s' % oid}
clip = (0, 0, self.width*72, self.height*72)
for vertices, code in path.iter_segments(
trans_and_flip, clip=clip, simplify=False):
if len(vertices):
x, y = vertices[-2:]
attrib['x'] = short_float_fmt(x)
attrib['y'] = short_float_fmt(y)
attrib['style'] = self._get_style(gc, rgbFace)
writer.element('use', attrib=attrib)
writer.end('g')
def draw_path_collection(self, gc, master_transform, paths, all_transforms,
offsets, offsetTrans, facecolors, edgecolors,
linewidths, linestyles, antialiaseds, urls,
offset_position):
# Is the optimization worth it? Rough calculation:
# cost of emitting a path in-line is
# (len_path + 5) * uses_per_path
# cost of definition+use is
# (len_path + 3) + 9 * uses_per_path
len_path = len(paths[0].vertices) if len(paths) > 0 else 0
uses_per_path = self._iter_collection_uses_per_path(
paths, all_transforms, offsets, facecolors, edgecolors)
should_do_optimization = \
len_path + 9 * uses_per_path + 3 < (len_path + 5) * uses_per_path
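        # Worked example (added for illustration): a 10-vertex path used 5
        # times costs roughly (10 + 5) * 5 = 75 emitted in-line, but only
        # (10 + 3) + 9 * 5 = 58 as one <defs> entry plus <use> references,
        # so the optimized branch below is taken.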
if not should_do_optimization:
return RendererBase.draw_path_collection(
self, gc, master_transform, paths, all_transforms,
offsets, offsetTrans, facecolors, edgecolors,
linewidths, linestyles, antialiaseds, urls,
offset_position)
writer = self.writer
path_codes = []
writer.start('defs')
for i, (path, transform) in enumerate(self._iter_collection_raw_paths(
master_transform, paths, all_transforms)):
transform = Affine2D(transform.get_matrix()).scale(1.0, -1.0)
d = self._convert_path(path, transform, simplify=False)
oid = 'C%x_%x_%s' % (self._path_collection_id, i,
self._make_id('', d))
writer.element('path', id=oid, d=d)
path_codes.append(oid)
writer.end('defs')
for xo, yo, path_id, gc0, rgbFace in self._iter_collection(
gc, master_transform, all_transforms, path_codes, offsets,
offsetTrans, facecolors, edgecolors, linewidths, linestyles,
antialiaseds, urls, offset_position):
clipid = self._get_clip(gc0)
url = gc0.get_url()
if url is not None:
writer.start('a', attrib={'xlink:href': url})
if clipid is not None:
writer.start('g', attrib={'clip-path': 'url(#%s)' % clipid})
attrib = {
'xlink:href': '#%s' % path_id,
'x': short_float_fmt(xo),
'y': short_float_fmt(self.height - yo),
'style': self._get_style(gc0, rgbFace)
}
writer.element('use', attrib=attrib)
if clipid is not None:
writer.end('g')
if url is not None:
writer.end('a')
self._path_collection_id += 1
def draw_gouraud_triangle(self, gc, points, colors, trans):
# This uses a method described here:
#
# http://www.svgopen.org/2005/papers/Converting3DFaceToSVG/index.html
#
# that uses three overlapping linear gradients to simulate a
# Gouraud triangle. Each gradient goes from fully opaque in
# one corner to fully transparent along the opposite edge.
# The line between the stop points is perpendicular to the
# opposite edge. Underlying these three gradients is a solid
# triangle whose color is the average of all three points.
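        # Rough shape of the emitted SVG (added for illustration, ids are
        # schematic): three <linearGradient id="GR.._i"> definitions and one
        # <polygon id="GT.."> inside <defs>, then a <use> filled with the
        # average colour followed by three <use> elements filled with the
        # gradients, combined via the additive 'colorAdd' filter that is
        # defined once, the first time this method runs.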
writer = self.writer
if not self._has_gouraud:
self._has_gouraud = True
writer.start(
'filter',
id='colorAdd')
writer.element(
'feComposite',
attrib={'in': 'SourceGraphic'},
in2='BackgroundImage',
operator='arithmetic',
k2="1", k3="1")
writer.end('filter')
avg_color = np.sum(colors[:, :], axis=0) / 3.0
# Just skip fully-transparent triangles
if avg_color[-1] == 0.0:
return
trans_and_flip = self._make_flip_transform(trans)
tpoints = trans_and_flip.transform(points)
writer.start('defs')
for i in range(3):
x1, y1 = tpoints[i]
x2, y2 = tpoints[(i + 1) % 3]
x3, y3 = tpoints[(i + 2) % 3]
c = colors[i][:]
if x2 == x3:
xb = x2
yb = y1
elif y2 == y3:
xb = x1
yb = y2
else:
m1 = (y2 - y3) / (x2 - x3)
b1 = y2 - (m1 * x2)
m2 = -(1.0 / m1)
b2 = y1 - (m2 * x1)
xb = (-b1 + b2) / (m1 - m2)
yb = m2 * xb + b2
writer.start(
'linearGradient',
id="GR%x_%d" % (self._n_gradients, i),
x1=short_float_fmt(x1), y1=short_float_fmt(y1),
x2=short_float_fmt(xb), y2=short_float_fmt(yb))
writer.element(
'stop',
offset='0',
style=generate_css({'stop-color': rgb2hex(c),
'stop-opacity': short_float_fmt(c[-1])}))
writer.element(
'stop',
offset='1',
style=generate_css({'stop-color': rgb2hex(c),
'stop-opacity': "0"}))
writer.end('linearGradient')
writer.element(
'polygon',
id='GT%x' % self._n_gradients,
points=" ".join([short_float_fmt(x)
for x in (x1, y1, x2, y2, x3, y3)]))
writer.end('defs')
avg_color = np.sum(colors[:, :], axis=0) / 3.0
href = '#GT%x' % self._n_gradients
writer.element(
'use',
attrib={'xlink:href': href,
'fill': rgb2hex(avg_color),
'fill-opacity': short_float_fmt(avg_color[-1])})
for i in range(3):
writer.element(
'use',
attrib={'xlink:href': href,
'fill': 'url(#GR%x_%d)' % (self._n_gradients, i),
'fill-opacity': '1',
'filter': 'url(#colorAdd)'})
self._n_gradients += 1
def draw_gouraud_triangles(self, gc, triangles_array, colors_array,
transform):
attrib = {}
clipid = self._get_clip(gc)
if clipid is not None:
attrib['clip-path'] = 'url(#%s)' % clipid
self.writer.start('g', attrib=attrib)
transform = transform.frozen()
for tri, col in zip(triangles_array, colors_array):
self.draw_gouraud_triangle(gc, tri, col, transform)
self.writer.end('g')
def option_scale_image(self):
return True
def get_image_magnification(self):
return self.image_dpi / 72.0
def draw_image(self, gc, x, y, im, transform=None):
h, w = im.shape[:2]
if w == 0 or h == 0:
return
attrib = {}
clipid = self._get_clip(gc)
if clipid is not None:
# Can't apply clip-path directly to the image because the
# image has a transformation, which would also be applied
# to the clip-path
self.writer.start('g', attrib={'clip-path': 'url(#%s)' % clipid})
oid = gc.get_gid()
url = gc.get_url()
if url is not None:
self.writer.start('a', attrib={'xlink:href': url})
if rcParams['svg.image_inline']:
bytesio = io.BytesIO()
_png.write_png(im, bytesio)
oid = oid or self._make_id('image', bytesio.getvalue())
attrib['xlink:href'] = (
"data:image/png;base64,\n" +
base64.b64encode(bytesio.getvalue()).decode('ascii'))
else:
self._imaged[self.basename] = self._imaged.get(self.basename, 0) + 1
filename = '%s.image%d.png'%(self.basename, self._imaged[self.basename])
_log.info('Writing image file for inclusion: %s', filename)
_png.write_png(im, filename)
oid = oid or 'Im_' + self._make_id('image', filename)
attrib['xlink:href'] = filename
attrib['id'] = oid
if transform is None:
w = 72.0 * w / self.image_dpi
h = 72.0 * h / self.image_dpi
self.writer.element(
'image',
transform=generate_transform([
('scale', (1, -1)), ('translate', (0, -h))]),
x=short_float_fmt(x),
y=short_float_fmt(-(self.height - y - h)),
width=short_float_fmt(w), height=short_float_fmt(h),
attrib=attrib)
else:
alpha = gc.get_alpha()
if alpha != 1.0:
attrib['opacity'] = short_float_fmt(alpha)
flipped = (
Affine2D().scale(1.0 / w, 1.0 / h) +
transform +
Affine2D()
.translate(x, y)
.scale(1.0, -1.0)
.translate(0.0, self.height))
attrib['transform'] = generate_transform(
[('matrix', flipped.frozen())])
self.writer.element(
'image',
width=short_float_fmt(w), height=short_float_fmt(h),
attrib=attrib)
if url is not None:
self.writer.end('a')
if clipid is not None:
self.writer.end('g')
def _adjust_char_id(self, char_id):
return char_id.replace("%20", "_")
def _draw_text_as_path(self, gc, x, y, s, prop, angle, ismath, mtext=None):
"""
        Draw the text by converting it to paths using the textpath module.
        Parameters
        ----------
        prop : `matplotlib.font_manager.FontProperties`
            font property
        s : str
            text to be converted
        ismath : bool
            If True, use the mathtext parser. If "TeX", use usetex mode.
"""
writer = self.writer
writer.comment(s)
glyph_map=self._glyph_map
text2path = self._text2path
color = rgb2hex(gc.get_rgb())
fontsize = prop.get_size_in_points()
style = {}
if color != '#000000':
style['fill'] = color
if gc.get_alpha() != 1.0:
style['opacity'] = short_float_fmt(gc.get_alpha())
if not ismath:
font = text2path._get_font(prop)
_glyphs = text2path.get_glyphs_with_font(
font, s, glyph_map=glyph_map, return_new_glyphs_only=True)
glyph_info, glyph_map_new, rects = _glyphs
if glyph_map_new:
writer.start('defs')
for char_id, glyph_path in six.iteritems(glyph_map_new):
path = Path(*glyph_path)
path_data = self._convert_path(path, simplify=False)
writer.element('path', id=char_id, d=path_data)
writer.end('defs')
glyph_map.update(glyph_map_new)
attrib = {}
attrib['style'] = generate_css(style)
font_scale = fontsize / text2path.FONT_SCALE
attrib['transform'] = generate_transform([
('translate', (x, y)),
('rotate', (-angle,)),
('scale', (font_scale, -font_scale))])
writer.start('g', attrib=attrib)
for glyph_id, xposition, yposition, scale in glyph_info:
attrib={'xlink:href': '#%s' % glyph_id}
if xposition != 0.0:
attrib['x'] = short_float_fmt(xposition)
if yposition != 0.0:
attrib['y'] = short_float_fmt(yposition)
writer.element(
'use',
attrib=attrib)
writer.end('g')
else:
if ismath == "TeX":
_glyphs = text2path.get_glyphs_tex(prop, s, glyph_map=glyph_map,
return_new_glyphs_only=True)
else:
_glyphs = text2path.get_glyphs_mathtext(prop, s, glyph_map=glyph_map,
return_new_glyphs_only=True)
glyph_info, glyph_map_new, rects = _glyphs
# we store the character glyphs w/o flipping. Instead, the
# coordinate will be flipped when this characters are
# used.
if glyph_map_new:
writer.start('defs')
for char_id, glyph_path in six.iteritems(glyph_map_new):
char_id = self._adjust_char_id(char_id)
# Some characters are blank
if not len(glyph_path[0]):
path_data = ""
else:
path = Path(*glyph_path)
path_data = self._convert_path(path, simplify=False)
writer.element('path', id=char_id, d=path_data)
writer.end('defs')
glyph_map.update(glyph_map_new)
attrib = {}
font_scale = fontsize / text2path.FONT_SCALE
attrib['style'] = generate_css(style)
attrib['transform'] = generate_transform([
('translate', (x, y)),
('rotate', (-angle,)),
('scale', (font_scale, -font_scale))])
writer.start('g', attrib=attrib)
for char_id, xposition, yposition, scale in glyph_info:
char_id = self._adjust_char_id(char_id)
writer.element(
'use',
transform=generate_transform([
('translate', (xposition, yposition)),
('scale', (scale,)),
]),
attrib={'xlink:href': '#%s' % char_id})
for verts, codes in rects:
path = Path(verts, codes)
path_data = self._convert_path(path, simplify=False)
writer.element('path', d=path_data)
writer.end('g')
def _draw_text_as_text(self, gc, x, y, s, prop, angle, ismath, mtext=None):
writer = self.writer
color = rgb2hex(gc.get_rgb())
style = {}
if color != '#000000':
style['fill'] = color
if gc.get_alpha() != 1.0:
style['opacity'] = short_float_fmt(gc.get_alpha())
if not ismath:
font = self._get_font(prop)
font.set_text(s, 0.0, flags=LOAD_NO_HINTING)
fontsize = prop.get_size_in_points()
fontfamily = font.family_name
fontstyle = prop.get_style()
attrib = {}
# Must add "px" to workaround a Firefox bug
style['font-size'] = short_float_fmt(fontsize) + 'px'
style['font-family'] = six.text_type(fontfamily)
style['font-style'] = prop.get_style().lower()
style['font-weight'] = six.text_type(prop.get_weight()).lower()
attrib['style'] = generate_css(style)
if mtext and (angle == 0 or mtext.get_rotation_mode() == "anchor"):
# If text anchoring can be supported, get the original
# coordinates and add alignment information.
# Get anchor coordinates.
transform = mtext.get_transform()
ax, ay = transform.transform_point(mtext.get_position())
ay = self.height - ay
# Don't do vertical anchor alignment. Most applications do not
# support 'alignment-baseline' yet. Apply the vertical layout
# to the anchor point manually for now.
angle_rad = np.deg2rad(angle)
dir_vert = np.array([np.sin(angle_rad), np.cos(angle_rad)])
v_offset = np.dot(dir_vert, [(x - ax), (y - ay)])
ax = ax + v_offset * dir_vert[0]
ay = ay + v_offset * dir_vert[1]
ha_mpl_to_svg = {'left': 'start', 'right': 'end',
'center': 'middle'}
style['text-anchor'] = ha_mpl_to_svg[mtext.get_ha()]
attrib['x'] = short_float_fmt(ax)
attrib['y'] = short_float_fmt(ay)
attrib['style'] = generate_css(style)
attrib['transform'] = "rotate(%s, %s, %s)" % (
short_float_fmt(-angle),
short_float_fmt(ax),
short_float_fmt(ay))
writer.element('text', s, attrib=attrib)
else:
attrib['transform'] = generate_transform([
('translate', (x, y)),
('rotate', (-angle,))])
writer.element('text', s, attrib=attrib)
if rcParams['svg.fonttype'] == 'svgfont':
fontset = self._fonts.setdefault(font.fname, set())
for c in s:
fontset.add(ord(c))
else:
writer.comment(s)
width, height, descent, svg_elements, used_characters = \
self.mathtext_parser.parse(s, 72, prop)
svg_glyphs = svg_elements.svg_glyphs
svg_rects = svg_elements.svg_rects
attrib = {}
attrib['style'] = generate_css(style)
attrib['transform'] = generate_transform([
('translate', (x, y)),
('rotate', (-angle,))])
# Apply attributes to 'g', not 'text', because we likely
# have some rectangles as well with the same style and
# transformation
writer.start('g', attrib=attrib)
writer.start('text')
# Sort the characters by font, and output one tspan for
# each
spans = OrderedDict()
for font, fontsize, thetext, new_x, new_y, metrics in svg_glyphs:
style = generate_css({
'font-size': short_float_fmt(fontsize) + 'px',
'font-family': font.family_name,
'font-style': font.style_name.lower(),
'font-weight': font.style_name.lower()})
if thetext == 32:
thetext = 0xa0 # non-breaking space
spans.setdefault(style, []).append((new_x, -new_y, thetext))
if rcParams['svg.fonttype'] == 'svgfont':
for font, fontsize, thetext, new_x, new_y, metrics in svg_glyphs:
fontset = self._fonts.setdefault(font.fname, set())
fontset.add(thetext)
for style, chars in six.iteritems(spans):
chars.sort()
same_y = True
if len(chars) > 1:
last_y = chars[0][1]
for i in xrange(1, len(chars)):
if chars[i][1] != last_y:
same_y = False
break
if same_y:
ys = six.text_type(chars[0][1])
else:
ys = ' '.join(six.text_type(c[1]) for c in chars)
attrib = {
'style': style,
'x': ' '.join(short_float_fmt(c[0]) for c in chars),
'y': ys
}
writer.element(
'tspan',
''.join(unichr(c[2]) for c in chars),
attrib=attrib)
writer.end('text')
if len(svg_rects):
for x, y, width, height in svg_rects:
writer.element(
'rect',
x=short_float_fmt(x),
y=short_float_fmt(-y + height),
width=short_float_fmt(width),
height=short_float_fmt(height)
)
writer.end('g')
def draw_tex(self, gc, x, y, s, prop, angle, ismath='TeX!', mtext=None):
self._draw_text_as_path(gc, x, y, s, prop, angle, ismath="TeX")
def draw_text(self, gc, x, y, s, prop, angle, ismath=False, mtext=None):
clipid = self._get_clip(gc)
if clipid is not None:
# Cannot apply clip-path directly to the text, because
# is has a transformation
self.writer.start(
'g', attrib={'clip-path': 'url(#%s)' % clipid})
if gc.get_url() is not None:
self.writer.start('a', {'xlink:href': gc.get_url()})
if rcParams['svg.fonttype'] == 'path':
self._draw_text_as_path(gc, x, y, s, prop, angle, ismath, mtext)
else:
self._draw_text_as_text(gc, x, y, s, prop, angle, ismath, mtext)
if gc.get_url() is not None:
self.writer.end('a')
if clipid is not None:
self.writer.end('g')
def flipy(self):
return True
def get_canvas_width_height(self):
return self.width, self.height
def get_text_width_height_descent(self, s, prop, ismath):
return self._text2path.get_text_width_height_descent(s, prop, ismath)
class FigureCanvasSVG(FigureCanvasBase):
filetypes = {'svg': 'Scalable Vector Graphics',
'svgz': 'Scalable Vector Graphics'}
fixed_dpi = 72
def print_svg(self, filename, *args, **kwargs):
with cbook.open_file_cm(filename, "w", encoding="utf-8") as fh:
filename = getattr(fh, 'name', '')
if not isinstance(filename, six.string_types):
filename = ''
if cbook.file_requires_unicode(fh):
detach = False
else:
if six.PY3:
fh = io.TextIOWrapper(fh, 'utf-8')
else:
fh = codecs.getwriter('utf-8')(fh)
detach = True
result = self._print_svg(filename, fh, **kwargs)
# Detach underlying stream from wrapper so that it remains open in
# the caller.
if detach:
if six.PY3:
fh.detach()
else:
fh.reset()
fh.stream = io.BytesIO()
return result
def print_svgz(self, filename, *args, **kwargs):
with cbook.open_file_cm(filename, "wb") as fh, \
gzip.GzipFile(mode='w', fileobj=fh) as gzipwriter:
return self.print_svg(gzipwriter)
def _print_svg(self, filename, fh, **kwargs):
image_dpi = kwargs.pop("dpi", 72)
self.figure.set_dpi(72.0)
width, height = self.figure.get_size_inches()
w, h = width * 72, height * 72
_bbox_inches_restore = kwargs.pop("bbox_inches_restore", None)
renderer = MixedModeRenderer(
self.figure, width, height, image_dpi,
RendererSVG(w, h, fh, filename, image_dpi),
bbox_inches_restore=_bbox_inches_restore)
self.figure.draw(renderer)
renderer.finalize()
def get_default_filetype(self):
return 'svg'
class FigureManagerSVG(FigureManagerBase):
pass
svgProlog = """\
<?xml version="1.0" encoding="utf-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with matplotlib (http://matplotlib.org/) -->
"""
@_Backend.export
class _BackendSVG(_Backend):
FigureCanvas = FigureCanvasSVG
FigureManager = FigureManagerSVG
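if __name__ == "__main__":
    # Hedged usage sketch (added for illustration, not part of the original
    # module): write a small figure straight to SVG with this backend's
    # canvas; 'svg_example.svg' is a hypothetical output path.
    from matplotlib.figure import Figure
    fig = Figure(figsize=(3, 2))
    canvas = FigureCanvasSVG(fig)
    ax = fig.add_subplot(111)
    ax.plot([0, 1], [0, 1])
    canvas.print_svg('svg_example.svg')  # print_svgz gives the gzipped variant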
| 45,503 | 35.057052 | 100 | py |
| cba-pipeline-public | cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backends/backend_agg.py |
"""
An agg http://antigrain.com/ backend
Features that are implemented
* capstyles and join styles
* dashes
* linewidth
* lines, rectangles, ellipses
* clipping to a rectangle
* output to RGBA and PNG, optionally JPEG and TIFF
* alpha blending
* DPI scaling properly - everything scales properly (dashes, linewidths, etc)
* draw polygon
* freetype2 w/ ft2font
TODO:
* integrate screen dpi w/ ppi and text
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import threading
import numpy as np
from collections import OrderedDict
from math import radians, cos, sin
from matplotlib import cbook, rcParams, __version__
from matplotlib.backend_bases import (
_Backend, FigureCanvasBase, FigureManagerBase, RendererBase, cursors)
from matplotlib.cbook import maxdict
from matplotlib.figure import Figure
from matplotlib.font_manager import findfont, get_font
from matplotlib.ft2font import (LOAD_FORCE_AUTOHINT, LOAD_NO_HINTING,
LOAD_DEFAULT, LOAD_NO_AUTOHINT)
from matplotlib.mathtext import MathTextParser
from matplotlib.path import Path
from matplotlib.transforms import Bbox, BboxBase
from matplotlib import colors as mcolors
from matplotlib.backends._backend_agg import RendererAgg as _RendererAgg
from matplotlib import _png
try:
from PIL import Image
_has_pil = True
except ImportError:
_has_pil = False
backend_version = 'v2.2'
def get_hinting_flag():
mapping = {
True: LOAD_FORCE_AUTOHINT,
False: LOAD_NO_HINTING,
'either': LOAD_DEFAULT,
'native': LOAD_NO_AUTOHINT,
'auto': LOAD_FORCE_AUTOHINT,
'none': LOAD_NO_HINTING
}
return mapping[rcParams['text.hinting']]
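# Illustrative note (not from the original source): for example,
# rcParams['text.hinting'] = 'native' maps to LOAD_NO_AUTOHINT, so FreeType's
# built-in hinting is used and its autohinter is skipped, while 'none'
# disables hinting entirely via LOAD_NO_HINTING.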
class RendererAgg(RendererBase):
"""
The renderer handles all the drawing primitives using a graphics
context instance that controls the colors/styles
"""
@property
@cbook.deprecated("2.2")
def debug(self):
return 1
# we want to cache the fonts at the class level so that when
# multiple figures are created we can reuse them. This helps with
# a bug on windows where the creation of too many figures leads to
# too many open file handles. However, storing them at the class
# level is not thread safe. The solution here is to let the
# FigureCanvas acquire a lock on the fontd at the start of the
# draw, and release it when it is done. This allows multiple
# renderers to share the cached fonts, but only one figure can
# draw at time and so the font cache is used by only one
# renderer at a time.
lock = threading.RLock()
def __init__(self, width, height, dpi):
RendererBase.__init__(self)
self.dpi = dpi
self.width = width
self.height = height
self._renderer = _RendererAgg(int(width), int(height), dpi)
self._filter_renderers = []
self._update_methods()
self.mathtext_parser = MathTextParser('Agg')
self.bbox = Bbox.from_bounds(0, 0, self.width, self.height)
def __getstate__(self):
# We only want to preserve the init keywords of the Renderer.
# Anything else can be re-created.
return {'width': self.width, 'height': self.height, 'dpi': self.dpi}
def __setstate__(self, state):
self.__init__(state['width'], state['height'], state['dpi'])
def _get_hinting_flag(self):
if rcParams['text.hinting']:
return LOAD_FORCE_AUTOHINT
else:
return LOAD_NO_HINTING
# for filtering to work with rasterization, methods needs to be wrapped.
# maybe there is better way to do it.
def draw_markers(self, *kl, **kw):
return self._renderer.draw_markers(*kl, **kw)
def draw_path_collection(self, *kl, **kw):
return self._renderer.draw_path_collection(*kl, **kw)
def _update_methods(self):
self.draw_quad_mesh = self._renderer.draw_quad_mesh
self.draw_gouraud_triangle = self._renderer.draw_gouraud_triangle
self.draw_gouraud_triangles = self._renderer.draw_gouraud_triangles
self.draw_image = self._renderer.draw_image
self.copy_from_bbox = self._renderer.copy_from_bbox
self.get_content_extents = self._renderer.get_content_extents
def tostring_rgba_minimized(self):
extents = self.get_content_extents()
bbox = [[extents[0], self.height - (extents[1] + extents[3])],
[extents[0] + extents[2], self.height - extents[1]]]
region = self.copy_from_bbox(bbox)
return np.array(region), extents
def draw_path(self, gc, path, transform, rgbFace=None):
"""
Draw the path
"""
nmax = rcParams['agg.path.chunksize'] # here at least for testing
npts = path.vertices.shape[0]
if (nmax > 100 and npts > nmax and path.should_simplify and
rgbFace is None and gc.get_hatch() is None):
nch = np.ceil(npts / nmax)
chsize = int(np.ceil(npts / nch))
i0 = np.arange(0, npts, chsize)
i1 = np.zeros_like(i0)
i1[:-1] = i0[1:] - 1
i1[-1] = npts
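            # Worked example (added for illustration): npts = 250000 with
            # agg.path.chunksize = 100000 gives nch = 3, chsize = 83334 and
            # chunk index pairs (0, 83333), (83334, 166667), (166668, 250000).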
for ii0, ii1 in zip(i0, i1):
v = path.vertices[ii0:ii1, :]
c = path.codes
if c is not None:
c = c[ii0:ii1]
c[0] = Path.MOVETO # move to end of last chunk
p = Path(v, c)
try:
self._renderer.draw_path(gc, p, transform, rgbFace)
except OverflowError:
raise OverflowError("Exceeded cell block limit (set "
"'agg.path.chunksize' rcparam)")
else:
try:
self._renderer.draw_path(gc, path, transform, rgbFace)
except OverflowError:
raise OverflowError("Exceeded cell block limit (set "
"'agg.path.chunksize' rcparam)")
def draw_mathtext(self, gc, x, y, s, prop, angle):
"""
Draw the math text using matplotlib.mathtext
"""
ox, oy, width, height, descent, font_image, used_characters = \
self.mathtext_parser.parse(s, self.dpi, prop)
xd = descent * sin(radians(angle))
yd = descent * cos(radians(angle))
x = np.round(x + ox + xd)
y = np.round(y - oy + yd)
self._renderer.draw_text_image(font_image, x, y + 1, angle, gc)
def draw_text(self, gc, x, y, s, prop, angle, ismath=False, mtext=None):
"""
Render the text
"""
if ismath:
return self.draw_mathtext(gc, x, y, s, prop, angle)
flags = get_hinting_flag()
font = self._get_agg_font(prop)
if font is None:
return None
if len(s) == 1 and ord(s) > 127:
font.load_char(ord(s), flags=flags)
else:
# We pass '0' for angle here, since it will be rotated (in raster
# space) in the following call to draw_text_image).
font.set_text(s, 0, flags=flags)
font.draw_glyphs_to_bitmap(antialiased=rcParams['text.antialiased'])
d = font.get_descent() / 64.0
# The descent needs to be adjusted for the angle.
xo, yo = font.get_bitmap_offset()
xo /= 64.0
yo /= 64.0
xd = -d * sin(radians(angle))
yd = d * cos(radians(angle))
self._renderer.draw_text_image(
font, np.round(x - xd + xo), np.round(y + yd + yo) + 1, angle, gc)
def get_text_width_height_descent(self, s, prop, ismath):
"""
Get the width, height, and descent (offset from the bottom
to the baseline), in display coords, of the string *s* with
:class:`~matplotlib.font_manager.FontProperties` *prop*
"""
if ismath in ["TeX", "TeX!"]:
# todo: handle props
size = prop.get_size_in_points()
texmanager = self.get_texmanager()
fontsize = prop.get_size_in_points()
w, h, d = texmanager.get_text_width_height_descent(
s, fontsize, renderer=self)
return w, h, d
if ismath:
ox, oy, width, height, descent, fonts, used_characters = \
self.mathtext_parser.parse(s, self.dpi, prop)
return width, height, descent
flags = get_hinting_flag()
font = self._get_agg_font(prop)
font.set_text(s, 0.0, flags=flags)
w, h = font.get_width_height() # width and height of unrotated string
d = font.get_descent()
w /= 64.0 # convert from subpixels
h /= 64.0
d /= 64.0
return w, h, d
def draw_tex(self, gc, x, y, s, prop, angle, ismath='TeX!', mtext=None):
# todo, handle props, angle, origins
size = prop.get_size_in_points()
texmanager = self.get_texmanager()
Z = texmanager.get_grey(s, size, self.dpi)
Z = np.array(Z * 255.0, np.uint8)
w, h, d = self.get_text_width_height_descent(s, prop, ismath)
xd = d * sin(radians(angle))
yd = d * cos(radians(angle))
x = np.round(x + xd)
y = np.round(y + yd)
self._renderer.draw_text_image(Z, x, y, angle, gc)
def get_canvas_width_height(self):
'return the canvas width and height in display coords'
return self.width, self.height
def _get_agg_font(self, prop):
"""
Get the font for text instance t, cacheing for efficiency
"""
fname = findfont(prop)
font = get_font(fname)
font.clear()
size = prop.get_size_in_points()
font.set_size(size, self.dpi)
return font
def points_to_pixels(self, points):
"""
convert point measures to pixes using dpi and the pixels per
inch of the display
"""
return points*self.dpi/72.0
def tostring_rgb(self):
return self._renderer.tostring_rgb()
def tostring_argb(self):
return self._renderer.tostring_argb()
def buffer_rgba(self):
return self._renderer.buffer_rgba()
def clear(self):
self._renderer.clear()
def option_image_nocomposite(self):
# It is generally faster to composite each image directly to
# the Figure, and there's no file size benefit to compositing
# with the Agg backend
return True
def option_scale_image(self):
"""
agg backend doesn't support arbitrary scaling of image.
"""
return False
def restore_region(self, region, bbox=None, xy=None):
"""
Restore the saved region. If bbox (instance of BboxBase, or
its extents) is given, only the region specified by the bbox
will be restored. *xy* (a tuple of two floasts) optionally
specifies the new position (the LLC of the original region,
not the LLC of the bbox) where the region will be restored.
>>> region = renderer.copy_from_bbox()
>>> x1, y1, x2, y2 = region.get_extents()
>>> renderer.restore_region(region, bbox=(x1+dx, y1, x2, y2),
... xy=(x1-dx, y1))
"""
if bbox is not None or xy is not None:
if bbox is None:
x1, y1, x2, y2 = region.get_extents()
elif isinstance(bbox, BboxBase):
x1, y1, x2, y2 = bbox.extents
else:
x1, y1, x2, y2 = bbox
if xy is None:
ox, oy = x1, y1
else:
ox, oy = xy
# The incoming data is float, but the _renderer type-checking wants
# to see integers.
self._renderer.restore_region(region, int(x1), int(y1),
int(x2), int(y2), int(ox), int(oy))
else:
self._renderer.restore_region(region)
def start_filter(self):
"""
Start filtering. It simply create a new canvas (the old one is saved).
"""
self._filter_renderers.append(self._renderer)
self._renderer = _RendererAgg(int(self.width), int(self.height),
self.dpi)
self._update_methods()
def stop_filter(self, post_processing):
"""
Save the plot in the current canvas as a image and apply
the *post_processing* function.
def post_processing(image, dpi):
# ny, nx, depth = image.shape
# image (numpy array) has RGBA channels and has a depth of 4.
...
# create a new_image (numpy array of 4 channels, size can be
# different). The resulting image may have offsets from
# lower-left corner of the original image
return new_image, offset_x, offset_y
The saved renderer is restored and the returned image from
post_processing is plotted (using draw_image) on it.
"""
# WARNING: For agg_filter to work, the renderer's method need to
# overridden in the class. See draw_markers and draw_path_collections.
width, height = int(self.width), int(self.height)
buffer, bounds = self.tostring_rgba_minimized()
l, b, w, h = bounds
self._renderer = self._filter_renderers.pop()
self._update_methods()
if w > 0 and h > 0:
img = np.fromstring(buffer, np.uint8)
img, ox, oy = post_processing(img.reshape((h, w, 4)) / 255.,
self.dpi)
gc = self.new_gc()
if img.dtype.kind == 'f':
img = np.asarray(img * 255., np.uint8)
img = img[::-1]
self._renderer.draw_image(
gc, l + ox, height - b - h + oy, img)
class FigureCanvasAgg(FigureCanvasBase):
"""
The canvas the figure renders into. Calls the draw and print fig
methods, creates the renderers, etc...
Attributes
----------
figure : `matplotlib.figure.Figure`
A high-level Figure instance
"""
def copy_from_bbox(self, bbox):
renderer = self.get_renderer()
return renderer.copy_from_bbox(bbox)
def restore_region(self, region, bbox=None, xy=None):
renderer = self.get_renderer()
return renderer.restore_region(region, bbox, xy)
def draw(self):
"""
Draw the figure using the renderer
"""
self.renderer = self.get_renderer(cleared=True)
# acquire a lock on the shared font cache
RendererAgg.lock.acquire()
toolbar = self.toolbar
try:
# if toolbar:
# toolbar.set_cursor(cursors.WAIT)
self.figure.draw(self.renderer)
# A GUI class may be need to update a window using this draw, so
# don't forget to call the superclass.
super(FigureCanvasAgg, self).draw()
finally:
# if toolbar:
# toolbar.set_cursor(toolbar._lastCursor)
RendererAgg.lock.release()
def get_renderer(self, cleared=False):
l, b, w, h = self.figure.bbox.bounds
key = w, h, self.figure.dpi
        try:
            self._lastKey, self.renderer
        except AttributeError:
            need_new_renderer = True
        else:
            need_new_renderer = (self._lastKey != key)
if need_new_renderer:
self.renderer = RendererAgg(w, h, self.figure.dpi)
self._lastKey = key
elif cleared:
self.renderer.clear()
return self.renderer
def tostring_rgb(self):
'''Get the image as an RGB byte string
`draw` must be called at least once before this function will work and
to update the renderer for any subsequent changes to the Figure.
Returns
-------
bytes
'''
return self.renderer.tostring_rgb()
def tostring_argb(self):
'''Get the image as an ARGB byte string
`draw` must be called at least once before this function will work and
to update the renderer for any subsequent changes to the Figure.
Returns
-------
bytes
'''
return self.renderer.tostring_argb()
def buffer_rgba(self):
'''Get the image as an RGBA byte string
`draw` must be called at least once before this function will work and
to update the renderer for any subsequent changes to the Figure.
Returns
-------
bytes
'''
return self.renderer.buffer_rgba()
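    # A minimal headless-rendering sketch built on the accessors above,
    # assuming numpy and an existing Figure `fig` (names are illustrative):
    #
    #     canvas = FigureCanvasAgg(fig)
    #     canvas.draw()
    #     w, h = canvas.get_width_height()
    #     rgb = np.frombuffer(canvas.tostring_rgb(), np.uint8).reshape(h, w, 3)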
def print_raw(self, filename_or_obj, *args, **kwargs):
FigureCanvasAgg.draw(self)
renderer = self.get_renderer()
original_dpi = renderer.dpi
renderer.dpi = self.figure.dpi
if isinstance(filename_or_obj, six.string_types):
fileobj = open(filename_or_obj, 'wb')
close = True
else:
fileobj = filename_or_obj
close = False
try:
fileobj.write(renderer._renderer.buffer_rgba())
finally:
if close:
fileobj.close()
renderer.dpi = original_dpi
print_rgba = print_raw
def print_png(self, filename_or_obj, *args, **kwargs):
FigureCanvasAgg.draw(self)
renderer = self.get_renderer()
original_dpi = renderer.dpi
renderer.dpi = self.figure.dpi
version_str = 'matplotlib version ' + __version__ + \
', http://matplotlib.org/'
metadata = OrderedDict({'Software': version_str})
user_metadata = kwargs.pop("metadata", None)
if user_metadata is not None:
metadata.update(user_metadata)
try:
with cbook.open_file_cm(filename_or_obj, "wb") as fh:
_png.write_png(renderer._renderer, fh,
self.figure.dpi, metadata=metadata)
finally:
renderer.dpi = original_dpi
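    # The *metadata* keyword handled above is reachable from the user-facing
    # API as well, e.g. (illustrative):
    #
    #     fig.savefig("out.png", metadata={"Title": "example"})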
def print_to_buffer(self):
FigureCanvasAgg.draw(self)
renderer = self.get_renderer()
original_dpi = renderer.dpi
renderer.dpi = self.figure.dpi
try:
result = (renderer._renderer.buffer_rgba(),
(int(renderer.width), int(renderer.height)))
finally:
renderer.dpi = original_dpi
return result
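    # The returned buffer can be viewed as an RGBA array, assuming numpy
    # (illustrative):
    #
    #     buf, (w, h) = canvas.print_to_buffer()
    #     rgba = np.frombuffer(buf, np.uint8).reshape(h, w, 4)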
if _has_pil:
# add JPEG support
def print_jpg(self, filename_or_obj, *args, **kwargs):
"""
Other Parameters
----------------
quality : int
                The image quality, on a scale from 1 (worst) to 95 (best).
                Defaults to the ``savefig.jpeg_quality`` rcParam, which is
                95 unless overridden in the matplotlibrc file.  Values above
                95 should be avoided; 100 completely disables the JPEG
                quantization stage.
optimize : bool
If present, indicates that the encoder should
make an extra pass over the image in order to select
optimal encoder settings.
progressive : bool
If present, indicates that this image
should be stored as a progressive JPEG file.
"""
buf, size = self.print_to_buffer()
if kwargs.pop("dryrun", False):
return
            # The image is "pasted" onto an opaque background image (the
            # savefig facecolor) to safely handle any transparency.
image = Image.frombuffer('RGBA', size, buf, 'raw', 'RGBA', 0, 1)
rgba = mcolors.to_rgba(rcParams['savefig.facecolor'])
color = tuple([int(x * 255.0) for x in rgba[:3]])
background = Image.new('RGB', size, color)
background.paste(image, image)
options = {k: kwargs[k]
for k in ['quality', 'optimize', 'progressive', 'dpi']
if k in kwargs}
options.setdefault('quality', rcParams['savefig.jpeg_quality'])
if 'dpi' in options:
# Set the same dpi in both x and y directions
options['dpi'] = (options['dpi'], options['dpi'])
return background.save(filename_or_obj, format='jpeg', **options)
print_jpeg = print_jpg
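        # The JPEG-specific keywords documented above flow through savefig,
        # e.g. (illustrative):
        #
        #     fig.savefig("out.jpg", quality=80, optimize=True,
        #                 progressive=True)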
# add TIFF support
def print_tif(self, filename_or_obj, *args, **kwargs):
buf, size = self.print_to_buffer()
if kwargs.pop("dryrun", False):
return
image = Image.frombuffer('RGBA', size, buf, 'raw', 'RGBA', 0, 1)
dpi = (self.figure.dpi, self.figure.dpi)
return image.save(filename_or_obj, format='tiff',
dpi=dpi)
print_tiff = print_tif
@_Backend.export
class _BackendAgg(_Backend):
FigureCanvas = FigureCanvasAgg
FigureManager = FigureManagerBase
| 20,963 | 33.766169 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backends/qt_compat.py
|
""" A Qt API selector that can be used to switch between PyQt and PySide.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import logging
import sys
from matplotlib import rcParams
_log = logging.getLogger(__name__)
# Available APIs.
QT_API_PYQT = 'PyQt4' # API is not set here; Python 2.x default is V 1
QT_API_PYQTv2 = 'PyQt4v2' # forced to Version 2 API
QT_API_PYSIDE = 'PySide' # only supports Version 2 API
QT_API_PYQT5 = 'PyQt5' # use PyQt5 API; Version 2 with module shim
QT_API_PYSIDE2 = 'PySide2' # Version 2 API with module shim
ETS = dict(pyqt=(QT_API_PYQTv2, 4), pyside=(QT_API_PYSIDE, 4),
pyqt5=(QT_API_PYQT5, 5), pyside2=(QT_API_PYSIDE2, 5))
# ETS is a dict of env variable to (QT_API, QT_MAJOR_VERSION)
# If the ETS QT_API environment variable is set, use it, but only
# if the variable is of the same major Qt version.  Note that
# ETS requires version 2 of the PyQt4 API, which is not the platform
# default for Python 2.x.
QT_API_ENV = os.environ.get('QT_API')
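# For example, launching a script with the variable set is enough to steer
# the selection logic below, provided the major versions agree (illustrative):
#
#     QT_API=pyside2 python my_script.py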
if rcParams['backend'] == 'Qt5Agg':
QT_RC_MAJOR_VERSION = 5
elif rcParams['backend'] == 'Qt4Agg':
QT_RC_MAJOR_VERSION = 4
else:
# A different backend was specified, but we still got here because a Qt
    # related file was imported.  This is allowed, so let's try to guess
# what we should be using.
if "PyQt4" in sys.modules or "PySide" in sys.modules:
# PyQt4 or PySide is actually used.
QT_RC_MAJOR_VERSION = 4
else:
# This is a fallback: PyQt5
QT_RC_MAJOR_VERSION = 5
QT_API = None
# Check if any binding is already imported; if so, silently ignore the
# rcParams/ENV settings and use whatever is already imported.
if 'PySide' in sys.modules:
# user has imported PySide before importing mpl
QT_API = QT_API_PYSIDE
if 'PySide2' in sys.modules:
    # user has imported PySide2 before importing mpl
QT_API = QT_API_PYSIDE2
if 'PyQt4' in sys.modules:
# user has imported PyQt4 before importing mpl
# this case also handles the PyQt4v2 case as once sip is imported
# the API versions can not be changed so do not try
QT_API = QT_API_PYQT
if 'PyQt5' in sys.modules:
# the user has imported PyQt5 before importing mpl
QT_API = QT_API_PYQT5
if (QT_API_ENV is not None) and QT_API is None:
try:
QT_ENV_MAJOR_VERSION = ETS[QT_API_ENV][1]
except KeyError:
raise RuntimeError(
('Unrecognized environment variable %r, valid values are:'
' %r, %r, %r or %r'
% (QT_API_ENV, 'pyqt', 'pyside', 'pyqt5', 'pyside2')))
if QT_ENV_MAJOR_VERSION == QT_RC_MAJOR_VERSION:
        # Only use the env variable if the backend and env Qt major
        # versions are compatible.
QT_API = ETS[QT_API_ENV][0]
_fallback_to_qt4 = False
if QT_API is None:
# No ETS environment or incompatible so use rcParams.
if rcParams['backend'] == 'Qt5Agg':
QT_API = QT_API_PYQT5
elif rcParams['backend'] == 'Qt4Agg':
QT_API = QT_API_PYQT
else:
# A non-Qt backend was specified, no version of the Qt
# bindings is imported, but we still got here because a Qt
        # related file was imported.  This is allowed, so fall back to Qt5
        # using whichever binding the rcParams ask for.
_fallback_to_qt4 = True
QT_API = QT_API_PYQT5
# We will define an appropriate wrapper for the differing versions
# of file dialog.
_getSaveFileName = None
# Flag to check if sip could be imported
_sip_imported = False
# Now perform the imports.
if QT_API in (QT_API_PYQT, QT_API_PYQTv2, QT_API_PYQT5):
try:
import sip
_sip_imported = True
except ImportError:
# Try using PySide
if QT_RC_MAJOR_VERSION == 5:
QT_API = QT_API_PYSIDE2
else:
QT_API = QT_API_PYSIDE
cond = ("Could not import sip; falling back on PySide\n"
"in place of PyQt4 or PyQt5.\n")
_log.info(cond)
if _sip_imported:
if QT_API == QT_API_PYQTv2:
if QT_API_ENV == 'pyqt':
cond = ("Found 'QT_API=pyqt' environment variable. "
"Setting PyQt4 API accordingly.\n")
else:
cond = "PyQt API v2 specified."
try:
sip.setapi('QString', 2)
except:
res = 'QString API v2 specification failed. Defaulting to v1.'
_log.info(cond + res)
# condition has now been reported, no need to repeat it:
cond = ""
try:
sip.setapi('QVariant', 2)
except:
res = 'QVariant API v2 specification failed. Defaulting to v1.'
_log.info(cond + res)
if QT_API == QT_API_PYQT5:
try:
from PyQt5 import QtCore, QtGui, QtWidgets
_getSaveFileName = QtWidgets.QFileDialog.getSaveFileName
except ImportError:
if _fallback_to_qt4:
# fell through, tried PyQt5, failed fall back to PyQt4
QT_API = QT_API_PYQT
QT_RC_MAJOR_VERSION = 4
else:
raise
    # This needs to be an `if` (not an `elif`) so we can re-test the value of
    # QT_API, which may have been changed in the above block.
if QT_API in [QT_API_PYQT, QT_API_PYQTv2]: # PyQt4 API
from PyQt4 import QtCore, QtGui
try:
if sip.getapi("QString") > 1:
# Use new getSaveFileNameAndFilter()
_getSaveFileName = QtGui.QFileDialog.getSaveFileNameAndFilter
else:
# Use old getSaveFileName()
def _getSaveFileName(*args, **kwargs):
return (QtGui.QFileDialog.getSaveFileName(*args, **kwargs),
None)
except (AttributeError, KeyError):
# call to getapi() can fail in older versions of sip
def _getSaveFileName(*args, **kwargs):
return QtGui.QFileDialog.getSaveFileName(*args, **kwargs), None
try:
# Alias PyQt-specific functions for PySide compatibility.
QtCore.Signal = QtCore.pyqtSignal
try:
QtCore.Slot = QtCore.pyqtSlot
except AttributeError:
# Not a perfect match but works in simple cases
QtCore.Slot = QtCore.pyqtSignature
QtCore.Property = QtCore.pyqtProperty
__version__ = QtCore.PYQT_VERSION_STR
except NameError:
# QtCore did not get imported, fall back to pyside
if QT_RC_MAJOR_VERSION == 5:
QT_API = QT_API_PYSIDE2
else:
QT_API = QT_API_PYSIDE
if QT_API == QT_API_PYSIDE2:
try:
from PySide2 import QtCore, QtGui, QtWidgets, __version__
_getSaveFileName = QtWidgets.QFileDialog.getSaveFileName
except ImportError:
# tried PySide2, failed, fall back to PySide
QT_RC_MAJOR_VERSION = 4
QT_API = QT_API_PYSIDE
if QT_API == QT_API_PYSIDE: # try importing pyside
try:
from PySide import QtCore, QtGui, __version__, __version_info__
except ImportError:
raise ImportError(
"Matplotlib qt-based backends require an external PyQt4, PyQt5,\n"
"PySide or PySide2 package to be installed, but it was not found.")
if __version_info__ < (1, 0, 3):
raise ImportError(
"Matplotlib backend_qt4 and backend_qt4agg require PySide >=1.0.3")
_getSaveFileName = QtGui.QFileDialog.getSaveFileName
# Apply shim to Qt4 APIs to make them look like Qt5
if QT_API in (QT_API_PYQT, QT_API_PYQTv2, QT_API_PYSIDE):
'''Import all used QtGui objects into QtWidgets
    Here I've opted to simply copy QtGui into QtWidgets as that
achieves the same result as copying over the objects, and will
continue to work if other objects are used.
'''
QtWidgets = QtGui
def is_pyqt5():
return QT_API == QT_API_PYQT5
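# Illustrative use from backend code, assuming this module is imported as
# `qt_compat`:
#
#     if qt_compat.is_pyqt5():
#         from PyQt5 import QtWidgets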
| 7,931 | 33.789474 | 79 |
py
|
cba-pipeline-public
|
cba-pipeline-public-master/containernet/ndn-containers/ndn_headless-player/bandits/venv/lib/python3.6/site-packages/matplotlib/backends/backend_qt4agg.py
|
"""
Render to Qt from Agg.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from .backend_qt5agg import (
_BackendQT5Agg, FigureCanvasQTAgg, FigureManagerQT, NavigationToolbar2QT)
@_BackendQT5Agg.export
class _BackendQT4Agg(_BackendQT5Agg):
pass
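# Selecting this backend explicitly is typically done before pyplot is
# imported, e.g. (illustrative):
#
#     import matplotlib
#     matplotlib.use("Qt4Agg")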
| 332 | 19.8125 | 77 |
py
|